OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 121 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
132 } | 132 } |
133 #endif | 133 #endif |
134 | 134 |
135 // Classic mode functions and builtins need to replace the receiver with the | 135 // Classic mode functions and builtins need to replace the receiver with the |
136 // global proxy when called as functions (without an explicit receiver | 136 // global proxy when called as functions (without an explicit receiver |
137 // object). | 137 // object). |
138 if (info->is_classic_mode() && !info->is_native()) { | 138 if (info->is_classic_mode() && !info->is_native()) { |
139 Label ok; | 139 Label ok; |
140 // +1 for return address. | 140 // +1 for return address. |
141 StackArgumentsAccessor args(rsp, info->scope()->num_parameters()); | 141 StackArgumentsAccessor args(rsp, info->scope()->num_parameters()); |
142 __ movq(rcx, args.GetReceiverOperand()); | 142 __ movp(rcx, args.GetReceiverOperand()); |
143 | 143 |
144 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); | 144 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); |
145 __ j(not_equal, &ok, Label::kNear); | 145 __ j(not_equal, &ok, Label::kNear); |
146 | 146 |
147 __ movq(rcx, GlobalObjectOperand()); | 147 __ movp(rcx, GlobalObjectOperand()); |
148 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset)); | 148 __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset)); |
149 | 149 |
150 __ movq(args.GetReceiverOperand(), rcx); | 150 __ movp(args.GetReceiverOperand(), rcx); |
151 | 151 |
152 __ bind(&ok); | 152 __ bind(&ok); |
153 } | 153 } |
154 | 154 |
155 // Open a frame scope to indicate that there is a frame on the stack. The | 155 // Open a frame scope to indicate that there is a frame on the stack. The |
156 // MANUAL indicates that the scope shouldn't actually generate code to set up | 156 // MANUAL indicates that the scope shouldn't actually generate code to set up |
157 // the frame (that is done below). | 157 // the frame (that is done below). |
158 FrameScope frame_scope(masm_, StackFrame::MANUAL); | 158 FrameScope frame_scope(masm_, StackFrame::MANUAL); |
159 | 159 |
160 info->set_prologue_offset(masm_->pc_offset()); | 160 info->set_prologue_offset(masm_->pc_offset()); |
(...skipping 27 matching lines...) Expand all Loading... |
188 __ CallRuntime(Runtime::kNewGlobalContext, 2); | 188 __ CallRuntime(Runtime::kNewGlobalContext, 2); |
189 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 189 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
190 FastNewContextStub stub(heap_slots); | 190 FastNewContextStub stub(heap_slots); |
191 __ CallStub(&stub); | 191 __ CallStub(&stub); |
192 } else { | 192 } else { |
193 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 193 __ CallRuntime(Runtime::kNewFunctionContext, 1); |
194 } | 194 } |
195 function_in_register = false; | 195 function_in_register = false; |
196 // Context is returned in both rax and rsi. It replaces the context | 196 // Context is returned in both rax and rsi. It replaces the context |
197 // passed to us. It's saved in the stack and kept live in rsi. | 197 // passed to us. It's saved in the stack and kept live in rsi. |
198 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); | 198 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); |
199 | 199 |
200 // Copy any necessary parameters into the context. | 200 // Copy any necessary parameters into the context. |
201 int num_parameters = info->scope()->num_parameters(); | 201 int num_parameters = info->scope()->num_parameters(); |
202 for (int i = 0; i < num_parameters; i++) { | 202 for (int i = 0; i < num_parameters; i++) { |
203 Variable* var = scope()->parameter(i); | 203 Variable* var = scope()->parameter(i); |
204 if (var->IsContextSlot()) { | 204 if (var->IsContextSlot()) { |
205 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 205 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
206 (num_parameters - 1 - i) * kPointerSize; | 206 (num_parameters - 1 - i) * kPointerSize; |
207 // Load parameter from stack. | 207 // Load parameter from stack. |
208 __ movq(rax, Operand(rbp, parameter_offset)); | 208 __ movp(rax, Operand(rbp, parameter_offset)); |
209 // Store it in the context. | 209 // Store it in the context. |
210 int context_offset = Context::SlotOffset(var->index()); | 210 int context_offset = Context::SlotOffset(var->index()); |
211 __ movq(Operand(rsi, context_offset), rax); | 211 __ movp(Operand(rsi, context_offset), rax); |
212 // Update the write barrier. This clobbers rax and rbx. | 212 // Update the write barrier. This clobbers rax and rbx. |
213 __ RecordWriteContextSlot( | 213 __ RecordWriteContextSlot( |
214 rsi, context_offset, rax, rbx, kDontSaveFPRegs); | 214 rsi, context_offset, rax, rbx, kDontSaveFPRegs); |
215 } | 215 } |
216 } | 216 } |
217 } | 217 } |
218 | 218 |
219 // Possibly allocate an arguments object. | 219 // Possibly allocate an arguments object. |
220 Variable* arguments = scope()->arguments(); | 220 Variable* arguments = scope()->arguments(); |
221 if (arguments != NULL) { | 221 if (arguments != NULL) { |
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
311 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT); | 311 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT); |
312 __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset), | 312 __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset), |
313 Smi::FromInt(-delta)); | 313 Smi::FromInt(-delta)); |
314 } | 314 } |
315 | 315 |
316 | 316 |
317 void FullCodeGenerator::EmitProfilingCounterReset() { | 317 void FullCodeGenerator::EmitProfilingCounterReset() { |
318 int reset_value = FLAG_interrupt_budget; | 318 int reset_value = FLAG_interrupt_budget; |
319 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT); | 319 __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT); |
320 __ Move(kScratchRegister, Smi::FromInt(reset_value)); | 320 __ Move(kScratchRegister, Smi::FromInt(reset_value)); |
321 __ movq(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister); | 321 __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister); |
322 } | 322 } |
323 | 323 |
324 | 324 |
325 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, | 325 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, |
326 Label* back_edge_target) { | 326 Label* back_edge_target) { |
327 Comment cmnt(masm_, "[ Back edge bookkeeping"); | 327 Comment cmnt(masm_, "[ Back edge bookkeeping"); |
328 Label ok; | 328 Label ok; |
329 | 329 |
330 ASSERT(back_edge_target->is_bound()); | 330 ASSERT(back_edge_target->is_bound()); |
331 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); | 331 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
381 __ bind(&ok); | 381 __ bind(&ok); |
382 #ifdef DEBUG | 382 #ifdef DEBUG |
383 // Add a label for checking the size of the code used for returning. | 383 // Add a label for checking the size of the code used for returning. |
384 Label check_exit_codesize; | 384 Label check_exit_codesize; |
385 masm_->bind(&check_exit_codesize); | 385 masm_->bind(&check_exit_codesize); |
386 #endif | 386 #endif |
387 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); | 387 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); |
388 __ RecordJSReturn(); | 388 __ RecordJSReturn(); |
389 // Do not use the leave instruction here because it is too short to | 389 // Do not use the leave instruction here because it is too short to |
390 // patch with the code required by the debugger. | 390 // patch with the code required by the debugger. |
391 __ movq(rsp, rbp); | 391 __ movp(rsp, rbp); |
392 __ pop(rbp); | 392 __ pop(rbp); |
393 int no_frame_start = masm_->pc_offset(); | 393 int no_frame_start = masm_->pc_offset(); |
394 | 394 |
395 int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize; | 395 int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize; |
396 __ Ret(arguments_bytes, rcx); | 396 __ Ret(arguments_bytes, rcx); |
397 | 397 |
398 #ifdef ENABLE_DEBUGGER_SUPPORT | 398 #ifdef ENABLE_DEBUGGER_SUPPORT |
399 // Add padding that will be overwritten by a debugger breakpoint. We | 399 // Add padding that will be overwritten by a debugger breakpoint. We |
400 // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k" | 400 // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k" |
401 // (3 + 1 + 3). | 401 // (3 + 1 + 3). |
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
538 ASSERT(count > 0); | 538 ASSERT(count > 0); |
539 __ Drop(count); | 539 __ Drop(count); |
540 __ Move(result_register(), reg); | 540 __ Move(result_register(), reg); |
541 } | 541 } |
542 | 542 |
543 | 543 |
544 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, | 544 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, |
545 Register reg) const { | 545 Register reg) const { |
546 ASSERT(count > 0); | 546 ASSERT(count > 0); |
547 if (count > 1) __ Drop(count - 1); | 547 if (count > 1) __ Drop(count - 1); |
548 __ movq(Operand(rsp, 0), reg); | 548 __ movp(Operand(rsp, 0), reg); |
549 } | 549 } |
550 | 550 |
551 | 551 |
552 void FullCodeGenerator::TestContext::DropAndPlug(int count, | 552 void FullCodeGenerator::TestContext::DropAndPlug(int count, |
553 Register reg) const { | 553 Register reg) const { |
554 ASSERT(count > 0); | 554 ASSERT(count > 0); |
555 // For simplicity we always test the accumulator register. | 555 // For simplicity we always test the accumulator register. |
556 __ Drop(count); | 556 __ Drop(count); |
557 __ Move(result_register(), reg); | 557 __ Move(result_register(), reg); |
558 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); | 558 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); |
(...skipping 122 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
681 return ContextOperand(scratch, var->index()); | 681 return ContextOperand(scratch, var->index()); |
682 } else { | 682 } else { |
683 return StackOperand(var); | 683 return StackOperand(var); |
684 } | 684 } |
685 } | 685 } |
686 | 686 |
687 | 687 |
688 void FullCodeGenerator::GetVar(Register dest, Variable* var) { | 688 void FullCodeGenerator::GetVar(Register dest, Variable* var) { |
689 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); | 689 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); |
690 MemOperand location = VarOperand(var, dest); | 690 MemOperand location = VarOperand(var, dest); |
691 __ movq(dest, location); | 691 __ movp(dest, location); |
692 } | 692 } |
693 | 693 |
694 | 694 |
695 void FullCodeGenerator::SetVar(Variable* var, | 695 void FullCodeGenerator::SetVar(Variable* var, |
696 Register src, | 696 Register src, |
697 Register scratch0, | 697 Register scratch0, |
698 Register scratch1) { | 698 Register scratch1) { |
699 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); | 699 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); |
700 ASSERT(!scratch0.is(src)); | 700 ASSERT(!scratch0.is(src)); |
701 ASSERT(!scratch0.is(scratch1)); | 701 ASSERT(!scratch0.is(scratch1)); |
702 ASSERT(!scratch1.is(src)); | 702 ASSERT(!scratch1.is(src)); |
703 MemOperand location = VarOperand(var, scratch0); | 703 MemOperand location = VarOperand(var, scratch0); |
704 __ movq(location, src); | 704 __ movp(location, src); |
705 | 705 |
706 // Emit the write barrier code if the location is in the heap. | 706 // Emit the write barrier code if the location is in the heap. |
707 if (var->IsContextSlot()) { | 707 if (var->IsContextSlot()) { |
708 int offset = Context::SlotOffset(var->index()); | 708 int offset = Context::SlotOffset(var->index()); |
709 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs); | 709 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs); |
710 } | 710 } |
711 } | 711 } |
712 | 712 |
713 | 713 |
714 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, | 714 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, |
(...skipping 14 matching lines...) Expand all Loading... |
729 __ bind(&skip); | 729 __ bind(&skip); |
730 } | 730 } |
731 } | 731 } |
732 | 732 |
733 | 733 |
734 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { | 734 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { |
735 // The variable in the declaration always resides in the current context. | 735 // The variable in the declaration always resides in the current context. |
736 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); | 736 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); |
737 if (generate_debug_code_) { | 737 if (generate_debug_code_) { |
738 // Check that we're not inside a with or catch context. | 738 // Check that we're not inside a with or catch context. |
739 __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset)); | 739 __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset)); |
740 __ CompareRoot(rbx, Heap::kWithContextMapRootIndex); | 740 __ CompareRoot(rbx, Heap::kWithContextMapRootIndex); |
741 __ Check(not_equal, kDeclarationInWithContext); | 741 __ Check(not_equal, kDeclarationInWithContext); |
742 __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex); | 742 __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex); |
743 __ Check(not_equal, kDeclarationInCatchContext); | 743 __ Check(not_equal, kDeclarationInCatchContext); |
744 } | 744 } |
745 } | 745 } |
746 | 746 |
747 | 747 |
748 void FullCodeGenerator::VisitVariableDeclaration( | 748 void FullCodeGenerator::VisitVariableDeclaration( |
749 VariableDeclaration* declaration) { | 749 VariableDeclaration* declaration) { |
(...skipping 11 matching lines...) Expand all Loading... |
761 ? isolate()->factory()->the_hole_value() | 761 ? isolate()->factory()->the_hole_value() |
762 : isolate()->factory()->undefined_value(), | 762 : isolate()->factory()->undefined_value(), |
763 zone()); | 763 zone()); |
764 break; | 764 break; |
765 | 765 |
766 case Variable::PARAMETER: | 766 case Variable::PARAMETER: |
767 case Variable::LOCAL: | 767 case Variable::LOCAL: |
768 if (hole_init) { | 768 if (hole_init) { |
769 Comment cmnt(masm_, "[ VariableDeclaration"); | 769 Comment cmnt(masm_, "[ VariableDeclaration"); |
770 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); | 770 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); |
771 __ movq(StackOperand(variable), kScratchRegister); | 771 __ movp(StackOperand(variable), kScratchRegister); |
772 } | 772 } |
773 break; | 773 break; |
774 | 774 |
775 case Variable::CONTEXT: | 775 case Variable::CONTEXT: |
776 if (hole_init) { | 776 if (hole_init) { |
777 Comment cmnt(masm_, "[ VariableDeclaration"); | 777 Comment cmnt(masm_, "[ VariableDeclaration"); |
778 EmitDebugCheckDeclarationContext(variable); | 778 EmitDebugCheckDeclarationContext(variable); |
779 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); | 779 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); |
780 __ movq(ContextOperand(rsi, variable->index()), kScratchRegister); | 780 __ movp(ContextOperand(rsi, variable->index()), kScratchRegister); |
781 // No write barrier since the hole value is in old space. | 781 // No write barrier since the hole value is in old space. |
782 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 782 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
783 } | 783 } |
784 break; | 784 break; |
785 | 785 |
786 case Variable::LOOKUP: { | 786 case Variable::LOOKUP: { |
787 Comment cmnt(masm_, "[ VariableDeclaration"); | 787 Comment cmnt(masm_, "[ VariableDeclaration"); |
788 __ push(rsi); | 788 __ push(rsi); |
789 __ Push(variable->name()); | 789 __ Push(variable->name()); |
790 // Declaration nodes are always introduced in one of four modes. | 790 // Declaration nodes are always introduced in one of four modes. |
(...skipping 29 matching lines...) Expand all Loading... |
820 // Check for stack-overflow exception. | 820 // Check for stack-overflow exception. |
821 if (function.is_null()) return SetStackOverflow(); | 821 if (function.is_null()) return SetStackOverflow(); |
822 globals_->Add(function, zone()); | 822 globals_->Add(function, zone()); |
823 break; | 823 break; |
824 } | 824 } |
825 | 825 |
826 case Variable::PARAMETER: | 826 case Variable::PARAMETER: |
827 case Variable::LOCAL: { | 827 case Variable::LOCAL: { |
828 Comment cmnt(masm_, "[ FunctionDeclaration"); | 828 Comment cmnt(masm_, "[ FunctionDeclaration"); |
829 VisitForAccumulatorValue(declaration->fun()); | 829 VisitForAccumulatorValue(declaration->fun()); |
830 __ movq(StackOperand(variable), result_register()); | 830 __ movp(StackOperand(variable), result_register()); |
831 break; | 831 break; |
832 } | 832 } |
833 | 833 |
834 case Variable::CONTEXT: { | 834 case Variable::CONTEXT: { |
835 Comment cmnt(masm_, "[ FunctionDeclaration"); | 835 Comment cmnt(masm_, "[ FunctionDeclaration"); |
836 EmitDebugCheckDeclarationContext(variable); | 836 EmitDebugCheckDeclarationContext(variable); |
837 VisitForAccumulatorValue(declaration->fun()); | 837 VisitForAccumulatorValue(declaration->fun()); |
838 __ movq(ContextOperand(rsi, variable->index()), result_register()); | 838 __ movp(ContextOperand(rsi, variable->index()), result_register()); |
839 int offset = Context::SlotOffset(variable->index()); | 839 int offset = Context::SlotOffset(variable->index()); |
840 // We know that we have written a function, which is not a smi. | 840 // We know that we have written a function, which is not a smi. |
841 __ RecordWriteContextSlot(rsi, | 841 __ RecordWriteContextSlot(rsi, |
842 offset, | 842 offset, |
843 result_register(), | 843 result_register(), |
844 rcx, | 844 rcx, |
845 kDontSaveFPRegs, | 845 kDontSaveFPRegs, |
846 EMIT_REMEMBERED_SET, | 846 EMIT_REMEMBERED_SET, |
847 OMIT_SMI_CHECK); | 847 OMIT_SMI_CHECK); |
848 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 848 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
(...skipping 16 matching lines...) Expand all Loading... |
865 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { | 865 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { |
866 Variable* variable = declaration->proxy()->var(); | 866 Variable* variable = declaration->proxy()->var(); |
867 ASSERT(variable->location() == Variable::CONTEXT); | 867 ASSERT(variable->location() == Variable::CONTEXT); |
868 ASSERT(variable->interface()->IsFrozen()); | 868 ASSERT(variable->interface()->IsFrozen()); |
869 | 869 |
870 Comment cmnt(masm_, "[ ModuleDeclaration"); | 870 Comment cmnt(masm_, "[ ModuleDeclaration"); |
871 EmitDebugCheckDeclarationContext(variable); | 871 EmitDebugCheckDeclarationContext(variable); |
872 | 872 |
873 // Load instance object. | 873 // Load instance object. |
874 __ LoadContext(rax, scope_->ContextChainLength(scope_->GlobalScope())); | 874 __ LoadContext(rax, scope_->ContextChainLength(scope_->GlobalScope())); |
875 __ movq(rax, ContextOperand(rax, variable->interface()->Index())); | 875 __ movp(rax, ContextOperand(rax, variable->interface()->Index())); |
876 __ movq(rax, ContextOperand(rax, Context::EXTENSION_INDEX)); | 876 __ movp(rax, ContextOperand(rax, Context::EXTENSION_INDEX)); |
877 | 877 |
878 // Assign it. | 878 // Assign it. |
879 __ movq(ContextOperand(rsi, variable->index()), rax); | 879 __ movp(ContextOperand(rsi, variable->index()), rax); |
880 // We know that we have written a module, which is not a smi. | 880 // We know that we have written a module, which is not a smi. |
881 __ RecordWriteContextSlot(rsi, | 881 __ RecordWriteContextSlot(rsi, |
882 Context::SlotOffset(variable->index()), | 882 Context::SlotOffset(variable->index()), |
883 rax, | 883 rax, |
884 rcx, | 884 rcx, |
885 kDontSaveFPRegs, | 885 kDontSaveFPRegs, |
886 EMIT_REMEMBERED_SET, | 886 EMIT_REMEMBERED_SET, |
887 OMIT_SMI_CHECK); | 887 OMIT_SMI_CHECK); |
888 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS); | 888 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS); |
889 | 889 |
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
963 } | 963 } |
964 | 964 |
965 Comment cmnt(masm_, "[ Case comparison"); | 965 Comment cmnt(masm_, "[ Case comparison"); |
966 __ bind(&next_test); | 966 __ bind(&next_test); |
967 next_test.Unuse(); | 967 next_test.Unuse(); |
968 | 968 |
969 // Compile the label expression. | 969 // Compile the label expression. |
970 VisitForAccumulatorValue(clause->label()); | 970 VisitForAccumulatorValue(clause->label()); |
971 | 971 |
972 // Perform the comparison as if via '==='. | 972 // Perform the comparison as if via '==='. |
973 __ movq(rdx, Operand(rsp, 0)); // Switch value. | 973 __ movp(rdx, Operand(rsp, 0)); // Switch value. |
974 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); | 974 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); |
975 JumpPatchSite patch_site(masm_); | 975 JumpPatchSite patch_site(masm_); |
976 if (inline_smi_code) { | 976 if (inline_smi_code) { |
977 Label slow_case; | 977 Label slow_case; |
978 __ movq(rcx, rdx); | 978 __ movp(rcx, rdx); |
979 __ or_(rcx, rax); | 979 __ or_(rcx, rax); |
980 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear); | 980 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear); |
981 | 981 |
982 __ cmpq(rdx, rax); | 982 __ cmpq(rdx, rax); |
983 __ j(not_equal, &next_test); | 983 __ j(not_equal, &next_test); |
984 __ Drop(1); // Switch value is no longer needed. | 984 __ Drop(1); // Switch value is no longer needed. |
985 __ jmp(clause->body_target()); | 985 __ jmp(clause->body_target()); |
986 __ bind(&slow_case); | 986 __ bind(&slow_case); |
987 } | 987 } |
988 | 988 |
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1070 | 1070 |
1071 // Check cache validity in generated code. This is a fast case for | 1071 // Check cache validity in generated code. This is a fast case for |
1072 // the JSObject::IsSimpleEnum cache validity checks. If we cannot | 1072 // the JSObject::IsSimpleEnum cache validity checks. If we cannot |
1073 // guarantee cache validity, call the runtime system to check cache | 1073 // guarantee cache validity, call the runtime system to check cache |
1074 // validity or get the property names in a fixed array. | 1074 // validity or get the property names in a fixed array. |
1075 __ CheckEnumCache(null_value, &call_runtime); | 1075 __ CheckEnumCache(null_value, &call_runtime); |
1076 | 1076 |
1077 // The enum cache is valid. Load the map of the object being | 1077 // The enum cache is valid. Load the map of the object being |
1078 // iterated over and use the cache for the iteration. | 1078 // iterated over and use the cache for the iteration. |
1079 Label use_cache; | 1079 Label use_cache; |
1080 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset)); | 1080 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset)); |
1081 __ jmp(&use_cache, Label::kNear); | 1081 __ jmp(&use_cache, Label::kNear); |
1082 | 1082 |
1083 // Get the set of properties to enumerate. | 1083 // Get the set of properties to enumerate. |
1084 __ bind(&call_runtime); | 1084 __ bind(&call_runtime); |
1085 __ push(rax); // Duplicate the enumerable object on the stack. | 1085 __ push(rax); // Duplicate the enumerable object on the stack. |
1086 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); | 1086 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); |
1087 | 1087 |
1088 // If we got a map from the runtime call, we can do a fast | 1088 // If we got a map from the runtime call, we can do a fast |
1089 // modification check. Otherwise, we got a fixed array, and we have | 1089 // modification check. Otherwise, we got a fixed array, and we have |
1090 // to do a slow check. | 1090 // to do a slow check. |
1091 Label fixed_array; | 1091 Label fixed_array; |
1092 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | 1092 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
1093 Heap::kMetaMapRootIndex); | 1093 Heap::kMetaMapRootIndex); |
1094 __ j(not_equal, &fixed_array); | 1094 __ j(not_equal, &fixed_array); |
1095 | 1095 |
1096 // We got a map in register rax. Get the enumeration cache from it. | 1096 // We got a map in register rax. Get the enumeration cache from it. |
1097 __ bind(&use_cache); | 1097 __ bind(&use_cache); |
1098 | 1098 |
1099 Label no_descriptors; | 1099 Label no_descriptors; |
1100 | 1100 |
1101 __ EnumLength(rdx, rax); | 1101 __ EnumLength(rdx, rax); |
1102 __ Cmp(rdx, Smi::FromInt(0)); | 1102 __ Cmp(rdx, Smi::FromInt(0)); |
1103 __ j(equal, &no_descriptors); | 1103 __ j(equal, &no_descriptors); |
1104 | 1104 |
1105 __ LoadInstanceDescriptors(rax, rcx); | 1105 __ LoadInstanceDescriptors(rax, rcx); |
1106 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset)); | 1106 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset)); |
1107 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 1107 __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
1108 | 1108 |
1109 // Set up the four remaining stack slots. | 1109 // Set up the four remaining stack slots. |
1110 __ push(rax); // Map. | 1110 __ push(rax); // Map. |
1111 __ push(rcx); // Enumeration cache. | 1111 __ push(rcx); // Enumeration cache. |
1112 __ push(rdx); // Number of valid entries for the map in the enum cache. | 1112 __ push(rdx); // Number of valid entries for the map in the enum cache. |
1113 __ Push(Smi::FromInt(0)); // Initial index. | 1113 __ Push(Smi::FromInt(0)); // Initial index. |
1114 __ jmp(&loop); | 1114 __ jmp(&loop); |
1115 | 1115 |
1116 __ bind(&no_descriptors); | 1116 __ bind(&no_descriptors); |
1117 __ addq(rsp, Immediate(kPointerSize)); | 1117 __ addq(rsp, Immediate(kPointerSize)); |
1118 __ jmp(&exit); | 1118 __ jmp(&exit); |
1119 | 1119 |
1120 // We got a fixed array in register rax. Iterate through that. | 1120 // We got a fixed array in register rax. Iterate through that. |
1121 Label non_proxy; | 1121 Label non_proxy; |
1122 __ bind(&fixed_array); | 1122 __ bind(&fixed_array); |
1123 | 1123 |
1124 Handle<Cell> cell = isolate()->factory()->NewCell( | 1124 Handle<Cell> cell = isolate()->factory()->NewCell( |
1125 Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker), | 1125 Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker), |
1126 isolate())); | 1126 isolate())); |
1127 RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell); | 1127 RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell); |
1128 __ Move(rbx, cell); | 1128 __ Move(rbx, cell); |
1129 __ Move(FieldOperand(rbx, Cell::kValueOffset), | 1129 __ Move(FieldOperand(rbx, Cell::kValueOffset), |
1130 Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)); | 1130 Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)); |
1131 | 1131 |
1132 __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check | 1132 __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check |
1133 __ movq(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object | 1133 __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object |
1134 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 1134 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); |
1135 __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx); | 1135 __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx); |
1136 __ j(above, &non_proxy); | 1136 __ j(above, &non_proxy); |
1137 __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy | 1137 __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy |
1138 __ bind(&non_proxy); | 1138 __ bind(&non_proxy); |
1139 __ push(rbx); // Smi | 1139 __ push(rbx); // Smi |
1140 __ push(rax); // Array | 1140 __ push(rax); // Array |
1141 __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset)); | 1141 __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset)); |
1142 __ push(rax); // Fixed array length (as smi). | 1142 __ push(rax); // Fixed array length (as smi). |
1143 __ Push(Smi::FromInt(0)); // Initial index. | 1143 __ Push(Smi::FromInt(0)); // Initial index. |
1144 | 1144 |
1145 // Generate code for doing the condition check. | 1145 // Generate code for doing the condition check. |
1146 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); | 1146 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); |
1147 __ bind(&loop); | 1147 __ bind(&loop); |
1148 __ movq(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index. | 1148 __ movp(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index. |
1149 __ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length. | 1149 __ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length. |
1150 __ j(above_equal, loop_statement.break_label()); | 1150 __ j(above_equal, loop_statement.break_label()); |
1151 | 1151 |
1152 // Get the current entry of the array into register rbx. | 1152 // Get the current entry of the array into register rbx. |
1153 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); | 1153 __ movp(rbx, Operand(rsp, 2 * kPointerSize)); |
1154 SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2); | 1154 SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2); |
1155 __ movq(rbx, FieldOperand(rbx, | 1155 __ movp(rbx, FieldOperand(rbx, |
1156 index.reg, | 1156 index.reg, |
1157 index.scale, | 1157 index.scale, |
1158 FixedArray::kHeaderSize)); | 1158 FixedArray::kHeaderSize)); |
1159 | 1159 |
1160 // Get the expected map from the stack or a smi in the | 1160 // Get the expected map from the stack or a smi in the |
1161 // permanent slow case into register rdx. | 1161 // permanent slow case into register rdx. |
1162 __ movq(rdx, Operand(rsp, 3 * kPointerSize)); | 1162 __ movp(rdx, Operand(rsp, 3 * kPointerSize)); |
1163 | 1163 |
1164 // Check if the expected map still matches that of the enumerable. | 1164 // Check if the expected map still matches that of the enumerable. |
1165 // If not, we may have to filter the key. | 1165 // If not, we may have to filter the key. |
1166 Label update_each; | 1166 Label update_each; |
1167 __ movq(rcx, Operand(rsp, 4 * kPointerSize)); | 1167 __ movp(rcx, Operand(rsp, 4 * kPointerSize)); |
1168 __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset)); | 1168 __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset)); |
1169 __ j(equal, &update_each, Label::kNear); | 1169 __ j(equal, &update_each, Label::kNear); |
1170 | 1170 |
1171 // For proxies, no filtering is done. | 1171 // For proxies, no filtering is done. |
1172 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. | 1172 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. |
1173 __ Cmp(rdx, Smi::FromInt(0)); | 1173 __ Cmp(rdx, Smi::FromInt(0)); |
1174 __ j(equal, &update_each, Label::kNear); | 1174 __ j(equal, &update_each, Label::kNear); |
1175 | 1175 |
1176 // Convert the entry to a string or null if it isn't a property | 1176 // Convert the entry to a string or null if it isn't a property |
1177 // anymore. If the property has been removed while iterating, we | 1177 // anymore. If the property has been removed while iterating, we |
1178 // just skip it. | 1178 // just skip it. |
1179 __ push(rcx); // Enumerable. | 1179 __ push(rcx); // Enumerable. |
1180 __ push(rbx); // Current entry. | 1180 __ push(rbx); // Current entry. |
1181 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); | 1181 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); |
1182 __ Cmp(rax, Smi::FromInt(0)); | 1182 __ Cmp(rax, Smi::FromInt(0)); |
1183 __ j(equal, loop_statement.continue_label()); | 1183 __ j(equal, loop_statement.continue_label()); |
1184 __ movq(rbx, rax); | 1184 __ movp(rbx, rax); |
1185 | 1185 |
1186 // Update the 'each' property or variable from the possibly filtered | 1186 // Update the 'each' property or variable from the possibly filtered |
1187 // entry in register rbx. | 1187 // entry in register rbx. |
1188 __ bind(&update_each); | 1188 __ bind(&update_each); |
1189 __ movq(result_register(), rbx); | 1189 __ movp(result_register(), rbx); |
1190 // Perform the assignment as if via '='. | 1190 // Perform the assignment as if via '='. |
1191 { EffectContext context(this); | 1191 { EffectContext context(this); |
1192 EmitAssignment(stmt->each()); | 1192 EmitAssignment(stmt->each()); |
1193 } | 1193 } |
1194 | 1194 |
1195 // Generate code for the body of the loop. | 1195 // Generate code for the body of the loop. |
1196 Visit(stmt->body()); | 1196 Visit(stmt->body()); |
1197 | 1197 |
1198 // Generate code for going to the next element by incrementing the | 1198 // Generate code for going to the next element by incrementing the |
1199 // index (smi) stored on top of the stack. | 1199 // index (smi) stored on top of the stack. |
(...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1315 Scope* s = scope(); | 1315 Scope* s = scope(); |
1316 while (s != NULL) { | 1316 while (s != NULL) { |
1317 if (s->num_heap_slots() > 0) { | 1317 if (s->num_heap_slots() > 0) { |
1318 if (s->calls_non_strict_eval()) { | 1318 if (s->calls_non_strict_eval()) { |
1319 // Check that extension is NULL. | 1319 // Check that extension is NULL. |
1320 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), | 1320 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), |
1321 Immediate(0)); | 1321 Immediate(0)); |
1322 __ j(not_equal, slow); | 1322 __ j(not_equal, slow); |
1323 } | 1323 } |
1324 // Load next context in chain. | 1324 // Load next context in chain. |
1325 __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX)); | 1325 __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX)); |
1326 // Walk the rest of the chain without clobbering rsi. | 1326 // Walk the rest of the chain without clobbering rsi. |
1327 context = temp; | 1327 context = temp; |
1328 } | 1328 } |
1329 // If no outer scope calls eval, we do not need to check more | 1329 // If no outer scope calls eval, we do not need to check more |
1330 // context extensions. If we have reached an eval scope, we check | 1330 // context extensions. If we have reached an eval scope, we check |
1331 // all extensions from this point. | 1331 // all extensions from this point. |
1332 if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break; | 1332 if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break; |
1333 s = s->outer_scope(); | 1333 s = s->outer_scope(); |
1334 } | 1334 } |
1335 | 1335 |
1336 if (s != NULL && s->is_eval_scope()) { | 1336 if (s != NULL && s->is_eval_scope()) { |
1337 // Loop up the context chain. There is no frame effect so it is | 1337 // Loop up the context chain. There is no frame effect so it is |
1338 // safe to use raw labels here. | 1338 // safe to use raw labels here. |
1339 Label next, fast; | 1339 Label next, fast; |
1340 if (!context.is(temp)) { | 1340 if (!context.is(temp)) { |
1341 __ movq(temp, context); | 1341 __ movp(temp, context); |
1342 } | 1342 } |
1343 // Load map for comparison into register, outside loop. | 1343 // Load map for comparison into register, outside loop. |
1344 __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex); | 1344 __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex); |
1345 __ bind(&next); | 1345 __ bind(&next); |
1346 // Terminate at native context. | 1346 // Terminate at native context. |
1347 __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset)); | 1347 __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset)); |
1348 __ j(equal, &fast, Label::kNear); | 1348 __ j(equal, &fast, Label::kNear); |
1349 // Check that extension is NULL. | 1349 // Check that extension is NULL. |
1350 __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0)); | 1350 __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0)); |
1351 __ j(not_equal, slow); | 1351 __ j(not_equal, slow); |
1352 // Load next context in chain. | 1352 // Load next context in chain. |
1353 __ movq(temp, ContextOperand(temp, Context::PREVIOUS_INDEX)); | 1353 __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX)); |
1354 __ jmp(&next); | 1354 __ jmp(&next); |
1355 __ bind(&fast); | 1355 __ bind(&fast); |
1356 } | 1356 } |
1357 | 1357 |
1358 // All extension objects were empty and it is safe to use a global | 1358 // All extension objects were empty and it is safe to use a global |
1359 // load IC call. | 1359 // load IC call. |
1360 __ movq(rax, GlobalObjectOperand()); | 1360 __ movp(rax, GlobalObjectOperand()); |
1361 __ Move(rcx, var->name()); | 1361 __ Move(rcx, var->name()); |
1362 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) | 1362 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) |
1363 ? NOT_CONTEXTUAL | 1363 ? NOT_CONTEXTUAL |
1364 : CONTEXTUAL; | 1364 : CONTEXTUAL; |
1365 CallLoadIC(mode); | 1365 CallLoadIC(mode); |
1366 } | 1366 } |
1367 | 1367 |
1368 | 1368 |
// Returns a memory operand addressing the context slot holding |var|,
// walking the context chain from the current scope up to the scope that
// declared the variable. Any intervening scope that calls non-strict eval
// may have acquired a context extension object at runtime; whenever such
// an extension is present (slot non-NULL), control jumps to |slow| and the
// returned operand must not be used.
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = rsi;
  Register temp = rbx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    // Scopes with no heap slots allocate no context, so there is nothing
    // to check or traverse for them.
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
  }
  // Check that last extension is NULL (the context of the scope that
  // declared the variable itself).
  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an rsi-based operand (the write barrier cannot be allowed to
  // destroy the rsi register).
  return ContextOperand(context, var->index());
}
1397 | 1397 |
1398 | 1398 |
1399 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var, | 1399 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var, |
1400 TypeofState typeof_state, | 1400 TypeofState typeof_state, |
1401 Label* slow, | 1401 Label* slow, |
1402 Label* done) { | 1402 Label* done) { |
1403 // Generate fast-case code for variables that might be shadowed by | 1403 // Generate fast-case code for variables that might be shadowed by |
1404 // eval-introduced variables. Eval is used a lot without | 1404 // eval-introduced variables. Eval is used a lot without |
1405 // introducing variables. In those cases, we do not want to | 1405 // introducing variables. In those cases, we do not want to |
1406 // perform a runtime call for all variables in the scope | 1406 // perform a runtime call for all variables in the scope |
1407 // containing the eval. | 1407 // containing the eval. |
1408 if (var->mode() == DYNAMIC_GLOBAL) { | 1408 if (var->mode() == DYNAMIC_GLOBAL) { |
1409 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); | 1409 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); |
1410 __ jmp(done); | 1410 __ jmp(done); |
1411 } else if (var->mode() == DYNAMIC_LOCAL) { | 1411 } else if (var->mode() == DYNAMIC_LOCAL) { |
1412 Variable* local = var->local_if_not_shadowed(); | 1412 Variable* local = var->local_if_not_shadowed(); |
1413 __ movq(rax, ContextSlotOperandCheckExtensions(local, slow)); | 1413 __ movp(rax, ContextSlotOperandCheckExtensions(local, slow)); |
1414 if (local->mode() == LET || | 1414 if (local->mode() == LET || |
1415 local->mode() == CONST || | 1415 local->mode() == CONST || |
1416 local->mode() == CONST_HARMONY) { | 1416 local->mode() == CONST_HARMONY) { |
1417 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); | 1417 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); |
1418 __ j(not_equal, done); | 1418 __ j(not_equal, done); |
1419 if (local->mode() == CONST) { | 1419 if (local->mode() == CONST) { |
1420 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); | 1420 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); |
1421 } else { // LET || CONST_HARMONY | 1421 } else { // LET || CONST_HARMONY |
1422 __ Push(var->name()); | 1422 __ Push(var->name()); |
1423 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 1423 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
(...skipping 10 matching lines...) Expand all Loading... |
1434 Variable* var = proxy->var(); | 1434 Variable* var = proxy->var(); |
1435 | 1435 |
1436 // Three cases: global variables, lookup variables, and all other types of | 1436 // Three cases: global variables, lookup variables, and all other types of |
1437 // variables. | 1437 // variables. |
1438 switch (var->location()) { | 1438 switch (var->location()) { |
1439 case Variable::UNALLOCATED: { | 1439 case Variable::UNALLOCATED: { |
1440 Comment cmnt(masm_, "Global variable"); | 1440 Comment cmnt(masm_, "Global variable"); |
1441 // Use inline caching. Variable name is passed in rcx and the global | 1441 // Use inline caching. Variable name is passed in rcx and the global |
1442 // object on the stack. | 1442 // object on the stack. |
1443 __ Move(rcx, var->name()); | 1443 __ Move(rcx, var->name()); |
1444 __ movq(rax, GlobalObjectOperand()); | 1444 __ movp(rax, GlobalObjectOperand()); |
1445 CallLoadIC(CONTEXTUAL); | 1445 CallLoadIC(CONTEXTUAL); |
1446 context()->Plug(rax); | 1446 context()->Plug(rax); |
1447 break; | 1447 break; |
1448 } | 1448 } |
1449 | 1449 |
1450 case Variable::PARAMETER: | 1450 case Variable::PARAMETER: |
1451 case Variable::LOCAL: | 1451 case Variable::LOCAL: |
1452 case Variable::CONTEXT: { | 1452 case Variable::CONTEXT: { |
1453 Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot"); | 1453 Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot"); |
1454 if (var->binding_needs_init()) { | 1454 if (var->binding_needs_init()) { |
(...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1530 | 1530 |
1531 | 1531 |
// Materializes a JSRegExp object for a regexp literal. The boilerplate
// regexp is cached in the function's literals array at the literal's index:
// an undefined slot means it has not been created yet, in which case the
// runtime builds it; otherwise the cached boilerplate is shallow-copied
// into a fresh allocation, which is left in rax for the context.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // rdi = JS function.
  // rcx = literals array.
  // rbx = regexp literal.
  // rax = regexp literal clone.
  __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ movp(rbx, FieldOperand(rcx, literal_offset));
  // Undefined in the literals slot means no boilerplate exists yet.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in rax.
  __ push(rcx);
  __ Push(Smi::FromInt(expr->literal_index()));
  __ Push(expr->pattern());
  __ Push(expr->flags());
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ movp(rbx, rax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  // Try inline (new-space) allocation first; fall back to the runtime.
  __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  // Preserve the boilerplate (rbx) across the runtime call.
  __ push(rbx);
  __ Push(Smi::FromInt(size));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(rbx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ movp(rdx, FieldOperand(rbx, i));
    __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
    __ movp(FieldOperand(rax, i), rdx);
    __ movp(FieldOperand(rax, i + kPointerSize), rcx);
  }
  // Copy the trailing word when the object is an odd number of words long.
  if ((size % (2 * kPointerSize)) != 0) {
    __ movp(rdx, FieldOperand(rbx, size - kPointerSize));
    __ movp(FieldOperand(rax, size - kPointerSize), rdx);
  }
  context()->Plug(rax);
}
1584 | 1584 |
1585 | 1585 |
1586 void FullCodeGenerator::EmitAccessor(Expression* expression) { | 1586 void FullCodeGenerator::EmitAccessor(Expression* expression) { |
1587 if (expression == NULL) { | 1587 if (expression == NULL) { |
1588 __ PushRoot(Heap::kNullValueRootIndex); | 1588 __ PushRoot(Heap::kNullValueRootIndex); |
1589 } else { | 1589 } else { |
1590 VisitForStackValue(expression); | 1590 VisitForStackValue(expression); |
(...skipping 10 matching lines...) Expand all Loading... |
1601 ? ObjectLiteral::kFastElements | 1601 ? ObjectLiteral::kFastElements |
1602 : ObjectLiteral::kNoFlags; | 1602 : ObjectLiteral::kNoFlags; |
1603 flags |= expr->has_function() | 1603 flags |= expr->has_function() |
1604 ? ObjectLiteral::kHasFunction | 1604 ? ObjectLiteral::kHasFunction |
1605 : ObjectLiteral::kNoFlags; | 1605 : ObjectLiteral::kNoFlags; |
1606 int properties_count = constant_properties->length() / 2; | 1606 int properties_count = constant_properties->length() / 2; |
1607 if ((FLAG_track_double_fields && expr->may_store_doubles()) || | 1607 if ((FLAG_track_double_fields && expr->may_store_doubles()) || |
1608 expr->depth() > 1 || Serializer::enabled() || | 1608 expr->depth() > 1 || Serializer::enabled() || |
1609 flags != ObjectLiteral::kFastElements || | 1609 flags != ObjectLiteral::kFastElements || |
1610 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { | 1610 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { |
1611 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 1611 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
1612 __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset)); | 1612 __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset)); |
1613 __ Push(Smi::FromInt(expr->literal_index())); | 1613 __ Push(Smi::FromInt(expr->literal_index())); |
1614 __ Push(constant_properties); | 1614 __ Push(constant_properties); |
1615 __ Push(Smi::FromInt(flags)); | 1615 __ Push(Smi::FromInt(flags)); |
1616 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); | 1616 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); |
1617 } else { | 1617 } else { |
1618 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 1618 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
1619 __ movq(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset)); | 1619 __ movp(rax, FieldOperand(rdi, JSFunction::kLiteralsOffset)); |
1620 __ Move(rbx, Smi::FromInt(expr->literal_index())); | 1620 __ Move(rbx, Smi::FromInt(expr->literal_index())); |
1621 __ Move(rcx, constant_properties); | 1621 __ Move(rcx, constant_properties); |
1622 __ Move(rdx, Smi::FromInt(flags)); | 1622 __ Move(rdx, Smi::FromInt(flags)); |
1623 FastCloneShallowObjectStub stub(properties_count); | 1623 FastCloneShallowObjectStub stub(properties_count); |
1624 __ CallStub(&stub); | 1624 __ CallStub(&stub); |
1625 } | 1625 } |
1626 | 1626 |
1627 // If result_saved is true the result is on top of the stack. If | 1627 // If result_saved is true the result is on top of the stack. If |
1628 // result_saved is false the result is in rax. | 1628 // result_saved is false the result is in rax. |
1629 bool result_saved = false; | 1629 bool result_saved = false; |
(...skipping 18 matching lines...) Expand all Loading... |
1648 case ObjectLiteral::Property::CONSTANT: | 1648 case ObjectLiteral::Property::CONSTANT: |
1649 UNREACHABLE(); | 1649 UNREACHABLE(); |
1650 case ObjectLiteral::Property::MATERIALIZED_LITERAL: | 1650 case ObjectLiteral::Property::MATERIALIZED_LITERAL: |
1651 ASSERT(!CompileTimeValue::IsCompileTimeValue(value)); | 1651 ASSERT(!CompileTimeValue::IsCompileTimeValue(value)); |
1652 // Fall through. | 1652 // Fall through. |
1653 case ObjectLiteral::Property::COMPUTED: | 1653 case ObjectLiteral::Property::COMPUTED: |
1654 if (key->value()->IsInternalizedString()) { | 1654 if (key->value()->IsInternalizedString()) { |
1655 if (property->emit_store()) { | 1655 if (property->emit_store()) { |
1656 VisitForAccumulatorValue(value); | 1656 VisitForAccumulatorValue(value); |
1657 __ Move(rcx, key->value()); | 1657 __ Move(rcx, key->value()); |
1658 __ movq(rdx, Operand(rsp, 0)); | 1658 __ movp(rdx, Operand(rsp, 0)); |
1659 CallStoreIC(NOT_CONTEXTUAL, key->LiteralFeedbackId()); | 1659 CallStoreIC(NOT_CONTEXTUAL, key->LiteralFeedbackId()); |
1660 PrepareForBailoutForId(key->id(), NO_REGISTERS); | 1660 PrepareForBailoutForId(key->id(), NO_REGISTERS); |
1661 } else { | 1661 } else { |
1662 VisitForEffect(value); | 1662 VisitForEffect(value); |
1663 } | 1663 } |
1664 break; | 1664 break; |
1665 } | 1665 } |
1666 __ push(Operand(rsp, 0)); // Duplicate receiver. | 1666 __ push(Operand(rsp, 0)); // Duplicate receiver. |
1667 VisitForStackValue(key); | 1667 VisitForStackValue(key); |
1668 VisitForStackValue(value); | 1668 VisitForStackValue(value); |
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1743 // we can turn it off if we don't have anywhere else to transition to. | 1743 // we can turn it off if we don't have anywhere else to transition to. |
1744 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; | 1744 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; |
1745 } | 1745 } |
1746 | 1746 |
1747 Heap* heap = isolate()->heap(); | 1747 Heap* heap = isolate()->heap(); |
1748 if (has_constant_fast_elements && | 1748 if (has_constant_fast_elements && |
1749 constant_elements_values->map() == heap->fixed_cow_array_map()) { | 1749 constant_elements_values->map() == heap->fixed_cow_array_map()) { |
1750 // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot | 1750 // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot |
1751 // change, so it's possible to specialize the stub in advance. | 1751 // change, so it's possible to specialize the stub in advance. |
1752 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1); | 1752 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1); |
1753 __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 1753 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
1754 __ movq(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset)); | 1754 __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset)); |
1755 __ Move(rbx, Smi::FromInt(expr->literal_index())); | 1755 __ Move(rbx, Smi::FromInt(expr->literal_index())); |
1756 __ Move(rcx, constant_elements); | 1756 __ Move(rcx, constant_elements); |
1757 FastCloneShallowArrayStub stub( | 1757 FastCloneShallowArrayStub stub( |
1758 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, | 1758 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, |
1759 allocation_site_mode, | 1759 allocation_site_mode, |
1760 length); | 1760 length); |
1761 __ CallStub(&stub); | 1761 __ CallStub(&stub); |
1762 } else if (expr->depth() > 1 || Serializer::enabled() || | 1762 } else if (expr->depth() > 1 || Serializer::enabled() || |
1763 length > FastCloneShallowArrayStub::kMaximumClonedLength) { | 1763 length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
1764 __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 1764 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
1765 __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); | 1765 __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); |
1766 __ Push(Smi::FromInt(expr->literal_index())); | 1766 __ Push(Smi::FromInt(expr->literal_index())); |
1767 __ Push(constant_elements); | 1767 __ Push(constant_elements); |
1768 __ Push(Smi::FromInt(flags)); | 1768 __ Push(Smi::FromInt(flags)); |
1769 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); | 1769 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); |
1770 } else { | 1770 } else { |
1771 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || | 1771 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || |
1772 FLAG_smi_only_arrays); | 1772 FLAG_smi_only_arrays); |
1773 FastCloneShallowArrayStub::Mode mode = | 1773 FastCloneShallowArrayStub::Mode mode = |
1774 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; | 1774 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; |
1775 | 1775 |
1776 // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot | 1776 // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot |
1777 // change, so it's possible to specialize the stub in advance. | 1777 // change, so it's possible to specialize the stub in advance. |
1778 if (has_constant_fast_elements) { | 1778 if (has_constant_fast_elements) { |
1779 mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; | 1779 mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; |
1780 } | 1780 } |
1781 | 1781 |
1782 __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 1782 __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
1783 __ movq(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset)); | 1783 __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset)); |
1784 __ Move(rbx, Smi::FromInt(expr->literal_index())); | 1784 __ Move(rbx, Smi::FromInt(expr->literal_index())); |
1785 __ Move(rcx, constant_elements); | 1785 __ Move(rcx, constant_elements); |
1786 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); | 1786 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); |
1787 __ CallStub(&stub); | 1787 __ CallStub(&stub); |
1788 } | 1788 } |
1789 | 1789 |
1790 bool result_saved = false; // Is the result saved to the stack? | 1790 bool result_saved = false; // Is the result saved to the stack? |
1791 | 1791 |
1792 // Emit code to evaluate all the non-constant subexpressions and to store | 1792 // Emit code to evaluate all the non-constant subexpressions and to store |
1793 // them into the newly cloned array. | 1793 // them into the newly cloned array. |
1794 for (int i = 0; i < length; i++) { | 1794 for (int i = 0; i < length; i++) { |
1795 Expression* subexpr = subexprs->at(i); | 1795 Expression* subexpr = subexprs->at(i); |
1796 // If the subexpression is a literal or a simple materialized literal it | 1796 // If the subexpression is a literal or a simple materialized literal it |
1797 // is already set in the cloned array. | 1797 // is already set in the cloned array. |
1798 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; | 1798 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; |
1799 | 1799 |
1800 if (!result_saved) { | 1800 if (!result_saved) { |
1801 __ push(rax); // array literal | 1801 __ push(rax); // array literal |
1802 __ Push(Smi::FromInt(expr->literal_index())); | 1802 __ Push(Smi::FromInt(expr->literal_index())); |
1803 result_saved = true; | 1803 result_saved = true; |
1804 } | 1804 } |
1805 VisitForAccumulatorValue(subexpr); | 1805 VisitForAccumulatorValue(subexpr); |
1806 | 1806 |
1807 if (IsFastObjectElementsKind(constant_elements_kind)) { | 1807 if (IsFastObjectElementsKind(constant_elements_kind)) { |
1808 // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they | 1808 // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they |
1809 // cannot transition and don't need to call the runtime stub. | 1809 // cannot transition and don't need to call the runtime stub. |
1810 int offset = FixedArray::kHeaderSize + (i * kPointerSize); | 1810 int offset = FixedArray::kHeaderSize + (i * kPointerSize); |
1811 __ movq(rbx, Operand(rsp, kPointerSize)); // Copy of array literal. | 1811 __ movp(rbx, Operand(rsp, kPointerSize)); // Copy of array literal. |
1812 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); | 1812 __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); |
1813 // Store the subexpression value in the array's elements. | 1813 // Store the subexpression value in the array's elements. |
1814 __ movq(FieldOperand(rbx, offset), result_register()); | 1814 __ movp(FieldOperand(rbx, offset), result_register()); |
1815 // Update the write barrier for the array store. | 1815 // Update the write barrier for the array store. |
1816 __ RecordWriteField(rbx, offset, result_register(), rcx, | 1816 __ RecordWriteField(rbx, offset, result_register(), rcx, |
1817 kDontSaveFPRegs, | 1817 kDontSaveFPRegs, |
1818 EMIT_REMEMBERED_SET, | 1818 EMIT_REMEMBERED_SET, |
1819 INLINE_SMI_CHECK); | 1819 INLINE_SMI_CHECK); |
1820 } else { | 1820 } else { |
1821 // Store the subexpression value in the array's elements. | 1821 // Store the subexpression value in the array's elements. |
1822 __ Move(rcx, Smi::FromInt(i)); | 1822 __ Move(rcx, Smi::FromInt(i)); |
1823 StoreArrayLiteralElementStub stub; | 1823 StoreArrayLiteralElementStub stub; |
1824 __ CallStub(&stub); | 1824 __ CallStub(&stub); |
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1867 VisitForAccumulatorValue(property->obj()); | 1867 VisitForAccumulatorValue(property->obj()); |
1868 __ push(result_register()); | 1868 __ push(result_register()); |
1869 } else { | 1869 } else { |
1870 VisitForStackValue(property->obj()); | 1870 VisitForStackValue(property->obj()); |
1871 } | 1871 } |
1872 break; | 1872 break; |
1873 case KEYED_PROPERTY: { | 1873 case KEYED_PROPERTY: { |
1874 if (expr->is_compound()) { | 1874 if (expr->is_compound()) { |
1875 VisitForStackValue(property->obj()); | 1875 VisitForStackValue(property->obj()); |
1876 VisitForAccumulatorValue(property->key()); | 1876 VisitForAccumulatorValue(property->key()); |
1877 __ movq(rdx, Operand(rsp, 0)); | 1877 __ movp(rdx, Operand(rsp, 0)); |
1878 __ push(rax); | 1878 __ push(rax); |
1879 } else { | 1879 } else { |
1880 VisitForStackValue(property->obj()); | 1880 VisitForStackValue(property->obj()); |
1881 VisitForStackValue(property->key()); | 1881 VisitForStackValue(property->key()); |
1882 } | 1882 } |
1883 break; | 1883 break; |
1884 } | 1884 } |
1885 } | 1885 } |
1886 | 1886 |
1887 // For compound assignments we need another deoptimization point after the | 1887 // For compound assignments we need another deoptimization point after the |
(...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1967 __ jmp(&suspend); | 1967 __ jmp(&suspend); |
1968 | 1968 |
1969 __ bind(&continuation); | 1969 __ bind(&continuation); |
1970 __ jmp(&resume); | 1970 __ jmp(&resume); |
1971 | 1971 |
1972 __ bind(&suspend); | 1972 __ bind(&suspend); |
1973 VisitForAccumulatorValue(expr->generator_object()); | 1973 VisitForAccumulatorValue(expr->generator_object()); |
1974 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); | 1974 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); |
1975 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset), | 1975 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset), |
1976 Smi::FromInt(continuation.pos())); | 1976 Smi::FromInt(continuation.pos())); |
1977 __ movq(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi); | 1977 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi); |
1978 __ movq(rcx, rsi); | 1978 __ movp(rcx, rsi); |
1979 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx, | 1979 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx, |
1980 kDontSaveFPRegs); | 1980 kDontSaveFPRegs); |
1981 __ lea(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset)); | 1981 __ lea(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset)); |
1982 __ cmpq(rsp, rbx); | 1982 __ cmpq(rsp, rbx); |
1983 __ j(equal, &post_runtime); | 1983 __ j(equal, &post_runtime); |
1984 __ push(rax); // generator object | 1984 __ push(rax); // generator object |
1985 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); | 1985 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); |
1986 __ movq(context_register(), | 1986 __ movp(context_register(), |
1987 Operand(rbp, StandardFrameConstants::kContextOffset)); | 1987 Operand(rbp, StandardFrameConstants::kContextOffset)); |
1988 __ bind(&post_runtime); | 1988 __ bind(&post_runtime); |
1989 | 1989 |
1990 __ pop(result_register()); | 1990 __ pop(result_register()); |
1991 EmitReturnSequence(); | 1991 EmitReturnSequence(); |
1992 | 1992 |
1993 __ bind(&resume); | 1993 __ bind(&resume); |
1994 context()->Plug(result_register()); | 1994 context()->Plug(result_register()); |
1995 break; | 1995 break; |
1996 } | 1996 } |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2035 __ bind(&l_try); | 2035 __ bind(&l_try); |
2036 __ pop(rax); // result | 2036 __ pop(rax); // result |
2037 __ PushTryHandler(StackHandler::CATCH, expr->index()); | 2037 __ PushTryHandler(StackHandler::CATCH, expr->index()); |
2038 const int handler_size = StackHandlerConstants::kSize; | 2038 const int handler_size = StackHandlerConstants::kSize; |
2039 __ push(rax); // result | 2039 __ push(rax); // result |
2040 __ jmp(&l_suspend); | 2040 __ jmp(&l_suspend); |
2041 __ bind(&l_continuation); | 2041 __ bind(&l_continuation); |
2042 __ jmp(&l_resume); | 2042 __ jmp(&l_resume); |
2043 __ bind(&l_suspend); | 2043 __ bind(&l_suspend); |
2044 const int generator_object_depth = kPointerSize + handler_size; | 2044 const int generator_object_depth = kPointerSize + handler_size; |
2045 __ movq(rax, Operand(rsp, generator_object_depth)); | 2045 __ movp(rax, Operand(rsp, generator_object_depth)); |
2046 __ push(rax); // g | 2046 __ push(rax); // g |
2047 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); | 2047 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); |
2048 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset), | 2048 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset), |
2049 Smi::FromInt(l_continuation.pos())); | 2049 Smi::FromInt(l_continuation.pos())); |
2050 __ movq(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi); | 2050 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi); |
2051 __ movq(rcx, rsi); | 2051 __ movp(rcx, rsi); |
2052 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx, | 2052 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx, |
2053 kDontSaveFPRegs); | 2053 kDontSaveFPRegs); |
2054 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); | 2054 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); |
2055 __ movq(context_register(), | 2055 __ movp(context_register(), |
2056 Operand(rbp, StandardFrameConstants::kContextOffset)); | 2056 Operand(rbp, StandardFrameConstants::kContextOffset)); |
2057 __ pop(rax); // result | 2057 __ pop(rax); // result |
2058 EmitReturnSequence(); | 2058 EmitReturnSequence(); |
2059 __ bind(&l_resume); // received in rax | 2059 __ bind(&l_resume); // received in rax |
2060 __ PopTryHandler(); | 2060 __ PopTryHandler(); |
2061 | 2061 |
2062 // receiver = iter; f = 'next'; arg = received; | 2062 // receiver = iter; f = 'next'; arg = received; |
2063 __ bind(&l_next); | 2063 __ bind(&l_next); |
2064 __ LoadRoot(rcx, Heap::knext_stringRootIndex); // "next" | 2064 __ LoadRoot(rcx, Heap::knext_stringRootIndex); // "next" |
2065 __ push(rcx); | 2065 __ push(rcx); |
2066 __ push(Operand(rsp, 2 * kPointerSize)); // iter | 2066 __ push(Operand(rsp, 2 * kPointerSize)); // iter |
2067 __ push(rax); // received | 2067 __ push(rax); // received |
2068 | 2068 |
2069 // result = receiver[f](arg); | 2069 // result = receiver[f](arg); |
2070 __ bind(&l_call); | 2070 __ bind(&l_call); |
2071 Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(1); | 2071 Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(1); |
2072 CallIC(ic); | 2072 CallIC(ic); |
2073 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2073 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
2074 __ Drop(1); // The key is still on the stack; drop it. | 2074 __ Drop(1); // The key is still on the stack; drop it. |
2075 | 2075 |
2076 // if (!result.done) goto l_try; | 2076 // if (!result.done) goto l_try; |
2077 __ bind(&l_loop); | 2077 __ bind(&l_loop); |
2078 __ push(rax); // save result | 2078 __ push(rax); // save result |
2079 __ LoadRoot(rcx, Heap::kdone_stringRootIndex); // "done" | 2079 __ LoadRoot(rcx, Heap::kdone_stringRootIndex); // "done" |
2080 CallLoadIC(NOT_CONTEXTUAL); // result.done in rax | 2080 CallLoadIC(NOT_CONTEXTUAL); // result.done in rax |
2081 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); | 2081 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); |
2082 CallIC(bool_ic); | 2082 CallIC(bool_ic); |
2083 __ testq(result_register(), result_register()); | 2083 __ testq(result_register(), result_register()); |
(...skipping 24 matching lines...) Expand all Loading... |
2108 // Check generator state. | 2108 // Check generator state. |
2109 Label wrong_state, closed_state, done; | 2109 Label wrong_state, closed_state, done; |
2110 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0); | 2110 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0); |
2111 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0); | 2111 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0); |
2112 __ SmiCompare(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset), | 2112 __ SmiCompare(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset), |
2113 Smi::FromInt(0)); | 2113 Smi::FromInt(0)); |
2114 __ j(equal, &closed_state); | 2114 __ j(equal, &closed_state); |
2115 __ j(less, &wrong_state); | 2115 __ j(less, &wrong_state); |
2116 | 2116 |
2117 // Load suspended function and context. | 2117 // Load suspended function and context. |
2118 __ movq(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset)); | 2118 __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset)); |
2119 __ movq(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset)); | 2119 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset)); |
2120 | 2120 |
2121 // Push receiver. | 2121 // Push receiver. |
2122 __ push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset)); | 2122 __ push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset)); |
2123 | 2123 |
2124 // Push holes for arguments to generator function. | 2124 // Push holes for arguments to generator function. |
2125 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 2125 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
2126 __ movsxlq(rdx, | 2126 __ movsxlq(rdx, |
2127 FieldOperand(rdx, | 2127 FieldOperand(rdx, |
2128 SharedFunctionInfo::kFormalParameterCountOffset)); | 2128 SharedFunctionInfo::kFormalParameterCountOffset)); |
2129 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex); | 2129 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex); |
2130 Label push_argument_holes, push_frame; | 2130 Label push_argument_holes, push_frame; |
2131 __ bind(&push_argument_holes); | 2131 __ bind(&push_argument_holes); |
2132 __ subq(rdx, Immediate(1)); | 2132 __ subq(rdx, Immediate(1)); |
2133 __ j(carry, &push_frame); | 2133 __ j(carry, &push_frame); |
2134 __ push(rcx); | 2134 __ push(rcx); |
2135 __ jmp(&push_argument_holes); | 2135 __ jmp(&push_argument_holes); |
2136 | 2136 |
2137 // Enter a new JavaScript frame, and initialize its slots as they were when | 2137 // Enter a new JavaScript frame, and initialize its slots as they were when |
2138 // the generator was suspended. | 2138 // the generator was suspended. |
2139 Label resume_frame; | 2139 Label resume_frame; |
2140 __ bind(&push_frame); | 2140 __ bind(&push_frame); |
2141 __ call(&resume_frame); | 2141 __ call(&resume_frame); |
2142 __ jmp(&done); | 2142 __ jmp(&done); |
2143 __ bind(&resume_frame); | 2143 __ bind(&resume_frame); |
2144 __ push(rbp); // Caller's frame pointer. | 2144 __ push(rbp); // Caller's frame pointer. |
2145 __ movq(rbp, rsp); | 2145 __ movp(rbp, rsp); |
2146 __ push(rsi); // Callee's context. | 2146 __ push(rsi); // Callee's context. |
2147 __ push(rdi); // Callee's JS Function. | 2147 __ push(rdi); // Callee's JS Function. |
2148 | 2148 |
2149 // Load the operand stack size. | 2149 // Load the operand stack size. |
2150 __ movq(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset)); | 2150 __ movp(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset)); |
2151 __ movq(rdx, FieldOperand(rdx, FixedArray::kLengthOffset)); | 2151 __ movp(rdx, FieldOperand(rdx, FixedArray::kLengthOffset)); |
2152 __ SmiToInteger32(rdx, rdx); | 2152 __ SmiToInteger32(rdx, rdx); |
2153 | 2153 |
2154 // If we are sending a value and there is no operand stack, we can jump back | 2154 // If we are sending a value and there is no operand stack, we can jump back |
2155 // in directly. | 2155 // in directly. |
2156 if (resume_mode == JSGeneratorObject::NEXT) { | 2156 if (resume_mode == JSGeneratorObject::NEXT) { |
2157 Label slow_resume; | 2157 Label slow_resume; |
2158 __ cmpq(rdx, Immediate(0)); | 2158 __ cmpq(rdx, Immediate(0)); |
2159 __ j(not_zero, &slow_resume); | 2159 __ j(not_zero, &slow_resume); |
2160 __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); | 2160 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); |
2161 __ SmiToInteger64(rcx, | 2161 __ SmiToInteger64(rcx, |
2162 FieldOperand(rbx, JSGeneratorObject::kContinuationOffset)); | 2162 FieldOperand(rbx, JSGeneratorObject::kContinuationOffset)); |
2163 __ addq(rdx, rcx); | 2163 __ addq(rdx, rcx); |
2164 __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset), | 2164 __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset), |
2165 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)); | 2165 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)); |
2166 __ jmp(rdx); | 2166 __ jmp(rdx); |
2167 __ bind(&slow_resume); | 2167 __ bind(&slow_resume); |
2168 } | 2168 } |
2169 | 2169 |
2170 // Otherwise, we push holes for the operand stack and call the runtime to fix | 2170 // Otherwise, we push holes for the operand stack and call the runtime to fix |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2212 Label allocated; | 2212 Label allocated; |
2213 | 2213 |
2214 Handle<Map> map(isolate()->native_context()->generator_result_map()); | 2214 Handle<Map> map(isolate()->native_context()->generator_result_map()); |
2215 | 2215 |
2216 __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT); | 2216 __ Allocate(map->instance_size(), rax, rcx, rdx, &gc_required, TAG_OBJECT); |
2217 __ jmp(&allocated); | 2217 __ jmp(&allocated); |
2218 | 2218 |
2219 __ bind(&gc_required); | 2219 __ bind(&gc_required); |
2220 __ Push(Smi::FromInt(map->instance_size())); | 2220 __ Push(Smi::FromInt(map->instance_size())); |
2221 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | 2221 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); |
2222 __ movq(context_register(), | 2222 __ movp(context_register(), |
2223 Operand(rbp, StandardFrameConstants::kContextOffset)); | 2223 Operand(rbp, StandardFrameConstants::kContextOffset)); |
2224 | 2224 |
2225 __ bind(&allocated); | 2225 __ bind(&allocated); |
2226 __ Move(rbx, map); | 2226 __ Move(rbx, map); |
2227 __ pop(rcx); | 2227 __ pop(rcx); |
2228 __ Move(rdx, isolate()->factory()->ToBoolean(done)); | 2228 __ Move(rdx, isolate()->factory()->ToBoolean(done)); |
2229 ASSERT_EQ(map->instance_size(), 5 * kPointerSize); | 2229 ASSERT_EQ(map->instance_size(), 5 * kPointerSize); |
2230 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rbx); | 2230 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx); |
2231 __ Move(FieldOperand(rax, JSObject::kPropertiesOffset), | 2231 __ Move(FieldOperand(rax, JSObject::kPropertiesOffset), |
2232 isolate()->factory()->empty_fixed_array()); | 2232 isolate()->factory()->empty_fixed_array()); |
2233 __ Move(FieldOperand(rax, JSObject::kElementsOffset), | 2233 __ Move(FieldOperand(rax, JSObject::kElementsOffset), |
2234 isolate()->factory()->empty_fixed_array()); | 2234 isolate()->factory()->empty_fixed_array()); |
2235 __ movq(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset), | 2235 __ movp(FieldOperand(rax, JSGeneratorObject::kResultValuePropertyOffset), |
2236 rcx); | 2236 rcx); |
2237 __ movq(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset), | 2237 __ movp(FieldOperand(rax, JSGeneratorObject::kResultDonePropertyOffset), |
2238 rdx); | 2238 rdx); |
2239 | 2239 |
2240 // Only the value field needs a write barrier, as the other values are in the | 2240 // Only the value field needs a write barrier, as the other values are in the |
2241 // root set. | 2241 // root set. |
2242 __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset, | 2242 __ RecordWriteField(rax, JSGeneratorObject::kResultValuePropertyOffset, |
2243 rcx, rdx, kDontSaveFPRegs); | 2243 rcx, rdx, kDontSaveFPRegs); |
2244 } | 2244 } |
2245 | 2245 |
2246 | 2246 |
2247 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { | 2247 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { |
(...skipping 14 matching lines...) Expand all Loading... |
2262 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, | 2262 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, |
2263 Token::Value op, | 2263 Token::Value op, |
2264 OverwriteMode mode, | 2264 OverwriteMode mode, |
2265 Expression* left, | 2265 Expression* left, |
2266 Expression* right) { | 2266 Expression* right) { |
2267 // Do combined smi check of the operands. Left operand is on the | 2267 // Do combined smi check of the operands. Left operand is on the |
2268 // stack (popped into rdx). Right operand is in rax but moved into | 2268 // stack (popped into rdx). Right operand is in rax but moved into |
2269 // rcx to make the shifts easier. | 2269 // rcx to make the shifts easier. |
2270 Label done, stub_call, smi_case; | 2270 Label done, stub_call, smi_case; |
2271 __ pop(rdx); | 2271 __ pop(rdx); |
2272 __ movq(rcx, rax); | 2272 __ movp(rcx, rax); |
2273 __ or_(rax, rdx); | 2273 __ or_(rax, rdx); |
2274 JumpPatchSite patch_site(masm_); | 2274 JumpPatchSite patch_site(masm_); |
2275 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear); | 2275 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear); |
2276 | 2276 |
2277 __ bind(&stub_call); | 2277 __ bind(&stub_call); |
2278 __ movq(rax, rcx); | 2278 __ movp(rax, rcx); |
2279 BinaryOpICStub stub(op, mode); | 2279 BinaryOpICStub stub(op, mode); |
2280 CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL, | 2280 CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL, |
2281 expr->BinaryOperationFeedbackId()); | 2281 expr->BinaryOperationFeedbackId()); |
2282 patch_site.EmitPatchInfo(); | 2282 patch_site.EmitPatchInfo(); |
2283 __ jmp(&done, Label::kNear); | 2283 __ jmp(&done, Label::kNear); |
2284 | 2284 |
2285 __ bind(&smi_case); | 2285 __ bind(&smi_case); |
2286 switch (op) { | 2286 switch (op) { |
2287 case Token::SAR: | 2287 case Token::SAR: |
2288 __ SmiShiftArithmeticRight(rax, rdx, rcx); | 2288 __ SmiShiftArithmeticRight(rax, rdx, rcx); |
(...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2356 switch (assign_type) { | 2356 switch (assign_type) { |
2357 case VARIABLE: { | 2357 case VARIABLE: { |
2358 Variable* var = expr->AsVariableProxy()->var(); | 2358 Variable* var = expr->AsVariableProxy()->var(); |
2359 EffectContext context(this); | 2359 EffectContext context(this); |
2360 EmitVariableAssignment(var, Token::ASSIGN); | 2360 EmitVariableAssignment(var, Token::ASSIGN); |
2361 break; | 2361 break; |
2362 } | 2362 } |
2363 case NAMED_PROPERTY: { | 2363 case NAMED_PROPERTY: { |
2364 __ push(rax); // Preserve value. | 2364 __ push(rax); // Preserve value. |
2365 VisitForAccumulatorValue(prop->obj()); | 2365 VisitForAccumulatorValue(prop->obj()); |
2366 __ movq(rdx, rax); | 2366 __ movp(rdx, rax); |
2367 __ pop(rax); // Restore value. | 2367 __ pop(rax); // Restore value. |
2368 __ Move(rcx, prop->key()->AsLiteral()->value()); | 2368 __ Move(rcx, prop->key()->AsLiteral()->value()); |
2369 CallStoreIC(NOT_CONTEXTUAL); | 2369 CallStoreIC(NOT_CONTEXTUAL); |
2370 break; | 2370 break; |
2371 } | 2371 } |
2372 case KEYED_PROPERTY: { | 2372 case KEYED_PROPERTY: { |
2373 __ push(rax); // Preserve value. | 2373 __ push(rax); // Preserve value. |
2374 VisitForStackValue(prop->obj()); | 2374 VisitForStackValue(prop->obj()); |
2375 VisitForAccumulatorValue(prop->key()); | 2375 VisitForAccumulatorValue(prop->key()); |
2376 __ movq(rcx, rax); | 2376 __ movp(rcx, rax); |
2377 __ pop(rdx); | 2377 __ pop(rdx); |
2378 __ pop(rax); // Restore value. | 2378 __ pop(rax); // Restore value. |
2379 Handle<Code> ic = is_classic_mode() | 2379 Handle<Code> ic = is_classic_mode() |
2380 ? isolate()->builtins()->KeyedStoreIC_Initialize() | 2380 ? isolate()->builtins()->KeyedStoreIC_Initialize() |
2381 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); | 2381 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); |
2382 CallIC(ic); | 2382 CallIC(ic); |
2383 break; | 2383 break; |
2384 } | 2384 } |
2385 } | 2385 } |
2386 context()->Plug(rax); | 2386 context()->Plug(rax); |
2387 } | 2387 } |
2388 | 2388 |
2389 | 2389 |
2390 void FullCodeGenerator::EmitVariableAssignment(Variable* var, | 2390 void FullCodeGenerator::EmitVariableAssignment(Variable* var, |
2391 Token::Value op) { | 2391 Token::Value op) { |
2392 if (var->IsUnallocated()) { | 2392 if (var->IsUnallocated()) { |
2393 // Global var, const, or let. | 2393 // Global var, const, or let. |
2394 __ Move(rcx, var->name()); | 2394 __ Move(rcx, var->name()); |
2395 __ movq(rdx, GlobalObjectOperand()); | 2395 __ movp(rdx, GlobalObjectOperand()); |
2396 CallStoreIC(CONTEXTUAL); | 2396 CallStoreIC(CONTEXTUAL); |
2397 } else if (op == Token::INIT_CONST) { | 2397 } else if (op == Token::INIT_CONST) { |
2398 // Const initializers need a write barrier. | 2398 // Const initializers need a write barrier. |
2399 ASSERT(!var->IsParameter()); // No const parameters. | 2399 ASSERT(!var->IsParameter()); // No const parameters. |
2400 if (var->IsStackLocal()) { | 2400 if (var->IsStackLocal()) { |
2401 Label skip; | 2401 Label skip; |
2402 __ movq(rdx, StackOperand(var)); | 2402 __ movp(rdx, StackOperand(var)); |
2403 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); | 2403 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); |
2404 __ j(not_equal, &skip); | 2404 __ j(not_equal, &skip); |
2405 __ movq(StackOperand(var), rax); | 2405 __ movp(StackOperand(var), rax); |
2406 __ bind(&skip); | 2406 __ bind(&skip); |
2407 } else { | 2407 } else { |
2408 ASSERT(var->IsContextSlot() || var->IsLookupSlot()); | 2408 ASSERT(var->IsContextSlot() || var->IsLookupSlot()); |
2409 // Like var declarations, const declarations are hoisted to function | 2409 // Like var declarations, const declarations are hoisted to function |
2410 // scope. However, unlike var initializers, const initializers are | 2410 // scope. However, unlike var initializers, const initializers are |
2411 // able to drill a hole to that function context, even from inside a | 2411 // able to drill a hole to that function context, even from inside a |
2412 // 'with' context. We thus bypass the normal static scope lookup for | 2412 // 'with' context. We thus bypass the normal static scope lookup for |
2413 // var->IsContextSlot(). | 2413 // var->IsContextSlot(). |
2414 __ push(rax); | 2414 __ push(rax); |
2415 __ push(rsi); | 2415 __ push(rsi); |
2416 __ Push(var->name()); | 2416 __ Push(var->name()); |
2417 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); | 2417 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); |
2418 } | 2418 } |
2419 | 2419 |
2420 } else if (var->mode() == LET && op != Token::INIT_LET) { | 2420 } else if (var->mode() == LET && op != Token::INIT_LET) { |
2421 // Non-initializing assignment to let variable needs a write barrier. | 2421 // Non-initializing assignment to let variable needs a write barrier. |
2422 if (var->IsLookupSlot()) { | 2422 if (var->IsLookupSlot()) { |
2423 __ push(rax); // Value. | 2423 __ push(rax); // Value. |
2424 __ push(rsi); // Context. | 2424 __ push(rsi); // Context. |
2425 __ Push(var->name()); | 2425 __ Push(var->name()); |
2426 __ Push(Smi::FromInt(language_mode())); | 2426 __ Push(Smi::FromInt(language_mode())); |
2427 __ CallRuntime(Runtime::kStoreContextSlot, 4); | 2427 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
2428 } else { | 2428 } else { |
2429 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | 2429 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
2430 Label assign; | 2430 Label assign; |
2431 MemOperand location = VarOperand(var, rcx); | 2431 MemOperand location = VarOperand(var, rcx); |
2432 __ movq(rdx, location); | 2432 __ movp(rdx, location); |
2433 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); | 2433 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); |
2434 __ j(not_equal, &assign, Label::kNear); | 2434 __ j(not_equal, &assign, Label::kNear); |
2435 __ Push(var->name()); | 2435 __ Push(var->name()); |
2436 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 2436 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
2437 __ bind(&assign); | 2437 __ bind(&assign); |
2438 __ movq(location, rax); | 2438 __ movp(location, rax); |
2439 if (var->IsContextSlot()) { | 2439 if (var->IsContextSlot()) { |
2440 __ movq(rdx, rax); | 2440 __ movp(rdx, rax); |
2441 __ RecordWriteContextSlot( | 2441 __ RecordWriteContextSlot( |
2442 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs); | 2442 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs); |
2443 } | 2443 } |
2444 } | 2444 } |
2445 | 2445 |
2446 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { | 2446 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { |
2447 // Assignment to var or initializing assignment to let/const | 2447 // Assignment to var or initializing assignment to let/const |
2448 // in harmony mode. | 2448 // in harmony mode. |
2449 if (var->IsStackAllocated() || var->IsContextSlot()) { | 2449 if (var->IsStackAllocated() || var->IsContextSlot()) { |
2450 MemOperand location = VarOperand(var, rcx); | 2450 MemOperand location = VarOperand(var, rcx); |
2451 if (generate_debug_code_ && op == Token::INIT_LET) { | 2451 if (generate_debug_code_ && op == Token::INIT_LET) { |
2452 // Check for an uninitialized let binding. | 2452 // Check for an uninitialized let binding. |
2453 __ movq(rdx, location); | 2453 __ movp(rdx, location); |
2454 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); | 2454 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); |
2455 __ Check(equal, kLetBindingReInitialization); | 2455 __ Check(equal, kLetBindingReInitialization); |
2456 } | 2456 } |
2457 // Perform the assignment. | 2457 // Perform the assignment. |
2458 __ movq(location, rax); | 2458 __ movp(location, rax); |
2459 if (var->IsContextSlot()) { | 2459 if (var->IsContextSlot()) { |
2460 __ movq(rdx, rax); | 2460 __ movp(rdx, rax); |
2461 __ RecordWriteContextSlot( | 2461 __ RecordWriteContextSlot( |
2462 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs); | 2462 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs); |
2463 } | 2463 } |
2464 } else { | 2464 } else { |
2465 ASSERT(var->IsLookupSlot()); | 2465 ASSERT(var->IsLookupSlot()); |
2466 __ push(rax); // Value. | 2466 __ push(rax); // Value. |
2467 __ push(rsi); // Context. | 2467 __ push(rsi); // Context. |
2468 __ Push(var->name()); | 2468 __ Push(var->name()); |
2469 __ Push(Smi::FromInt(language_mode())); | 2469 __ Push(Smi::FromInt(language_mode())); |
2470 __ CallRuntime(Runtime::kStoreContextSlot, 4); | 2470 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
(...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2551 // Record source position for debugger. | 2551 // Record source position for debugger. |
2552 SetSourcePosition(expr->position()); | 2552 SetSourcePosition(expr->position()); |
2553 // Call the IC initialization code. | 2553 // Call the IC initialization code. |
2554 Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(arg_count); | 2554 Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(arg_count); |
2555 TypeFeedbackId ast_id = mode == CONTEXTUAL | 2555 TypeFeedbackId ast_id = mode == CONTEXTUAL |
2556 ? TypeFeedbackId::None() | 2556 ? TypeFeedbackId::None() |
2557 : expr->CallFeedbackId(); | 2557 : expr->CallFeedbackId(); |
2558 CallIC(ic, mode, ast_id); | 2558 CallIC(ic, mode, ast_id); |
2559 RecordJSReturnSite(expr); | 2559 RecordJSReturnSite(expr); |
2560 // Restore context register. | 2560 // Restore context register. |
2561 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2561 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
2562 context()->Plug(rax); | 2562 context()->Plug(rax); |
2563 } | 2563 } |
2564 | 2564 |
2565 | 2565 |
2566 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, | 2566 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, |
2567 Expression* key) { | 2567 Expression* key) { |
2568 // Load the key. | 2568 // Load the key. |
2569 VisitForAccumulatorValue(key); | 2569 VisitForAccumulatorValue(key); |
2570 | 2570 |
2571 // Swap the name of the function and the receiver on the stack to follow | 2571 // Swap the name of the function and the receiver on the stack to follow |
2572 // the calling convention for call ICs. | 2572 // the calling convention for call ICs. |
2573 __ pop(rcx); | 2573 __ pop(rcx); |
2574 __ push(rax); | 2574 __ push(rax); |
2575 __ push(rcx); | 2575 __ push(rcx); |
2576 | 2576 |
2577 // Load the arguments. | 2577 // Load the arguments. |
2578 ZoneList<Expression*>* args = expr->arguments(); | 2578 ZoneList<Expression*>* args = expr->arguments(); |
2579 int arg_count = args->length(); | 2579 int arg_count = args->length(); |
2580 { PreservePositionScope scope(masm()->positions_recorder()); | 2580 { PreservePositionScope scope(masm()->positions_recorder()); |
2581 for (int i = 0; i < arg_count; i++) { | 2581 for (int i = 0; i < arg_count; i++) { |
2582 VisitForStackValue(args->at(i)); | 2582 VisitForStackValue(args->at(i)); |
2583 } | 2583 } |
2584 } | 2584 } |
2585 // Record source position for debugger. | 2585 // Record source position for debugger. |
2586 SetSourcePosition(expr->position()); | 2586 SetSourcePosition(expr->position()); |
2587 // Call the IC initialization code. | 2587 // Call the IC initialization code. |
2588 Handle<Code> ic = | 2588 Handle<Code> ic = |
2589 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count); | 2589 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count); |
2590 __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key. | 2590 __ movp(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key. |
2591 CallIC(ic, NOT_CONTEXTUAL, expr->CallFeedbackId()); | 2591 CallIC(ic, NOT_CONTEXTUAL, expr->CallFeedbackId()); |
2592 RecordJSReturnSite(expr); | 2592 RecordJSReturnSite(expr); |
2593 // Restore context register. | 2593 // Restore context register. |
2594 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2594 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
2595 context()->DropAndPlug(1, rax); // Drop the key still on the stack. | 2595 context()->DropAndPlug(1, rax); // Drop the key still on the stack. |
2596 } | 2596 } |
2597 | 2597 |
2598 | 2598 |
2599 void FullCodeGenerator::EmitCallWithStub(Call* expr) { | 2599 void FullCodeGenerator::EmitCallWithStub(Call* expr) { |
2600 // Code common for calls using the call stub. | 2600 // Code common for calls using the call stub. |
2601 ZoneList<Expression*>* args = expr->arguments(); | 2601 ZoneList<Expression*>* args = expr->arguments(); |
2602 int arg_count = args->length(); | 2602 int arg_count = args->length(); |
2603 { PreservePositionScope scope(masm()->positions_recorder()); | 2603 { PreservePositionScope scope(masm()->positions_recorder()); |
2604 for (int i = 0; i < arg_count; i++) { | 2604 for (int i = 0; i < arg_count; i++) { |
2605 VisitForStackValue(args->at(i)); | 2605 VisitForStackValue(args->at(i)); |
2606 } | 2606 } |
2607 } | 2607 } |
2608 // Record source position for debugger. | 2608 // Record source position for debugger. |
2609 SetSourcePosition(expr->position()); | 2609 SetSourcePosition(expr->position()); |
2610 | 2610 |
2611 Handle<Object> uninitialized = | 2611 Handle<Object> uninitialized = |
2612 TypeFeedbackCells::UninitializedSentinel(isolate()); | 2612 TypeFeedbackCells::UninitializedSentinel(isolate()); |
2613 Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized); | 2613 Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized); |
2614 RecordTypeFeedbackCell(expr->CallFeedbackId(), cell); | 2614 RecordTypeFeedbackCell(expr->CallFeedbackId(), cell); |
2615 __ Move(rbx, cell); | 2615 __ Move(rbx, cell); |
2616 | 2616 |
2617 // Record call targets in unoptimized code. | 2617 // Record call targets in unoptimized code. |
2618 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); | 2618 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); |
2619 __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize)); | 2619 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize)); |
2620 __ CallStub(&stub, expr->CallFeedbackId()); | 2620 __ CallStub(&stub, expr->CallFeedbackId()); |
2621 RecordJSReturnSite(expr); | 2621 RecordJSReturnSite(expr); |
2622 // Restore context register. | 2622 // Restore context register. |
2623 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2623 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
2624 // Discard the function left on TOS. | 2624 // Discard the function left on TOS. |
2625 context()->DropAndPlug(1, rax); | 2625 context()->DropAndPlug(1, rax); |
2626 } | 2626 } |
2627 | 2627 |
2628 | 2628 |
2629 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { | 2629 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { |
2630 // Push copy of the first argument or undefined if it doesn't exist. | 2630 // Push copy of the first argument or undefined if it doesn't exist. |
2631 if (arg_count > 0) { | 2631 if (arg_count > 0) { |
2632 __ push(Operand(rsp, arg_count * kPointerSize)); | 2632 __ push(Operand(rsp, arg_count * kPointerSize)); |
2633 } else { | 2633 } else { |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2675 VisitForStackValue(args->at(i)); | 2675 VisitForStackValue(args->at(i)); |
2676 } | 2676 } |
2677 | 2677 |
2678 // Push a copy of the function (found below the arguments) and resolve | 2678 // Push a copy of the function (found below the arguments) and resolve |
2679 // eval. | 2679 // eval. |
2680 __ push(Operand(rsp, (arg_count + 1) * kPointerSize)); | 2680 __ push(Operand(rsp, (arg_count + 1) * kPointerSize)); |
2681 EmitResolvePossiblyDirectEval(arg_count); | 2681 EmitResolvePossiblyDirectEval(arg_count); |
2682 | 2682 |
2683 // The runtime call returns a pair of values in rax (function) and | 2683 // The runtime call returns a pair of values in rax (function) and |
2684 // rdx (receiver). Touch up the stack with the right values. | 2684 // rdx (receiver). Touch up the stack with the right values. |
2685 __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx); | 2685 __ movp(Operand(rsp, (arg_count + 0) * kPointerSize), rdx); |
2686 __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax); | 2686 __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax); |
2687 } | 2687 } |
2688 // Record source position for debugger. | 2688 // Record source position for debugger. |
2689 SetSourcePosition(expr->position()); | 2689 SetSourcePosition(expr->position()); |
2690 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); | 2690 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); |
2691 __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize)); | 2691 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize)); |
2692 __ CallStub(&stub); | 2692 __ CallStub(&stub); |
2693 RecordJSReturnSite(expr); | 2693 RecordJSReturnSite(expr); |
2694 // Restore context register. | 2694 // Restore context register. |
2695 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2695 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
2696 context()->DropAndPlug(1, rax); | 2696 context()->DropAndPlug(1, rax); |
2697 } else if (call_type == Call::GLOBAL_CALL) { | 2697 } else if (call_type == Call::GLOBAL_CALL) { |
2698 // Call to a global variable. Push global object as receiver for the | 2698 // Call to a global variable. Push global object as receiver for the |
2699 // call IC lookup. | 2699 // call IC lookup. |
2700 __ push(GlobalObjectOperand()); | 2700 __ push(GlobalObjectOperand()); |
2701 VariableProxy* proxy = callee->AsVariableProxy(); | 2701 VariableProxy* proxy = callee->AsVariableProxy(); |
2702 EmitCallWithIC(expr, proxy->name(), CONTEXTUAL); | 2702 EmitCallWithIC(expr, proxy->name(), CONTEXTUAL); |
2703 } else if (call_type == Call::LOOKUP_SLOT_CALL) { | 2703 } else if (call_type == Call::LOOKUP_SLOT_CALL) { |
2704 // Call to a lookup slot (dynamically introduced variable). | 2704 // Call to a lookup slot (dynamically introduced variable). |
2705 VariableProxy* proxy = callee->AsVariableProxy(); | 2705 VariableProxy* proxy = callee->AsVariableProxy(); |
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2783 for (int i = 0; i < arg_count; i++) { | 2783 for (int i = 0; i < arg_count; i++) { |
2784 VisitForStackValue(args->at(i)); | 2784 VisitForStackValue(args->at(i)); |
2785 } | 2785 } |
2786 | 2786 |
2787 // Call the construct call builtin that handles allocation and | 2787 // Call the construct call builtin that handles allocation and |
2788 // constructor invocation. | 2788 // constructor invocation. |
2789 SetSourcePosition(expr->position()); | 2789 SetSourcePosition(expr->position()); |
2790 | 2790 |
2791 // Load function and argument count into rdi and rax. | 2791 // Load function and argument count into rdi and rax. |
2792 __ Set(rax, arg_count); | 2792 __ Set(rax, arg_count); |
2793 __ movq(rdi, Operand(rsp, arg_count * kPointerSize)); | 2793 __ movp(rdi, Operand(rsp, arg_count * kPointerSize)); |
2794 | 2794 |
2795 // Record call targets in unoptimized code, but not in the snapshot. | 2795 // Record call targets in unoptimized code, but not in the snapshot. |
2796 Handle<Object> uninitialized = | 2796 Handle<Object> uninitialized = |
2797 TypeFeedbackCells::UninitializedSentinel(isolate()); | 2797 TypeFeedbackCells::UninitializedSentinel(isolate()); |
2798 Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized); | 2798 Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized); |
2799 RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell); | 2799 RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell); |
2800 __ Move(rbx, cell); | 2800 __ Move(rbx, cell); |
2801 | 2801 |
2802 CallConstructStub stub(RECORD_CALL_TARGET); | 2802 CallConstructStub stub(RECORD_CALL_TARGET); |
2803 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); | 2803 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); |
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2857 Label materialize_true, materialize_false; | 2857 Label materialize_true, materialize_false; |
2858 Label* if_true = NULL; | 2858 Label* if_true = NULL; |
2859 Label* if_false = NULL; | 2859 Label* if_false = NULL; |
2860 Label* fall_through = NULL; | 2860 Label* fall_through = NULL; |
2861 context()->PrepareTest(&materialize_true, &materialize_false, | 2861 context()->PrepareTest(&materialize_true, &materialize_false, |
2862 &if_true, &if_false, &fall_through); | 2862 &if_true, &if_false, &fall_through); |
2863 | 2863 |
2864 __ JumpIfSmi(rax, if_false); | 2864 __ JumpIfSmi(rax, if_false); |
2865 __ CompareRoot(rax, Heap::kNullValueRootIndex); | 2865 __ CompareRoot(rax, Heap::kNullValueRootIndex); |
2866 __ j(equal, if_true); | 2866 __ j(equal, if_true); |
2867 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); | 2867 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset)); |
2868 // Undetectable objects behave like undefined when tested with typeof. | 2868 // Undetectable objects behave like undefined when tested with typeof. |
2869 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), | 2869 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), |
2870 Immediate(1 << Map::kIsUndetectable)); | 2870 Immediate(1 << Map::kIsUndetectable)); |
2871 __ j(not_zero, if_false); | 2871 __ j(not_zero, if_false); |
2872 __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); | 2872 __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); |
2873 __ cmpq(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 2873 __ cmpq(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
2874 __ j(below, if_false); | 2874 __ j(below, if_false); |
2875 __ cmpq(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 2875 __ cmpq(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
2876 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2876 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
2877 Split(below_equal, if_true, if_false, fall_through); | 2877 Split(below_equal, if_true, if_false, fall_through); |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2909 VisitForAccumulatorValue(args->at(0)); | 2909 VisitForAccumulatorValue(args->at(0)); |
2910 | 2910 |
2911 Label materialize_true, materialize_false; | 2911 Label materialize_true, materialize_false; |
2912 Label* if_true = NULL; | 2912 Label* if_true = NULL; |
2913 Label* if_false = NULL; | 2913 Label* if_false = NULL; |
2914 Label* fall_through = NULL; | 2914 Label* fall_through = NULL; |
2915 context()->PrepareTest(&materialize_true, &materialize_false, | 2915 context()->PrepareTest(&materialize_true, &materialize_false, |
2916 &if_true, &if_false, &fall_through); | 2916 &if_true, &if_false, &fall_through); |
2917 | 2917 |
2918 __ JumpIfSmi(rax, if_false); | 2918 __ JumpIfSmi(rax, if_false); |
2919 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); | 2919 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset)); |
2920 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), | 2920 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), |
2921 Immediate(1 << Map::kIsUndetectable)); | 2921 Immediate(1 << Map::kIsUndetectable)); |
2922 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2922 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
2923 Split(not_zero, if_true, if_false, fall_through); | 2923 Split(not_zero, if_true, if_false, fall_through); |
2924 | 2924 |
2925 context()->Plug(if_true, if_false); | 2925 context()->Plug(if_true, if_false); |
2926 } | 2926 } |
2927 | 2927 |
2928 | 2928 |
// %_IsStringWrapperSafeForDefaultValueOf(obj): emits code that tests whether
// the (non-smi) object in rax can safely use the default valueOf behavior,
// i.e. whether neither the object's map nor its prototype chain (checked via
// the unmodified String prototype map) shadows "valueOf".  The boolean answer
// is delivered through the surrounding test context (if_true/if_false).
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object into rax.

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // The caller guarantees a heap object; smis never reach this point.
  __ AssertNotSmi(rax);

  // Check whether this map has already been checked to be safe for default
  // valueOf.  If so, the (expensive) descriptor scan below can be skipped.
  __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset));
  __ testb(FieldOperand(rbx, Map::kBitField2Offset),
           Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ j(not_zero, &skip_lookup);

  // Check for fast case object. Generate false result for slow case object
  // (dictionary-mode properties, recognized by the hash-table map).
  __ movp(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
  __ j(equal, if_false);

  // Look for valueOf string in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(rcx, rbx);
  __ cmpq(rcx, Immediate(0));
  __ j(equal, &done);

  __ LoadInstanceDescriptors(rbx, r8);
  // rbx: map of the object.
  // r8: descriptor array (NOTE(review): original comment said rbx, but the
  //     key-scan loop below walks r8 — r8 holds the descriptor array here).
  // rcx: valid entries in the descriptor array.
  // Calculate the end of the descriptor array into rcx.
  __ imul(rcx, rcx, Immediate(DescriptorArray::kDescriptorSize));
  SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
  __ lea(rcx,
         Operand(
             r8, index.reg, index.scale, DescriptorArray::kFirstOffset));
  // Calculate location of the first key name.
  __ addq(r8, Immediate(DescriptorArray::kFirstOffset));
  // Loop through all the keys in the descriptor array. If one of these is the
  // internalized string "valueOf" the result is false.
  __ jmp(&entry);
  __ bind(&loop);
  __ movp(rdx, FieldOperand(r8, 0));
  __ Cmp(rdx, isolate()->factory()->value_of_string());
  __ j(equal, if_false);
  // Advance to the key of the next descriptor entry.
  __ addq(r8, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ cmpq(r8, rcx);
  __ j(not_equal, &loop);

  __ bind(&done);

  // Set the bit in the map to indicate that there is no local valueOf field,
  // so subsequent checks on this map take the fast path above.
  __ or_(FieldOperand(rbx, Map::kBitField2Offset),
         Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object check that its
  // prototype is the un-modified String prototype. If not result is false.
  __ movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
  __ testq(rcx, Immediate(kSmiTagMask));
  __ j(zero, if_false);  // A smi prototype can never be the String prototype.
  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
  // Load the native context to find the canonical String prototype map.
  __ movp(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movp(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
  __ cmpq(rcx,
          ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3014 | 3014 |
3015 | 3015 |
3016 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { | 3016 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { |
(...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3111 ASSERT(expr->arguments()->length() == 0); | 3111 ASSERT(expr->arguments()->length() == 0); |
3112 | 3112 |
3113 Label materialize_true, materialize_false; | 3113 Label materialize_true, materialize_false; |
3114 Label* if_true = NULL; | 3114 Label* if_true = NULL; |
3115 Label* if_false = NULL; | 3115 Label* if_false = NULL; |
3116 Label* fall_through = NULL; | 3116 Label* fall_through = NULL; |
3117 context()->PrepareTest(&materialize_true, &materialize_false, | 3117 context()->PrepareTest(&materialize_true, &materialize_false, |
3118 &if_true, &if_false, &fall_through); | 3118 &if_true, &if_false, &fall_through); |
3119 | 3119 |
3120 // Get the frame pointer for the calling frame. | 3120 // Get the frame pointer for the calling frame. |
3121 __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 3121 __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
3122 | 3122 |
3123 // Skip the arguments adaptor frame if it exists. | 3123 // Skip the arguments adaptor frame if it exists. |
3124 Label check_frame_marker; | 3124 Label check_frame_marker; |
3125 __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset), | 3125 __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset), |
3126 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 3126 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
3127 __ j(not_equal, &check_frame_marker); | 3127 __ j(not_equal, &check_frame_marker); |
3128 __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset)); | 3128 __ movp(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset)); |
3129 | 3129 |
3130 // Check the marker in the calling frame. | 3130 // Check the marker in the calling frame. |
3131 __ bind(&check_frame_marker); | 3131 __ bind(&check_frame_marker); |
3132 __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset), | 3132 __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset), |
3133 Smi::FromInt(StackFrame::CONSTRUCT)); | 3133 Smi::FromInt(StackFrame::CONSTRUCT)); |
3134 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 3134 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
3135 Split(equal, if_true, if_false, fall_through); | 3135 Split(equal, if_true, if_false, fall_through); |
3136 | 3136 |
3137 context()->Plug(if_true, if_false); | 3137 context()->Plug(if_true, if_false); |
3138 } | 3138 } |
(...skipping 23 matching lines...) Expand all Loading... |
3162 } | 3162 } |
3163 | 3163 |
3164 | 3164 |
// %_Arguments(index): emits code that loads the actual argument at the given
// index by calling ArgumentsAccessStub in READ_ELEMENT mode.  The result is
// left in rax and plugged into the current expression context.
void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in rdx and the formal
  // parameter count in rax.
  VisitForAccumulatorValue(args->at(0));  // Evaluate the index into rax.
  __ movp(rdx, rax);
  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(rax);
}
3178 | 3178 |
3179 | 3179 |
// %_ArgumentsLength(): emits code that produces the number of actual
// arguments as a smi in rax.  Defaults to the formal parameter count; when
// the calling frame is an arguments adaptor frame, the real argument count
// is read from that frame instead.
void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label exit;
  // Get the number of formal parameters (the default answer).
  __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));

  // Check if the calling frame is an arguments adaptor frame.
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &exit, Label::kNear);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ movp(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  __ AssertSmi(rax);  // Both paths must yield a smi.
  context()->Plug(rax);
}
3201 | 3201 |
3202 | 3202 |
3203 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { | 3203 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { |
3204 ZoneList<Expression*>* args = expr->arguments(); | 3204 ZoneList<Expression*>* args = expr->arguments(); |
3205 ASSERT(args->length() == 1); | 3205 ASSERT(args->length() == 1); |
(...skipping 17 matching lines...) Expand all Loading... |
3223 __ j(equal, &function); | 3223 __ j(equal, &function); |
3224 | 3224 |
3225 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE); | 3225 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE); |
3226 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == | 3226 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == |
3227 LAST_SPEC_OBJECT_TYPE - 1); | 3227 LAST_SPEC_OBJECT_TYPE - 1); |
3228 __ j(equal, &function); | 3228 __ j(equal, &function); |
3229 // Assume that there is no larger type. | 3229 // Assume that there is no larger type. |
3230 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); | 3230 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); |
3231 | 3231 |
3232 // Check if the constructor in the map is a JS function. | 3232 // Check if the constructor in the map is a JS function. |
3233 __ movq(rax, FieldOperand(rax, Map::kConstructorOffset)); | 3233 __ movp(rax, FieldOperand(rax, Map::kConstructorOffset)); |
3234 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx); | 3234 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx); |
3235 __ j(not_equal, &non_function_constructor); | 3235 __ j(not_equal, &non_function_constructor); |
3236 | 3236 |
3237 // rax now contains the constructor function. Grab the | 3237 // rax now contains the constructor function. Grab the |
3238 // instance class name from there. | 3238 // instance class name from there. |
3239 __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); | 3239 __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); |
3240 __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset)); | 3240 __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset)); |
3241 __ jmp(&done); | 3241 __ jmp(&done); |
3242 | 3242 |
3243 // Functions have class 'Function'. | 3243 // Functions have class 'Function'. |
3244 __ bind(&function); | 3244 __ bind(&function); |
3245 __ Move(rax, isolate()->factory()->function_class_string()); | 3245 __ Move(rax, isolate()->factory()->function_class_string()); |
3246 __ jmp(&done); | 3246 __ jmp(&done); |
3247 | 3247 |
3248 // Objects with a non-function constructor have class 'Object'. | 3248 // Objects with a non-function constructor have class 'Object'. |
3249 __ bind(&non_function_constructor); | 3249 __ bind(&non_function_constructor); |
3250 __ Move(rax, isolate()->factory()->Object_string()); | 3250 __ Move(rax, isolate()->factory()->Object_string()); |
(...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3314 ASSERT(args->length() == 1); | 3314 ASSERT(args->length() == 1); |
3315 | 3315 |
3316 VisitForAccumulatorValue(args->at(0)); // Load the object. | 3316 VisitForAccumulatorValue(args->at(0)); // Load the object. |
3317 | 3317 |
3318 Label done; | 3318 Label done; |
3319 // If the object is a smi return the object. | 3319 // If the object is a smi return the object. |
3320 __ JumpIfSmi(rax, &done); | 3320 __ JumpIfSmi(rax, &done); |
3321 // If the object is not a value type, return the object. | 3321 // If the object is not a value type, return the object. |
3322 __ CmpObjectType(rax, JS_VALUE_TYPE, rbx); | 3322 __ CmpObjectType(rax, JS_VALUE_TYPE, rbx); |
3323 __ j(not_equal, &done); | 3323 __ j(not_equal, &done); |
3324 __ movq(rax, FieldOperand(rax, JSValue::kValueOffset)); | 3324 __ movp(rax, FieldOperand(rax, JSValue::kValueOffset)); |
3325 | 3325 |
3326 __ bind(&done); | 3326 __ bind(&done); |
3327 context()->Plug(rax); | 3327 context()->Plug(rax); |
3328 } | 3328 } |
3329 | 3329 |
3330 | 3330 |
// %_DateField(date, index): emits code that reads field `index` of a JSDate.
// Index 0 is the date's time value; other cached fields are read inline when
// the date cache stamp is still current, otherwise (or for uncached fields)
// a C function computes the value.  Non-date receivers throw via
// Runtime::kThrowNotDateError.  The result ends up in rax.
void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  // The field index must be a compile-time smi literal.
  ASSERT_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done, not_date_object;
  Register object = rax;
  Register result = rax;  // Result aliases the object register.
  Register scratch = rcx;

  // Bail out to the throwing path unless we really have a JSDate.
  __ JumpIfSmi(object, &not_date_object);
  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
  __ j(not_equal, &not_date_object);

  if (index->value() == 0) {
    // Field 0 is the time value itself, stored directly in the object.
    __ movp(result, FieldOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      // Cached field: valid only while the global date cache stamp matches
      // the stamp recorded in the date object.
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      Operand stamp_operand = __ ExternalOperand(stamp);
      __ movp(scratch, stamp_operand);
      __ cmpq(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
      __ j(not_equal, &runtime, Label::kNear);
      __ movp(result, FieldOperand(object, JSDate::kValueOffset +
                                           kPointerSize * index->value()));
      __ jmp(&done);
    }
    // Slow path: compute the field in C++ (also refreshes the cache).
    __ bind(&runtime);
    __ PrepareCallCFunction(2);
    __ movp(arg_reg_1, object);
    __ Move(arg_reg_2, index, RelocInfo::NONE64);
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    // Restore the context register clobbered by the C call.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(rax);
}
3376 | 3376 |
3377 | 3377 |
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3463 | 3463 |
3464 Label done; | 3464 Label done; |
3465 // If the object is a smi, return the value. | 3465 // If the object is a smi, return the value. |
3466 __ JumpIfSmi(rbx, &done); | 3466 __ JumpIfSmi(rbx, &done); |
3467 | 3467 |
3468 // If the object is not a value type, return the value. | 3468 // If the object is not a value type, return the value. |
3469 __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx); | 3469 __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx); |
3470 __ j(not_equal, &done); | 3470 __ j(not_equal, &done); |
3471 | 3471 |
3472 // Store the value. | 3472 // Store the value. |
3473 __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax); | 3473 __ movp(FieldOperand(rbx, JSValue::kValueOffset), rax); |
3474 // Update the write barrier. Save the value as it will be | 3474 // Update the write barrier. Save the value as it will be |
3475 // overwritten by the write barrier code and is needed afterward. | 3475 // overwritten by the write barrier code and is needed afterward. |
3476 __ movq(rdx, rax); | 3476 __ movp(rdx, rax); |
3477 __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs); | 3477 __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs); |
3478 | 3478 |
3479 __ bind(&done); | 3479 __ bind(&done); |
3480 context()->Plug(rax); | 3480 context()->Plug(rax); |
3481 } | 3481 } |
3482 | 3482 |
3483 | 3483 |
3484 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { | 3484 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { |
3485 ZoneList<Expression*>* args = expr->arguments(); | 3485 ZoneList<Expression*>* args = expr->arguments(); |
3486 ASSERT_EQ(args->length(), 1); | 3486 ASSERT_EQ(args->length(), 1); |
(...skipping 185 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3672 } | 3672 } |
3673 VisitForAccumulatorValue(args->last()); // Function. | 3673 VisitForAccumulatorValue(args->last()); // Function. |
3674 | 3674 |
3675 Label runtime, done; | 3675 Label runtime, done; |
3676 // Check for non-function argument (including proxy). | 3676 // Check for non-function argument (including proxy). |
3677 __ JumpIfSmi(rax, &runtime); | 3677 __ JumpIfSmi(rax, &runtime); |
3678 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx); | 3678 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx); |
3679 __ j(not_equal, &runtime); | 3679 __ j(not_equal, &runtime); |
3680 | 3680 |
3681 // InvokeFunction requires the function in rdi. Move it in there. | 3681 // InvokeFunction requires the function in rdi. Move it in there. |
3682 __ movq(rdi, result_register()); | 3682 __ movp(rdi, result_register()); |
3683 ParameterCount count(arg_count); | 3683 ParameterCount count(arg_count); |
3684 __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper()); | 3684 __ InvokeFunction(rdi, count, CALL_FUNCTION, NullCallWrapper()); |
3685 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 3685 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
3686 __ jmp(&done); | 3686 __ jmp(&done); |
3687 | 3687 |
3688 __ bind(&runtime); | 3688 __ bind(&runtime); |
3689 __ push(rax); | 3689 __ push(rax); |
3690 __ CallRuntime(Runtime::kCall, args->length()); | 3690 __ CallRuntime(Runtime::kCall, args->length()); |
3691 __ bind(&done); | 3691 __ bind(&done); |
3692 | 3692 |
3693 context()->Plug(rax); | 3693 context()->Plug(rax); |
3694 } | 3694 } |
3695 | 3695 |
(...skipping 24 matching lines...) Expand all Loading... |
3720 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); | 3720 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); |
3721 context()->Plug(rax); | 3721 context()->Plug(rax); |
3722 return; | 3722 return; |
3723 } | 3723 } |
3724 | 3724 |
3725 VisitForAccumulatorValue(args->at(1)); | 3725 VisitForAccumulatorValue(args->at(1)); |
3726 | 3726 |
3727 Register key = rax; | 3727 Register key = rax; |
3728 Register cache = rbx; | 3728 Register cache = rbx; |
3729 Register tmp = rcx; | 3729 Register tmp = rcx; |
3730 __ movq(cache, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX)); | 3730 __ movp(cache, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX)); |
3731 __ movq(cache, | 3731 __ movp(cache, |
3732 FieldOperand(cache, GlobalObject::kNativeContextOffset)); | 3732 FieldOperand(cache, GlobalObject::kNativeContextOffset)); |
3733 __ movq(cache, | 3733 __ movp(cache, |
3734 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); | 3734 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); |
3735 __ movq(cache, | 3735 __ movp(cache, |
3736 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); | 3736 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); |
3737 | 3737 |
3738 Label done, not_found; | 3738 Label done, not_found; |
3739 // tmp now holds finger offset as a smi. | 3739 // tmp now holds finger offset as a smi. |
3740 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); | 3740 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
3741 __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset)); | 3741 __ movp(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset)); |
3742 SmiIndex index = | 3742 SmiIndex index = |
3743 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2); | 3743 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2); |
3744 __ cmpq(key, FieldOperand(cache, | 3744 __ cmpq(key, FieldOperand(cache, |
3745 index.reg, | 3745 index.reg, |
3746 index.scale, | 3746 index.scale, |
3747 FixedArray::kHeaderSize)); | 3747 FixedArray::kHeaderSize)); |
3748 __ j(not_equal, ¬_found, Label::kNear); | 3748 __ j(not_equal, ¬_found, Label::kNear); |
3749 __ movq(rax, FieldOperand(cache, | 3749 __ movp(rax, FieldOperand(cache, |
3750 index.reg, | 3750 index.reg, |
3751 index.scale, | 3751 index.scale, |
3752 FixedArray::kHeaderSize + kPointerSize)); | 3752 FixedArray::kHeaderSize + kPointerSize)); |
3753 __ jmp(&done, Label::kNear); | 3753 __ jmp(&done, Label::kNear); |
3754 | 3754 |
3755 __ bind(¬_found); | 3755 __ bind(¬_found); |
3756 // Call runtime to perform the lookup. | 3756 // Call runtime to perform the lookup. |
3757 __ push(cache); | 3757 __ push(cache); |
3758 __ push(key); | 3758 __ push(key); |
3759 __ CallRuntime(Runtime::kGetFromCache, 2); | 3759 __ CallRuntime(Runtime::kGetFromCache, 2); |
(...skipping 15 matching lines...) Expand all Loading... |
3775 VisitForAccumulatorValue(args->at(1)); | 3775 VisitForAccumulatorValue(args->at(1)); |
3776 __ pop(left); | 3776 __ pop(left); |
3777 | 3777 |
3778 Label done, fail, ok; | 3778 Label done, fail, ok; |
3779 __ cmpq(left, right); | 3779 __ cmpq(left, right); |
3780 __ j(equal, &ok, Label::kNear); | 3780 __ j(equal, &ok, Label::kNear); |
3781 // Fail if either is a non-HeapObject. | 3781 // Fail if either is a non-HeapObject. |
3782 Condition either_smi = masm()->CheckEitherSmi(left, right, tmp); | 3782 Condition either_smi = masm()->CheckEitherSmi(left, right, tmp); |
3783 __ j(either_smi, &fail, Label::kNear); | 3783 __ j(either_smi, &fail, Label::kNear); |
3784 __ j(zero, &fail, Label::kNear); | 3784 __ j(zero, &fail, Label::kNear); |
3785 __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset)); | 3785 __ movp(tmp, FieldOperand(left, HeapObject::kMapOffset)); |
3786 __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset), | 3786 __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset), |
3787 Immediate(JS_REGEXP_TYPE)); | 3787 Immediate(JS_REGEXP_TYPE)); |
3788 __ j(not_equal, &fail, Label::kNear); | 3788 __ j(not_equal, &fail, Label::kNear); |
3789 __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset)); | 3789 __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset)); |
3790 __ j(not_equal, &fail, Label::kNear); | 3790 __ j(not_equal, &fail, Label::kNear); |
3791 __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset)); | 3791 __ movp(tmp, FieldOperand(left, JSRegExp::kDataOffset)); |
3792 __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset)); | 3792 __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset)); |
3793 __ j(equal, &ok, Label::kNear); | 3793 __ j(equal, &ok, Label::kNear); |
3794 __ bind(&fail); | 3794 __ bind(&fail); |
3795 __ Move(rax, isolate()->factory()->false_value()); | 3795 __ Move(rax, isolate()->factory()->false_value()); |
3796 __ jmp(&done, Label::kNear); | 3796 __ jmp(&done, Label::kNear); |
3797 __ bind(&ok); | 3797 __ bind(&ok); |
3798 __ Move(rax, isolate()->factory()->true_value()); | 3798 __ Move(rax, isolate()->factory()->true_value()); |
3799 __ bind(&done); | 3799 __ bind(&done); |
3800 | 3800 |
3801 context()->Plug(rax); | 3801 context()->Plug(rax); |
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3876 // Check that the array is a JSArray | 3876 // Check that the array is a JSArray |
3877 __ JumpIfSmi(array, &bailout); | 3877 __ JumpIfSmi(array, &bailout); |
3878 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch); | 3878 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch); |
3879 __ j(not_equal, &bailout); | 3879 __ j(not_equal, &bailout); |
3880 | 3880 |
3881 // Check that the array has fast elements. | 3881 // Check that the array has fast elements. |
3882 __ CheckFastElements(scratch, &bailout); | 3882 __ CheckFastElements(scratch, &bailout); |
3883 | 3883 |
3884 // Array has fast elements, so its length must be a smi. | 3884 // Array has fast elements, so its length must be a smi. |
3885 // If the array has length zero, return the empty string. | 3885 // If the array has length zero, return the empty string. |
3886 __ movq(array_length, FieldOperand(array, JSArray::kLengthOffset)); | 3886 __ movp(array_length, FieldOperand(array, JSArray::kLengthOffset)); |
3887 __ SmiCompare(array_length, Smi::FromInt(0)); | 3887 __ SmiCompare(array_length, Smi::FromInt(0)); |
3888 __ j(not_zero, &non_trivial_array); | 3888 __ j(not_zero, &non_trivial_array); |
3889 __ LoadRoot(rax, Heap::kempty_stringRootIndex); | 3889 __ LoadRoot(rax, Heap::kempty_stringRootIndex); |
3890 __ jmp(&return_result); | 3890 __ jmp(&return_result); |
3891 | 3891 |
3892 // Save the array length on the stack. | 3892 // Save the array length on the stack. |
3893 __ bind(&non_trivial_array); | 3893 __ bind(&non_trivial_array); |
3894 __ SmiToInteger32(array_length, array_length); | 3894 __ SmiToInteger32(array_length, array_length); |
3895 __ movl(array_length_operand, array_length); | 3895 __ movl(array_length_operand, array_length); |
3896 | 3896 |
3897 // Save the FixedArray containing array's elements. | 3897 // Save the FixedArray containing array's elements. |
3898 // End of array's live range. | 3898 // End of array's live range. |
3899 elements = array; | 3899 elements = array; |
3900 __ movq(elements, FieldOperand(array, JSArray::kElementsOffset)); | 3900 __ movp(elements, FieldOperand(array, JSArray::kElementsOffset)); |
3901 array = no_reg; | 3901 array = no_reg; |
3902 | 3902 |
3903 | 3903 |
3904 // Check that all array elements are sequential ASCII strings, and | 3904 // Check that all array elements are sequential ASCII strings, and |
3905 // accumulate the sum of their lengths, as a smi-encoded value. | 3905 // accumulate the sum of their lengths, as a smi-encoded value. |
3906 __ Set(index, 0); | 3906 __ Set(index, 0); |
3907 __ Set(string_length, 0); | 3907 __ Set(string_length, 0); |
3908 // Loop condition: while (index < array_length). | 3908 // Loop condition: while (index < array_length). |
3909 // Live loop registers: index(int32), array_length(int32), string(String*), | 3909 // Live loop registers: index(int32), array_length(int32), string(String*), |
3910 // scratch, string_length(int32), elements(FixedArray*). | 3910 // scratch, string_length(int32), elements(FixedArray*). |
3911 if (generate_debug_code_) { | 3911 if (generate_debug_code_) { |
3912 __ cmpq(index, array_length); | 3912 __ cmpq(index, array_length); |
3913 __ Assert(below, kNoEmptyArraysHereInEmitFastAsciiArrayJoin); | 3913 __ Assert(below, kNoEmptyArraysHereInEmitFastAsciiArrayJoin); |
3914 } | 3914 } |
3915 __ bind(&loop); | 3915 __ bind(&loop); |
3916 __ movq(string, FieldOperand(elements, | 3916 __ movp(string, FieldOperand(elements, |
3917 index, | 3917 index, |
3918 times_pointer_size, | 3918 times_pointer_size, |
3919 FixedArray::kHeaderSize)); | 3919 FixedArray::kHeaderSize)); |
3920 __ JumpIfSmi(string, &bailout); | 3920 __ JumpIfSmi(string, &bailout); |
3921 __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset)); | 3921 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset)); |
3922 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); | 3922 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); |
3923 __ andb(scratch, Immediate( | 3923 __ andb(scratch, Immediate( |
3924 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask)); | 3924 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask)); |
3925 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag)); | 3925 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag)); |
3926 __ j(not_equal, &bailout); | 3926 __ j(not_equal, &bailout); |
3927 __ AddSmiField(string_length, | 3927 __ AddSmiField(string_length, |
3928 FieldOperand(string, SeqOneByteString::kLengthOffset)); | 3928 FieldOperand(string, SeqOneByteString::kLengthOffset)); |
3929 __ j(overflow, &bailout); | 3929 __ j(overflow, &bailout); |
3930 __ incl(index); | 3930 __ incl(index); |
3931 __ cmpl(index, array_length); | 3931 __ cmpl(index, array_length); |
3932 __ j(less, &loop); | 3932 __ j(less, &loop); |
3933 | 3933 |
3934 // Live registers: | 3934 // Live registers: |
3935 // string_length: Sum of string lengths. | 3935 // string_length: Sum of string lengths. |
3936 // elements: FixedArray of strings. | 3936 // elements: FixedArray of strings. |
3937 // index: Array length. | 3937 // index: Array length. |
3938 // array_length: Array length. | 3938 // array_length: Array length. |
3939 | 3939 |
3940 // If array_length is 1, return elements[0], a string. | 3940 // If array_length is 1, return elements[0], a string. |
3941 __ cmpl(array_length, Immediate(1)); | 3941 __ cmpl(array_length, Immediate(1)); |
3942 __ j(not_equal, ¬_size_one_array); | 3942 __ j(not_equal, ¬_size_one_array); |
3943 __ movq(rax, FieldOperand(elements, FixedArray::kHeaderSize)); | 3943 __ movp(rax, FieldOperand(elements, FixedArray::kHeaderSize)); |
3944 __ jmp(&return_result); | 3944 __ jmp(&return_result); |
3945 | 3945 |
3946 __ bind(¬_size_one_array); | 3946 __ bind(¬_size_one_array); |
3947 | 3947 |
3948 // End of array_length live range. | 3948 // End of array_length live range. |
3949 result_pos = array_length; | 3949 result_pos = array_length; |
3950 array_length = no_reg; | 3950 array_length = no_reg; |
3951 | 3951 |
3952 // Live registers: | 3952 // Live registers: |
3953 // string_length: Sum of string lengths. | 3953 // string_length: Sum of string lengths. |
3954 // elements: FixedArray of strings. | 3954 // elements: FixedArray of strings. |
3955 // index: Array length. | 3955 // index: Array length. |
3956 | 3956 |
3957 // Check that the separator is a sequential ASCII string. | 3957 // Check that the separator is a sequential ASCII string. |
3958 __ movq(string, separator_operand); | 3958 __ movp(string, separator_operand); |
3959 __ JumpIfSmi(string, &bailout); | 3959 __ JumpIfSmi(string, &bailout); |
3960 __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset)); | 3960 __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset)); |
3961 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); | 3961 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); |
3962 __ andb(scratch, Immediate( | 3962 __ andb(scratch, Immediate( |
3963 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask)); | 3963 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask)); |
3964 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag)); | 3964 __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag)); |
3965 __ j(not_equal, &bailout); | 3965 __ j(not_equal, &bailout); |
3966 | 3966 |
3967 // Live registers: | 3967 // Live registers: |
3968 // string_length: Sum of string lengths. | 3968 // string_length: Sum of string lengths. |
3969 // elements: FixedArray of strings. | 3969 // elements: FixedArray of strings. |
3970 // index: Array length. | 3970 // index: Array length. |
3971 // string: Separator string. | 3971 // string: Separator string. |
3972 | 3972 |
3973 // Add (separator length times (array_length - 1)) to string_length. | 3973 // Add (separator length times (array_length - 1)) to string_length. |
3974 __ SmiToInteger32(scratch, | 3974 __ SmiToInteger32(scratch, |
3975 FieldOperand(string, SeqOneByteString::kLengthOffset)); | 3975 FieldOperand(string, SeqOneByteString::kLengthOffset)); |
3976 __ decl(index); | 3976 __ decl(index); |
3977 __ imull(scratch, index); | 3977 __ imull(scratch, index); |
3978 __ j(overflow, &bailout); | 3978 __ j(overflow, &bailout); |
3979 __ addl(string_length, scratch); | 3979 __ addl(string_length, scratch); |
3980 __ j(overflow, &bailout); | 3980 __ j(overflow, &bailout); |
3981 | 3981 |
3982 // Live registers and stack values: | 3982 // Live registers and stack values: |
3983 // string_length: Total length of result string. | 3983 // string_length: Total length of result string. |
3984 // elements: FixedArray of strings. | 3984 // elements: FixedArray of strings. |
3985 __ AllocateAsciiString(result_pos, string_length, scratch, | 3985 __ AllocateAsciiString(result_pos, string_length, scratch, |
3986 index, string, &bailout); | 3986 index, string, &bailout); |
3987 __ movq(result_operand, result_pos); | 3987 __ movp(result_operand, result_pos); |
3988 __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize)); | 3988 __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize)); |
3989 | 3989 |
3990 __ movq(string, separator_operand); | 3990 __ movp(string, separator_operand); |
3991 __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset), | 3991 __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset), |
3992 Smi::FromInt(1)); | 3992 Smi::FromInt(1)); |
3993 __ j(equal, &one_char_separator); | 3993 __ j(equal, &one_char_separator); |
3994 __ j(greater, &long_separator); | 3994 __ j(greater, &long_separator); |
3995 | 3995 |
3996 | 3996 |
3997 // Empty separator case: | 3997 // Empty separator case: |
3998 __ Set(index, 0); | 3998 __ Set(index, 0); |
3999 __ movl(scratch, array_length_operand); | 3999 __ movl(scratch, array_length_operand); |
4000 __ jmp(&loop_1_condition); | 4000 __ jmp(&loop_1_condition); |
4001 // Loop condition: while (index < array_length). | 4001 // Loop condition: while (index < array_length). |
4002 __ bind(&loop_1); | 4002 __ bind(&loop_1); |
4003 // Each iteration of the loop concatenates one string to the result. | 4003 // Each iteration of the loop concatenates one string to the result. |
4004 // Live values in registers: | 4004 // Live values in registers: |
4005 // index: which element of the elements array we are adding to the result. | 4005 // index: which element of the elements array we are adding to the result. |
4006 // result_pos: the position to which we are currently copying characters. | 4006 // result_pos: the position to which we are currently copying characters. |
4007 // elements: the FixedArray of strings we are joining. | 4007 // elements: the FixedArray of strings we are joining. |
4008 // scratch: array length. | 4008 // scratch: array length. |
4009 | 4009 |
4010 // Get string = array[index]. | 4010 // Get string = array[index]. |
4011 __ movq(string, FieldOperand(elements, index, | 4011 __ movp(string, FieldOperand(elements, index, |
4012 times_pointer_size, | 4012 times_pointer_size, |
4013 FixedArray::kHeaderSize)); | 4013 FixedArray::kHeaderSize)); |
4014 __ SmiToInteger32(string_length, | 4014 __ SmiToInteger32(string_length, |
4015 FieldOperand(string, String::kLengthOffset)); | 4015 FieldOperand(string, String::kLengthOffset)); |
4016 __ lea(string, | 4016 __ lea(string, |
4017 FieldOperand(string, SeqOneByteString::kHeaderSize)); | 4017 FieldOperand(string, SeqOneByteString::kHeaderSize)); |
4018 __ CopyBytes(result_pos, string, string_length); | 4018 __ CopyBytes(result_pos, string, string_length); |
4019 __ incl(index); | 4019 __ incl(index); |
4020 __ bind(&loop_1_condition); | 4020 __ bind(&loop_1_condition); |
4021 __ cmpl(index, scratch); | 4021 __ cmpl(index, scratch); |
(...skipping 23 matching lines...) Expand all Loading... |
4045 // index: which element of the elements array we are adding to the result. | 4045 // index: which element of the elements array we are adding to the result. |
4046 // result_pos: the position to which we are currently copying characters. | 4046 // result_pos: the position to which we are currently copying characters. |
4047 // scratch: Separator character. | 4047 // scratch: Separator character. |
4048 | 4048 |
4049 // Copy the separator character to the result. | 4049 // Copy the separator character to the result. |
4050 __ movb(Operand(result_pos, 0), scratch); | 4050 __ movb(Operand(result_pos, 0), scratch); |
4051 __ incq(result_pos); | 4051 __ incq(result_pos); |
4052 | 4052 |
4053 __ bind(&loop_2_entry); | 4053 __ bind(&loop_2_entry); |
4054 // Get string = array[index]. | 4054 // Get string = array[index]. |
4055 __ movq(string, FieldOperand(elements, index, | 4055 __ movp(string, FieldOperand(elements, index, |
4056 times_pointer_size, | 4056 times_pointer_size, |
4057 FixedArray::kHeaderSize)); | 4057 FixedArray::kHeaderSize)); |
4058 __ SmiToInteger32(string_length, | 4058 __ SmiToInteger32(string_length, |
4059 FieldOperand(string, String::kLengthOffset)); | 4059 FieldOperand(string, String::kLengthOffset)); |
4060 __ lea(string, | 4060 __ lea(string, |
4061 FieldOperand(string, SeqOneByteString::kHeaderSize)); | 4061 FieldOperand(string, SeqOneByteString::kHeaderSize)); |
4062 __ CopyBytes(result_pos, string, string_length); | 4062 __ CopyBytes(result_pos, string, string_length); |
4063 __ incl(index); | 4063 __ incl(index); |
4064 __ cmpl(index, array_length_operand); | 4064 __ cmpl(index, array_length_operand); |
4065 __ j(less, &loop_2); // End while (index < length). | 4065 __ j(less, &loop_2); // End while (index < length). |
4066 __ jmp(&done); | 4066 __ jmp(&done); |
4067 | 4067 |
4068 | 4068 |
4069 // Long separator case (separator is more than one character). | 4069 // Long separator case (separator is more than one character). |
4070 __ bind(&long_separator); | 4070 __ bind(&long_separator); |
4071 | 4071 |
4072 // Make elements point to end of elements array, and index | 4072 // Make elements point to end of elements array, and index |
4073 // count from -array_length to zero, so we don't need to maintain | 4073 // count from -array_length to zero, so we don't need to maintain |
4074 // a loop limit. | 4074 // a loop limit. |
4075 __ movl(index, array_length_operand); | 4075 __ movl(index, array_length_operand); |
4076 __ lea(elements, FieldOperand(elements, index, times_pointer_size, | 4076 __ lea(elements, FieldOperand(elements, index, times_pointer_size, |
4077 FixedArray::kHeaderSize)); | 4077 FixedArray::kHeaderSize)); |
4078 __ neg(index); | 4078 __ neg(index); |
4079 | 4079 |
4080 // Replace separator string with pointer to its first character, and | 4080 // Replace separator string with pointer to its first character, and |
4081 // make scratch be its length. | 4081 // make scratch be its length. |
4082 __ movq(string, separator_operand); | 4082 __ movp(string, separator_operand); |
4083 __ SmiToInteger32(scratch, | 4083 __ SmiToInteger32(scratch, |
4084 FieldOperand(string, String::kLengthOffset)); | 4084 FieldOperand(string, String::kLengthOffset)); |
4085 __ lea(string, | 4085 __ lea(string, |
4086 FieldOperand(string, SeqOneByteString::kHeaderSize)); | 4086 FieldOperand(string, SeqOneByteString::kHeaderSize)); |
4087 __ movq(separator_operand, string); | 4087 __ movp(separator_operand, string); |
4088 | 4088 |
4089 // Jump into the loop after the code that copies the separator, so the first | 4089 // Jump into the loop after the code that copies the separator, so the first |
4090 // element is not preceded by a separator | 4090 // element is not preceded by a separator |
4091 __ jmp(&loop_3_entry); | 4091 __ jmp(&loop_3_entry); |
4092 // Loop condition: while (index < length). | 4092 // Loop condition: while (index < length). |
4093 __ bind(&loop_3); | 4093 __ bind(&loop_3); |
4094 // Each iteration of the loop concatenates one string to the result. | 4094 // Each iteration of the loop concatenates one string to the result. |
4095 // Live values in registers: | 4095 // Live values in registers: |
4096 // index: which element of the elements array we are adding to the result. | 4096 // index: which element of the elements array we are adding to the result. |
4097 // result_pos: the position to which we are currently copying characters. | 4097 // result_pos: the position to which we are currently copying characters. |
4098 // scratch: Separator length. | 4098 // scratch: Separator length. |
4099 // separator_operand (rsp[0x10]): Address of first char of separator. | 4099 // separator_operand (rsp[0x10]): Address of first char of separator. |
4100 | 4100 |
4101 // Copy the separator to the result. | 4101 // Copy the separator to the result. |
4102 __ movq(string, separator_operand); | 4102 __ movp(string, separator_operand); |
4103 __ movl(string_length, scratch); | 4103 __ movl(string_length, scratch); |
4104 __ CopyBytes(result_pos, string, string_length, 2); | 4104 __ CopyBytes(result_pos, string, string_length, 2); |
4105 | 4105 |
4106 __ bind(&loop_3_entry); | 4106 __ bind(&loop_3_entry); |
4107 // Get string = array[index]. | 4107 // Get string = array[index]. |
4108 __ movq(string, Operand(elements, index, times_pointer_size, 0)); | 4108 __ movp(string, Operand(elements, index, times_pointer_size, 0)); |
4109 __ SmiToInteger32(string_length, | 4109 __ SmiToInteger32(string_length, |
4110 FieldOperand(string, String::kLengthOffset)); | 4110 FieldOperand(string, String::kLengthOffset)); |
4111 __ lea(string, | 4111 __ lea(string, |
4112 FieldOperand(string, SeqOneByteString::kHeaderSize)); | 4112 FieldOperand(string, SeqOneByteString::kHeaderSize)); |
4113 __ CopyBytes(result_pos, string, string_length); | 4113 __ CopyBytes(result_pos, string, string_length); |
4114 __ incq(index); | 4114 __ incq(index); |
4115 __ j(not_equal, &loop_3); // Loop while (index < 0). | 4115 __ j(not_equal, &loop_3); // Loop while (index < 0). |
4116 | 4116 |
4117 __ bind(&done); | 4117 __ bind(&done); |
4118 __ movq(rax, result_operand); | 4118 __ movp(rax, result_operand); |
4119 | 4119 |
4120 __ bind(&return_result); | 4120 __ bind(&return_result); |
4121 // Drop temp values from the stack, and restore context register. | 4121 // Drop temp values from the stack, and restore context register. |
4122 __ addq(rsp, Immediate(3 * kPointerSize)); | 4122 __ addq(rsp, Immediate(3 * kPointerSize)); |
4123 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 4123 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
4124 context()->Plug(rax); | 4124 context()->Plug(rax); |
4125 } | 4125 } |
4126 | 4126 |
4127 | 4127 |
// Generates code for a runtime call expression.  Names prefixed with '_'
// (e.g. %_IsSmi) are inlined intrinsics handled by EmitInlineRuntimeCall;
// all other calls are dispatched either to a JS builtin via a call IC
// (expr->is_jsruntime()) or directly to the C++ runtime.
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    // Inline runtime call: the intrinsic is emitted directly into the
    // generated code stream instead of performing an actual call.
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function: push the builtins object
    // below the arguments (presumably the receiver for the call IC below
    // — verify against the IC's calling convention).
    __ movp(rax, GlobalObjectOperand());
    __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function using a call IC, keyed by the function
    // name placed in rcx.
    __ Move(rcx, expr->name());
    Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(arg_count);
    CallIC(ic, NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
    // Restore context register.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  } else {
    // Non-JS path: call straight into the C++ runtime implementation.
    __ CallRuntime(expr->function(), arg_count);
  }
  // Either path leaves the result in rax; hand it to the expression context.
  context()->Plug(rax);
}
4163 | 4163 |
4164 | 4164 |
4165 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { | 4165 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { |
4166 switch (expr->op()) { | 4166 switch (expr->op()) { |
4167 case Token::DELETE: { | 4167 case Token::DELETE: { |
(...skipping 143 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4311 if (expr->is_postfix() && !context()->IsEffect()) { | 4311 if (expr->is_postfix() && !context()->IsEffect()) { |
4312 __ Push(Smi::FromInt(0)); | 4312 __ Push(Smi::FromInt(0)); |
4313 } | 4313 } |
4314 if (assign_type == NAMED_PROPERTY) { | 4314 if (assign_type == NAMED_PROPERTY) { |
4315 VisitForAccumulatorValue(prop->obj()); | 4315 VisitForAccumulatorValue(prop->obj()); |
4316 __ push(rax); // Copy of receiver, needed for later store. | 4316 __ push(rax); // Copy of receiver, needed for later store. |
4317 EmitNamedPropertyLoad(prop); | 4317 EmitNamedPropertyLoad(prop); |
4318 } else { | 4318 } else { |
4319 VisitForStackValue(prop->obj()); | 4319 VisitForStackValue(prop->obj()); |
4320 VisitForAccumulatorValue(prop->key()); | 4320 VisitForAccumulatorValue(prop->key()); |
4321 __ movq(rdx, Operand(rsp, 0)); // Leave receiver on stack | 4321 __ movp(rdx, Operand(rsp, 0)); // Leave receiver on stack |
4322 __ push(rax); // Copy of key, needed for later store. | 4322 __ push(rax); // Copy of key, needed for later store. |
4323 EmitKeyedPropertyLoad(prop); | 4323 EmitKeyedPropertyLoad(prop); |
4324 } | 4324 } |
4325 } | 4325 } |
4326 | 4326 |
4327 // We need a second deoptimization point after loading the value | 4327 // We need a second deoptimization point after loading the value |
4328 // in case evaluating the property load may have a side effect. | 4328 // in case evaluating the property load may have a side effect. |
4329 if (assign_type == VARIABLE) { | 4329 if (assign_type == VARIABLE) { |
4330 PrepareForBailout(expr->expression(), TOS_REG); | 4330 PrepareForBailout(expr->expression(), TOS_REG); |
4331 } else { | 4331 } else { |
(...skipping 11 matching lines...) Expand all Loading... |
4343 if (expr->is_postfix()) { | 4343 if (expr->is_postfix()) { |
4344 if (!context()->IsEffect()) { | 4344 if (!context()->IsEffect()) { |
4345 // Save the result on the stack. If we have a named or keyed property | 4345 // Save the result on the stack. If we have a named or keyed property |
4346 // we store the result under the receiver that is currently on top | 4346 // we store the result under the receiver that is currently on top |
4347 // of the stack. | 4347 // of the stack. |
4348 switch (assign_type) { | 4348 switch (assign_type) { |
4349 case VARIABLE: | 4349 case VARIABLE: |
4350 __ push(rax); | 4350 __ push(rax); |
4351 break; | 4351 break; |
4352 case NAMED_PROPERTY: | 4352 case NAMED_PROPERTY: |
4353 __ movq(Operand(rsp, kPointerSize), rax); | 4353 __ movp(Operand(rsp, kPointerSize), rax); |
4354 break; | 4354 break; |
4355 case KEYED_PROPERTY: | 4355 case KEYED_PROPERTY: |
4356 __ movq(Operand(rsp, 2 * kPointerSize), rax); | 4356 __ movp(Operand(rsp, 2 * kPointerSize), rax); |
4357 break; | 4357 break; |
4358 } | 4358 } |
4359 } | 4359 } |
4360 } | 4360 } |
4361 | 4361 |
4362 SmiOperationExecutionMode mode; | 4362 SmiOperationExecutionMode mode; |
4363 mode.Add(PRESERVE_SOURCE_REGISTER); | 4363 mode.Add(PRESERVE_SOURCE_REGISTER); |
4364 mode.Add(BAILOUT_ON_NO_OVERFLOW); | 4364 mode.Add(BAILOUT_ON_NO_OVERFLOW); |
4365 if (expr->op() == Token::INC) { | 4365 if (expr->op() == Token::INC) { |
4366 __ SmiAddConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear); | 4366 __ SmiAddConstant(rax, rax, Smi::FromInt(1), mode, &done, Label::kNear); |
(...skipping 11 matching lines...) Expand all Loading... |
4378 if (expr->is_postfix()) { | 4378 if (expr->is_postfix()) { |
4379 if (!context()->IsEffect()) { | 4379 if (!context()->IsEffect()) { |
4380 // Save the result on the stack. If we have a named or keyed property | 4380 // Save the result on the stack. If we have a named or keyed property |
4381 // we store the result under the receiver that is currently on top | 4381 // we store the result under the receiver that is currently on top |
4382 // of the stack. | 4382 // of the stack. |
4383 switch (assign_type) { | 4383 switch (assign_type) { |
4384 case VARIABLE: | 4384 case VARIABLE: |
4385 __ push(rax); | 4385 __ push(rax); |
4386 break; | 4386 break; |
4387 case NAMED_PROPERTY: | 4387 case NAMED_PROPERTY: |
4388 __ movq(Operand(rsp, kPointerSize), rax); | 4388 __ movp(Operand(rsp, kPointerSize), rax); |
4389 break; | 4389 break; |
4390 case KEYED_PROPERTY: | 4390 case KEYED_PROPERTY: |
4391 __ movq(Operand(rsp, 2 * kPointerSize), rax); | 4391 __ movp(Operand(rsp, 2 * kPointerSize), rax); |
4392 break; | 4392 break; |
4393 } | 4393 } |
4394 } | 4394 } |
4395 } | 4395 } |
4396 | 4396 |
4397 // Record position before stub call. | 4397 // Record position before stub call. |
4398 SetSourcePosition(expr->position()); | 4398 SetSourcePosition(expr->position()); |
4399 | 4399 |
4400 // Call stub for +1/-1. | 4400 // Call stub for +1/-1. |
4401 __ bind(&stub_call); | 4401 __ bind(&stub_call); |
4402 __ movq(rdx, rax); | 4402 __ movp(rdx, rax); |
4403 __ Move(rax, Smi::FromInt(1)); | 4403 __ Move(rax, Smi::FromInt(1)); |
4404 BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE); | 4404 BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE); |
4405 CallIC(stub.GetCode(isolate()), | 4405 CallIC(stub.GetCode(isolate()), |
4406 NOT_CONTEXTUAL, | 4406 NOT_CONTEXTUAL, |
4407 expr->CountBinOpFeedbackId()); | 4407 expr->CountBinOpFeedbackId()); |
4408 patch_site.EmitPatchInfo(); | 4408 patch_site.EmitPatchInfo(); |
4409 __ bind(&done); | 4409 __ bind(&done); |
4410 | 4410 |
4411 // Store the value returned in rax. | 4411 // Store the value returned in rax. |
4412 switch (assign_type) { | 4412 switch (assign_type) { |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4468 | 4468 |
4469 | 4469 |
4470 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { | 4470 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { |
4471 VariableProxy* proxy = expr->AsVariableProxy(); | 4471 VariableProxy* proxy = expr->AsVariableProxy(); |
4472 ASSERT(!context()->IsEffect()); | 4472 ASSERT(!context()->IsEffect()); |
4473 ASSERT(!context()->IsTest()); | 4473 ASSERT(!context()->IsTest()); |
4474 | 4474 |
4475 if (proxy != NULL && proxy->var()->IsUnallocated()) { | 4475 if (proxy != NULL && proxy->var()->IsUnallocated()) { |
4476 Comment cmnt(masm_, "Global variable"); | 4476 Comment cmnt(masm_, "Global variable"); |
4477 __ Move(rcx, proxy->name()); | 4477 __ Move(rcx, proxy->name()); |
4478 __ movq(rax, GlobalObjectOperand()); | 4478 __ movp(rax, GlobalObjectOperand()); |
4479 // Use a regular load, not a contextual load, to avoid a reference | 4479 // Use a regular load, not a contextual load, to avoid a reference |
4480 // error. | 4480 // error. |
4481 CallLoadIC(NOT_CONTEXTUAL); | 4481 CallLoadIC(NOT_CONTEXTUAL); |
4482 PrepareForBailout(expr, TOS_REG); | 4482 PrepareForBailout(expr, TOS_REG); |
4483 context()->Plug(rax); | 4483 context()->Plug(rax); |
4484 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { | 4484 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { |
4485 Label done, slow; | 4485 Label done, slow; |
4486 | 4486 |
4487 // Generate code for loading from variables potentially shadowed | 4487 // Generate code for loading from variables potentially shadowed |
4488 // by eval-introduced variables. | 4488 // by eval-introduced variables. |
(...skipping 24 matching lines...) Expand all Loading... |
4513 context()->PrepareTest(&materialize_true, &materialize_false, | 4513 context()->PrepareTest(&materialize_true, &materialize_false, |
4514 &if_true, &if_false, &fall_through); | 4514 &if_true, &if_false, &fall_through); |
4515 | 4515 |
4516 { AccumulatorValueContext context(this); | 4516 { AccumulatorValueContext context(this); |
4517 VisitForTypeofValue(sub_expr); | 4517 VisitForTypeofValue(sub_expr); |
4518 } | 4518 } |
4519 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 4519 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
4520 | 4520 |
4521 if (check->Equals(isolate()->heap()->number_string())) { | 4521 if (check->Equals(isolate()->heap()->number_string())) { |
4522 __ JumpIfSmi(rax, if_true); | 4522 __ JumpIfSmi(rax, if_true); |
4523 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset)); | 4523 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset)); |
4524 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex); | 4524 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex); |
4525 Split(equal, if_true, if_false, fall_through); | 4525 Split(equal, if_true, if_false, fall_through); |
4526 } else if (check->Equals(isolate()->heap()->string_string())) { | 4526 } else if (check->Equals(isolate()->heap()->string_string())) { |
4527 __ JumpIfSmi(rax, if_false); | 4527 __ JumpIfSmi(rax, if_false); |
4528 // Check for undetectable objects => false. | 4528 // Check for undetectable objects => false. |
4529 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx); | 4529 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx); |
4530 __ j(above_equal, if_false); | 4530 __ j(above_equal, if_false); |
4531 __ testb(FieldOperand(rdx, Map::kBitFieldOffset), | 4531 __ testb(FieldOperand(rdx, Map::kBitFieldOffset), |
4532 Immediate(1 << Map::kIsUndetectable)); | 4532 Immediate(1 << Map::kIsUndetectable)); |
4533 Split(zero, if_true, if_false, fall_through); | 4533 Split(zero, if_true, if_false, fall_through); |
4534 } else if (check->Equals(isolate()->heap()->symbol_string())) { | 4534 } else if (check->Equals(isolate()->heap()->symbol_string())) { |
4535 __ JumpIfSmi(rax, if_false); | 4535 __ JumpIfSmi(rax, if_false); |
4536 __ CmpObjectType(rax, SYMBOL_TYPE, rdx); | 4536 __ CmpObjectType(rax, SYMBOL_TYPE, rdx); |
4537 Split(equal, if_true, if_false, fall_through); | 4537 Split(equal, if_true, if_false, fall_through); |
4538 } else if (check->Equals(isolate()->heap()->boolean_string())) { | 4538 } else if (check->Equals(isolate()->heap()->boolean_string())) { |
4539 __ CompareRoot(rax, Heap::kTrueValueRootIndex); | 4539 __ CompareRoot(rax, Heap::kTrueValueRootIndex); |
4540 __ j(equal, if_true); | 4540 __ j(equal, if_true); |
4541 __ CompareRoot(rax, Heap::kFalseValueRootIndex); | 4541 __ CompareRoot(rax, Heap::kFalseValueRootIndex); |
4542 Split(equal, if_true, if_false, fall_through); | 4542 Split(equal, if_true, if_false, fall_through); |
4543 } else if (FLAG_harmony_typeof && | 4543 } else if (FLAG_harmony_typeof && |
4544 check->Equals(isolate()->heap()->null_string())) { | 4544 check->Equals(isolate()->heap()->null_string())) { |
4545 __ CompareRoot(rax, Heap::kNullValueRootIndex); | 4545 __ CompareRoot(rax, Heap::kNullValueRootIndex); |
4546 Split(equal, if_true, if_false, fall_through); | 4546 Split(equal, if_true, if_false, fall_through); |
4547 } else if (check->Equals(isolate()->heap()->undefined_string())) { | 4547 } else if (check->Equals(isolate()->heap()->undefined_string())) { |
4548 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); | 4548 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); |
4549 __ j(equal, if_true); | 4549 __ j(equal, if_true); |
4550 __ JumpIfSmi(rax, if_false); | 4550 __ JumpIfSmi(rax, if_false); |
4551 // Check for undetectable objects => true. | 4551 // Check for undetectable objects => true. |
4552 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset)); | 4552 __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset)); |
4553 __ testb(FieldOperand(rdx, Map::kBitFieldOffset), | 4553 __ testb(FieldOperand(rdx, Map::kBitFieldOffset), |
4554 Immediate(1 << Map::kIsUndetectable)); | 4554 Immediate(1 << Map::kIsUndetectable)); |
4555 Split(not_zero, if_true, if_false, fall_through); | 4555 Split(not_zero, if_true, if_false, fall_through); |
4556 } else if (check->Equals(isolate()->heap()->function_string())) { | 4556 } else if (check->Equals(isolate()->heap()->function_string())) { |
4557 __ JumpIfSmi(rax, if_false); | 4557 __ JumpIfSmi(rax, if_false); |
4558 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); | 4558 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); |
4559 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx); | 4559 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx); |
4560 __ j(equal, if_true); | 4560 __ j(equal, if_true); |
4561 __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE); | 4561 __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE); |
4562 Split(equal, if_true, if_false, fall_through); | 4562 Split(equal, if_true, if_false, fall_through); |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4622 | 4622 |
4623 default: { | 4623 default: { |
4624 VisitForAccumulatorValue(expr->right()); | 4624 VisitForAccumulatorValue(expr->right()); |
4625 Condition cc = CompareIC::ComputeCondition(op); | 4625 Condition cc = CompareIC::ComputeCondition(op); |
4626 __ pop(rdx); | 4626 __ pop(rdx); |
4627 | 4627 |
4628 bool inline_smi_code = ShouldInlineSmiCase(op); | 4628 bool inline_smi_code = ShouldInlineSmiCase(op); |
4629 JumpPatchSite patch_site(masm_); | 4629 JumpPatchSite patch_site(masm_); |
4630 if (inline_smi_code) { | 4630 if (inline_smi_code) { |
4631 Label slow_case; | 4631 Label slow_case; |
4632 __ movq(rcx, rdx); | 4632 __ movp(rcx, rdx); |
4633 __ or_(rcx, rax); | 4633 __ or_(rcx, rax); |
4634 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear); | 4634 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear); |
4635 __ cmpq(rdx, rax); | 4635 __ cmpq(rdx, rax); |
4636 Split(cc, if_true, if_false, NULL); | 4636 Split(cc, if_true, if_false, NULL); |
4637 __ bind(&slow_case); | 4637 __ bind(&slow_case); |
4638 } | 4638 } |
4639 | 4639 |
4640 // Record position and call the compare IC. | 4640 // Record position and call the compare IC. |
4641 SetSourcePosition(expr->position()); | 4641 SetSourcePosition(expr->position()); |
4642 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); | 4642 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); |
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4677 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); | 4677 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); |
4678 CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId()); | 4678 CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId()); |
4679 __ testq(rax, rax); | 4679 __ testq(rax, rax); |
4680 Split(not_zero, if_true, if_false, fall_through); | 4680 Split(not_zero, if_true, if_false, fall_through); |
4681 } | 4681 } |
4682 context()->Plug(if_true, if_false); | 4682 context()->Plug(if_true, if_false); |
4683 } | 4683 } |
4684 | 4684 |
4685 | 4685 |
4686 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { | 4686 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { |
4687 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 4687 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
4688 context()->Plug(rax); | 4688 context()->Plug(rax); |
4689 } | 4689 } |
4690 | 4690 |
4691 | 4691 |
4692 Register FullCodeGenerator::result_register() { | 4692 Register FullCodeGenerator::result_register() { |
4693 return rax; | 4693 return rax; |
4694 } | 4694 } |
4695 | 4695 |
4696 | 4696 |
4697 Register FullCodeGenerator::context_register() { | 4697 Register FullCodeGenerator::context_register() { |
4698 return rsi; | 4698 return rsi; |
4699 } | 4699 } |
4700 | 4700 |
4701 | 4701 |
4702 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { | 4702 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { |
4703 ASSERT(IsAligned(frame_offset, kPointerSize)); | 4703 ASSERT(IsAligned(frame_offset, kPointerSize)); |
4704 __ movq(Operand(rbp, frame_offset), value); | 4704 __ movp(Operand(rbp, frame_offset), value); |
4705 } | 4705 } |
4706 | 4706 |
4707 | 4707 |
4708 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { | 4708 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { |
4709 __ movq(dst, ContextOperand(rsi, context_index)); | 4709 __ movp(dst, ContextOperand(rsi, context_index)); |
4710 } | 4710 } |
4711 | 4711 |
4712 | 4712 |
4713 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { | 4713 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { |
4714 Scope* declaration_scope = scope()->DeclarationScope(); | 4714 Scope* declaration_scope = scope()->DeclarationScope(); |
4715 if (declaration_scope->is_global_scope() || | 4715 if (declaration_scope->is_global_scope() || |
4716 declaration_scope->is_module_scope()) { | 4716 declaration_scope->is_module_scope()) { |
4717 // Contexts nested in the native context have a canonical empty function | 4717 // Contexts nested in the native context have a canonical empty function |
4718 // as their closure, not the anonymous closure containing the global | 4718 // as their closure, not the anonymous closure containing the global |
4719 // code. Pass a smi sentinel and let the runtime look up the empty | 4719 // code. Pass a smi sentinel and let the runtime look up the empty |
(...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4808 int* context_length) { | 4808 int* context_length) { |
4809 // The macros used here must preserve the result register. | 4809 // The macros used here must preserve the result register. |
4810 | 4810 |
4811 // Because the handler block contains the context of the finally | 4811 // Because the handler block contains the context of the finally |
4812 // code, we can restore it directly from there for the finally code | 4812 // code, we can restore it directly from there for the finally code |
4813 // rather than iteratively unwinding contexts via their previous | 4813 // rather than iteratively unwinding contexts via their previous |
4814 // links. | 4814 // links. |
4815 __ Drop(*stack_depth); // Down to the handler block. | 4815 __ Drop(*stack_depth); // Down to the handler block. |
4816 if (*context_length > 0) { | 4816 if (*context_length > 0) { |
4817 // Restore the context to its dedicated register and the stack. | 4817 // Restore the context to its dedicated register and the stack. |
4818 __ movq(rsi, Operand(rsp, StackHandlerConstants::kContextOffset)); | 4818 __ movp(rsi, Operand(rsp, StackHandlerConstants::kContextOffset)); |
4819 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); | 4819 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); |
4820 } | 4820 } |
4821 __ PopTryHandler(); | 4821 __ PopTryHandler(); |
4822 __ call(finally_entry_); | 4822 __ call(finally_entry_); |
4823 | 4823 |
4824 *stack_depth = 0; | 4824 *stack_depth = 0; |
4825 *context_length = 0; | 4825 *context_length = 0; |
4826 return previous_; | 4826 return previous_; |
4827 } | 4827 } |
4828 | 4828 |
4829 | 4829 |
(...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4900 | 4900 |
4901 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(), | 4901 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(), |
4902 Assembler::target_address_at(call_target_address)); | 4902 Assembler::target_address_at(call_target_address)); |
4903 return OSR_AFTER_STACK_CHECK; | 4903 return OSR_AFTER_STACK_CHECK; |
4904 } | 4904 } |
4905 | 4905 |
4906 | 4906 |
4907 } } // namespace v8::internal | 4907 } } // namespace v8::internal |
4908 | 4908 |
4909 #endif // V8_TARGET_ARCH_X64 | 4909 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |