Chromium Code Reviews

Side by Side Diff: src/full-codegen/mips/full-codegen-mips.cc

Issue 1969423002: [Interpreter] Remove InterpreterExitTrampoline and replace with returning to the entry trampoline. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix typo on Arm64 (created 4 years, 7 months ago)
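
Note: nearly every change in this file is mechanical. Calls to PrepareForBailout / PrepareForBailoutForId that previously passed the bare TOS_REG and NO_REGISTERS constants now pass the scoped Deoptimizer::BailoutState values instead. A minimal sketch of the shape of the change, assuming the enum is declared on Deoptimizer roughly as below (the real declaration lives elsewhere in the tree and may differ):

  // Sketch only: assumed declaration, not copied from this CL.
  class Deoptimizer {
   public:
    // Describes what must be live when execution bails out at this point.
    enum class BailoutState {
      NO_REGISTERS,  // no registers carry live values at the bailout point
      TOS_REGISTER   // the top-of-stack value is live in the result register (v0 on MIPS)
    };
  };

  // Before this patch:
  //   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // After this patch:
  //   PrepareForBailoutForId(stmt->EntryId(),
  //                          Deoptimizer::BailoutState::NO_REGISTERS);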
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_MIPS 5 #if V8_TARGET_ARCH_MIPS
6 6
7 // Note on Mips implementation: 7 // Note on Mips implementation:
8 // 8 //
9 // The result_register() for mips is the 'v0' register, which is defined 9 // The result_register() for mips is the 'v0' register, which is defined
10 // by the ABI to contain function return values. However, the first 10 // by the ABI to contain function return values. However, the first
(...skipping 168 matching lines...)
179 // Possibly allocate a local context. 179 // Possibly allocate a local context.
180 if (info->scope()->num_heap_slots() > 0) { 180 if (info->scope()->num_heap_slots() > 0) {
181 Comment cmnt(masm_, "[ Allocate context"); 181 Comment cmnt(masm_, "[ Allocate context");
182 // Argument to NewContext is the function, which is still in a1. 182 // Argument to NewContext is the function, which is still in a1.
183 bool need_write_barrier = true; 183 bool need_write_barrier = true;
184 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 184 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
185 if (info->scope()->is_script_scope()) { 185 if (info->scope()->is_script_scope()) {
186 __ push(a1); 186 __ push(a1);
187 __ Push(info->scope()->GetScopeInfo(info->isolate())); 187 __ Push(info->scope()->GetScopeInfo(info->isolate()));
188 __ CallRuntime(Runtime::kNewScriptContext); 188 __ CallRuntime(Runtime::kNewScriptContext);
189 PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG); 189 PrepareForBailoutForId(BailoutId::ScriptContext(),
190 Deoptimizer::BailoutState::TOS_REGISTER);
190 // The new target value is not used, clobbering is safe. 191 // The new target value is not used, clobbering is safe.
191 DCHECK_NULL(info->scope()->new_target_var()); 192 DCHECK_NULL(info->scope()->new_target_var());
192 } else { 193 } else {
193 if (info->scope()->new_target_var() != nullptr) { 194 if (info->scope()->new_target_var() != nullptr) {
194 __ push(a3); // Preserve new target. 195 __ push(a3); // Preserve new target.
195 } 196 }
196 if (slots <= FastNewContextStub::kMaximumSlots) { 197 if (slots <= FastNewContextStub::kMaximumSlots) {
197 FastNewContextStub stub(isolate(), slots); 198 FastNewContextStub stub(isolate(), slots);
198 __ CallStub(&stub); 199 __ CallStub(&stub);
199 // Result of FastNewContextStub is always in new space. 200 // Result of FastNewContextStub is always in new space.
(...skipping 35 matching lines...)
235 __ Abort(kExpectedNewSpaceObject); 236 __ Abort(kExpectedNewSpaceObject);
236 __ bind(&done); 237 __ bind(&done);
237 } 238 }
238 } 239 }
239 } 240 }
240 } 241 }
241 242
242 // Register holding this function and new target are both trashed in case we 243 // Register holding this function and new target are both trashed in case we
243 // bailout here. But since that can happen only when new target is not used 244 // bailout here. But since that can happen only when new target is not used
244 // and we allocate a context, the value of |function_in_register| is correct. 245 // and we allocate a context, the value of |function_in_register| is correct.
245 PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS); 246 PrepareForBailoutForId(BailoutId::FunctionContext(),
247 Deoptimizer::BailoutState::NO_REGISTERS);
246 248
247 // Possibly set up a local binding to the this function which is used in 249 // Possibly set up a local binding to the this function which is used in
248 // derived constructors with super calls. 250 // derived constructors with super calls.
249 Variable* this_function_var = scope()->this_function_var(); 251 Variable* this_function_var = scope()->this_function_var();
250 if (this_function_var != nullptr) { 252 if (this_function_var != nullptr) {
251 Comment cmnt(masm_, "[ This function"); 253 Comment cmnt(masm_, "[ This function");
252 if (!function_in_register_a1) { 254 if (!function_in_register_a1) {
253 __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 255 __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
254 // The write barrier clobbers register again, keep it marked as such. 256 // The write barrier clobbers register again, keep it marked as such.
255 } 257 }
(...skipping 42 matching lines...)
298 300
299 SetVar(arguments, v0, a1, a2); 301 SetVar(arguments, v0, a1, a2);
300 } 302 }
301 303
302 if (FLAG_trace) { 304 if (FLAG_trace) {
303 __ CallRuntime(Runtime::kTraceEnter); 305 __ CallRuntime(Runtime::kTraceEnter);
304 } 306 }
305 307
306 // Visit the declarations and body unless there is an illegal 308 // Visit the declarations and body unless there is an illegal
307 // redeclaration. 309 // redeclaration.
308 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); 310 PrepareForBailoutForId(BailoutId::FunctionEntry(),
311 Deoptimizer::BailoutState::NO_REGISTERS);
309 { 312 {
310 Comment cmnt(masm_, "[ Declarations"); 313 Comment cmnt(masm_, "[ Declarations");
311 VisitDeclarations(scope()->declarations()); 314 VisitDeclarations(scope()->declarations());
312 } 315 }
313 316
314 // Assert that the declarations do not use ICs. Otherwise the debugger 317 // Assert that the declarations do not use ICs. Otherwise the debugger
315 // won't be able to redirect a PC at an IC to the correct IC in newly 318 // won't be able to redirect a PC at an IC to the correct IC in newly
316 // recompiled code. 319 // recompiled code.
317 DCHECK_EQ(0, ic_total_count_); 320 DCHECK_EQ(0, ic_total_count_);
318 321
319 { 322 {
320 Comment cmnt(masm_, "[ Stack check"); 323 Comment cmnt(masm_, "[ Stack check");
321 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); 324 PrepareForBailoutForId(BailoutId::Declarations(),
325 Deoptimizer::BailoutState::NO_REGISTERS);
322 Label ok; 326 Label ok;
323 __ LoadRoot(at, Heap::kStackLimitRootIndex); 327 __ LoadRoot(at, Heap::kStackLimitRootIndex);
324 __ Branch(&ok, hs, sp, Operand(at)); 328 __ Branch(&ok, hs, sp, Operand(at));
325 Handle<Code> stack_check = isolate()->builtins()->StackCheck(); 329 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
326 PredictableCodeSizeScope predictable( 330 PredictableCodeSizeScope predictable(
327 masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET)); 331 masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
328 __ Call(stack_check, RelocInfo::CODE_TARGET); 332 __ Call(stack_check, RelocInfo::CODE_TARGET);
329 __ bind(&ok); 333 __ bind(&ok);
330 } 334 }
331 335
(...skipping 58 matching lines...)
390 __ beq(at, zero_reg, &ok); 394 __ beq(at, zero_reg, &ok);
391 // Call will emit a li t9 first, so it is safe to use the delay slot. 395 // Call will emit a li t9 first, so it is safe to use the delay slot.
392 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); 396 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
393 // Record a mapping of this PC offset to the OSR id. This is used to find 397 // Record a mapping of this PC offset to the OSR id. This is used to find
394 // the AST id from the unoptimized code in order to use it as a key into 398 // the AST id from the unoptimized code in order to use it as a key into
395 // the deoptimization input data found in the optimized code. 399 // the deoptimization input data found in the optimized code.
396 RecordBackEdge(stmt->OsrEntryId()); 400 RecordBackEdge(stmt->OsrEntryId());
397 EmitProfilingCounterReset(); 401 EmitProfilingCounterReset();
398 402
399 __ bind(&ok); 403 __ bind(&ok);
400 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 404 PrepareForBailoutForId(stmt->EntryId(),
405 Deoptimizer::BailoutState::NO_REGISTERS);
401 // Record a mapping of the OSR id to this PC. This is used if the OSR 406 // Record a mapping of the OSR id to this PC. This is used if the OSR
402 // entry becomes the target of a bailout. We don't expect it to be, but 407 // entry becomes the target of a bailout. We don't expect it to be, but
403 // we want it to work if it is. 408 // we want it to work if it is.
404 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); 409 PrepareForBailoutForId(stmt->OsrEntryId(),
410 Deoptimizer::BailoutState::NO_REGISTERS);
405 } 411 }
406 412
407 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence( 413 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
408 bool is_tail_call) { 414 bool is_tail_call) {
409 // Pretend that the exit is a backwards jump to the entry. 415 // Pretend that the exit is a backwards jump to the entry.
410 int weight = 1; 416 int weight = 1;
411 if (info_->ShouldSelfOptimize()) { 417 if (info_->ShouldSelfOptimize()) {
412 weight = FLAG_interrupt_budget / FLAG_self_opt_count; 418 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
413 } else { 419 } else {
414 int distance = masm_->pc_offset(); 420 int distance = masm_->pc_offset();
(...skipping 305 matching lines...)
720 bool should_normalize, 726 bool should_normalize,
721 Label* if_true, 727 Label* if_true,
722 Label* if_false) { 728 Label* if_false) {
723 // Only prepare for bailouts before splits if we're in a test 729 // Only prepare for bailouts before splits if we're in a test
724 // context. Otherwise, we let the Visit function deal with the 730 // context. Otherwise, we let the Visit function deal with the
725 // preparation to avoid preparing with the same AST id twice. 731 // preparation to avoid preparing with the same AST id twice.
726 if (!context()->IsTest()) return; 732 if (!context()->IsTest()) return;
727 733
728 Label skip; 734 Label skip;
729 if (should_normalize) __ Branch(&skip); 735 if (should_normalize) __ Branch(&skip);
730 PrepareForBailout(expr, TOS_REG); 736 PrepareForBailout(expr, Deoptimizer::BailoutState::TOS_REGISTER);
731 if (should_normalize) { 737 if (should_normalize) {
732 __ LoadRoot(t0, Heap::kTrueValueRootIndex); 738 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
733 Split(eq, a0, Operand(t0), if_true, if_false, NULL); 739 Split(eq, a0, Operand(t0), if_true, if_false, NULL);
734 __ bind(&skip); 740 __ bind(&skip);
735 } 741 }
736 } 742 }
737 743
738 744
739 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { 745 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
740 // The variable in the declaration always resides in the current function 746 // The variable in the declaration always resides in the current function
(...skipping 38 matching lines...)
779 } 785 }
780 break; 786 break;
781 787
782 case VariableLocation::CONTEXT: 788 case VariableLocation::CONTEXT:
783 if (hole_init) { 789 if (hole_init) {
784 Comment cmnt(masm_, "[ VariableDeclaration"); 790 Comment cmnt(masm_, "[ VariableDeclaration");
785 EmitDebugCheckDeclarationContext(variable); 791 EmitDebugCheckDeclarationContext(variable);
786 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 792 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
787 __ sw(at, ContextMemOperand(cp, variable->index())); 793 __ sw(at, ContextMemOperand(cp, variable->index()));
788 // No write barrier since the_hole_value is in old space. 794 // No write barrier since the_hole_value is in old space.
789 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 795 PrepareForBailoutForId(proxy->id(),
796 Deoptimizer::BailoutState::NO_REGISTERS);
790 } 797 }
791 break; 798 break;
792 799
793 case VariableLocation::LOOKUP: { 800 case VariableLocation::LOOKUP: {
794 Comment cmnt(masm_, "[ VariableDeclaration"); 801 Comment cmnt(masm_, "[ VariableDeclaration");
795 __ li(a2, Operand(variable->name())); 802 __ li(a2, Operand(variable->name()));
796 // Declaration nodes are always introduced in one of four modes. 803 // Declaration nodes are always introduced in one of four modes.
797 DCHECK(IsDeclaredVariableMode(mode)); 804 DCHECK(IsDeclaredVariableMode(mode));
798 // Push initial value, if any. 805 // Push initial value, if any.
799 // Note: For variables we must not push an initial value (such as 806 // Note: For variables we must not push an initial value (such as
800 // 'undefined') because we may have a (legal) redeclaration and we 807 // 'undefined') because we may have a (legal) redeclaration and we
801 // must not destroy the current value. 808 // must not destroy the current value.
802 if (hole_init) { 809 if (hole_init) {
803 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex); 810 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
804 } else { 811 } else {
805 DCHECK(Smi::FromInt(0) == 0); 812 DCHECK(Smi::FromInt(0) == 0);
806 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value. 813 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
807 } 814 }
808 __ Push(a2, a0); 815 __ Push(a2, a0);
809 __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes())); 816 __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
810 __ CallRuntime(Runtime::kDeclareLookupSlot); 817 __ CallRuntime(Runtime::kDeclareLookupSlot);
811 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 818 PrepareForBailoutForId(proxy->id(),
819 Deoptimizer::BailoutState::NO_REGISTERS);
812 break; 820 break;
813 } 821 }
814 } 822 }
815 } 823 }
816 824
817 825
818 void FullCodeGenerator::VisitFunctionDeclaration( 826 void FullCodeGenerator::VisitFunctionDeclaration(
819 FunctionDeclaration* declaration) { 827 FunctionDeclaration* declaration) {
820 VariableProxy* proxy = declaration->proxy(); 828 VariableProxy* proxy = declaration->proxy();
821 Variable* variable = proxy->var(); 829 Variable* variable = proxy->var();
(...skipping 25 matching lines...)
847 int offset = Context::SlotOffset(variable->index()); 855 int offset = Context::SlotOffset(variable->index());
848 // We know that we have written a function, which is not a smi. 856 // We know that we have written a function, which is not a smi.
849 __ RecordWriteContextSlot(cp, 857 __ RecordWriteContextSlot(cp,
850 offset, 858 offset,
851 result_register(), 859 result_register(),
852 a2, 860 a2,
853 kRAHasBeenSaved, 861 kRAHasBeenSaved,
854 kDontSaveFPRegs, 862 kDontSaveFPRegs,
855 EMIT_REMEMBERED_SET, 863 EMIT_REMEMBERED_SET,
856 OMIT_SMI_CHECK); 864 OMIT_SMI_CHECK);
857 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 865 PrepareForBailoutForId(proxy->id(),
866 Deoptimizer::BailoutState::NO_REGISTERS);
858 break; 867 break;
859 } 868 }
860 869
861 case VariableLocation::LOOKUP: { 870 case VariableLocation::LOOKUP: {
862 Comment cmnt(masm_, "[ FunctionDeclaration"); 871 Comment cmnt(masm_, "[ FunctionDeclaration");
863 __ li(a2, Operand(variable->name())); 872 __ li(a2, Operand(variable->name()));
864 PushOperand(a2); 873 PushOperand(a2);
865 // Push initial value for function declaration. 874 // Push initial value for function declaration.
866 VisitForStackValue(declaration->fun()); 875 VisitForStackValue(declaration->fun());
867 PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes())); 876 PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
868 CallRuntimeWithOperands(Runtime::kDeclareLookupSlot); 877 CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
869 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 878 PrepareForBailoutForId(proxy->id(),
879 Deoptimizer::BailoutState::NO_REGISTERS);
870 break; 880 break;
871 } 881 }
872 } 882 }
873 } 883 }
874 884
875 885
876 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 886 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
877 // Call the runtime to declare the globals. 887 // Call the runtime to declare the globals.
878 __ li(a1, Operand(pairs)); 888 __ li(a1, Operand(pairs));
879 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags()))); 889 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
(...skipping 11 matching lines...)
891 } 901 }
892 902
893 903
894 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { 904 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
895 Comment cmnt(masm_, "[ SwitchStatement"); 905 Comment cmnt(masm_, "[ SwitchStatement");
896 Breakable nested_statement(this, stmt); 906 Breakable nested_statement(this, stmt);
897 SetStatementPosition(stmt); 907 SetStatementPosition(stmt);
898 908
899 // Keep the switch value on the stack until a case matches. 909 // Keep the switch value on the stack until a case matches.
900 VisitForStackValue(stmt->tag()); 910 VisitForStackValue(stmt->tag());
901 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 911 PrepareForBailoutForId(stmt->EntryId(),
912 Deoptimizer::BailoutState::NO_REGISTERS);
902 913
903 ZoneList<CaseClause*>* clauses = stmt->cases(); 914 ZoneList<CaseClause*>* clauses = stmt->cases();
904 CaseClause* default_clause = NULL; // Can occur anywhere in the list. 915 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
905 916
906 Label next_test; // Recycled for each test. 917 Label next_test; // Recycled for each test.
907 // Compile all the tests with branches to their bodies. 918 // Compile all the tests with branches to their bodies.
908 for (int i = 0; i < clauses->length(); i++) { 919 for (int i = 0; i < clauses->length(); i++) {
909 CaseClause* clause = clauses->at(i); 920 CaseClause* clause = clauses->at(i);
910 clause->body_target()->Unuse(); 921 clause->body_target()->Unuse();
911 922
(...skipping 29 matching lines...)
941 952
942 // Record position before stub call for type feedback. 953 // Record position before stub call for type feedback.
943 SetExpressionPosition(clause); 954 SetExpressionPosition(clause);
944 Handle<Code> ic = 955 Handle<Code> ic =
945 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code(); 956 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
946 CallIC(ic, clause->CompareId()); 957 CallIC(ic, clause->CompareId());
947 patch_site.EmitPatchInfo(); 958 patch_site.EmitPatchInfo();
948 959
949 Label skip; 960 Label skip;
950 __ Branch(&skip); 961 __ Branch(&skip);
951 PrepareForBailout(clause, TOS_REG); 962 PrepareForBailout(clause, Deoptimizer::BailoutState::TOS_REGISTER);
952 __ LoadRoot(at, Heap::kTrueValueRootIndex); 963 __ LoadRoot(at, Heap::kTrueValueRootIndex);
953 __ Branch(&next_test, ne, v0, Operand(at)); 964 __ Branch(&next_test, ne, v0, Operand(at));
954 __ Drop(1); 965 __ Drop(1);
955 __ Branch(clause->body_target()); 966 __ Branch(clause->body_target());
956 __ bind(&skip); 967 __ bind(&skip);
957 968
958 __ Branch(&next_test, ne, v0, Operand(zero_reg)); 969 __ Branch(&next_test, ne, v0, Operand(zero_reg));
959 __ Drop(1); // Switch value is no longer needed. 970 __ Drop(1); // Switch value is no longer needed.
960 __ Branch(clause->body_target()); 971 __ Branch(clause->body_target());
961 } 972 }
962 973
963 // Discard the test value and jump to the default if present, otherwise to 974 // Discard the test value and jump to the default if present, otherwise to
964 // the end of the statement. 975 // the end of the statement.
965 __ bind(&next_test); 976 __ bind(&next_test);
966 DropOperands(1); // Switch value is no longer needed. 977 DropOperands(1); // Switch value is no longer needed.
967 if (default_clause == NULL) { 978 if (default_clause == NULL) {
968 __ Branch(nested_statement.break_label()); 979 __ Branch(nested_statement.break_label());
969 } else { 980 } else {
970 __ Branch(default_clause->body_target()); 981 __ Branch(default_clause->body_target());
971 } 982 }
972 983
973 // Compile all the case bodies. 984 // Compile all the case bodies.
974 for (int i = 0; i < clauses->length(); i++) { 985 for (int i = 0; i < clauses->length(); i++) {
975 Comment cmnt(masm_, "[ Case body"); 986 Comment cmnt(masm_, "[ Case body");
976 CaseClause* clause = clauses->at(i); 987 CaseClause* clause = clauses->at(i);
977 __ bind(clause->body_target()); 988 __ bind(clause->body_target());
978 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS); 989 PrepareForBailoutForId(clause->EntryId(),
990 Deoptimizer::BailoutState::NO_REGISTERS);
979 VisitStatements(clause->statements()); 991 VisitStatements(clause->statements());
980 } 992 }
981 993
982 __ bind(nested_statement.break_label()); 994 __ bind(nested_statement.break_label());
983 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 995 PrepareForBailoutForId(stmt->ExitId(),
996 Deoptimizer::BailoutState::NO_REGISTERS);
984 } 997 }
985 998
986 999
987 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { 1000 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
988 Comment cmnt(masm_, "[ ForInStatement"); 1001 Comment cmnt(masm_, "[ ForInStatement");
989 SetStatementPosition(stmt, SKIP_BREAK); 1002 SetStatementPosition(stmt, SKIP_BREAK);
990 1003
991 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot(); 1004 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
992 1005
993 // Get the object to enumerate over. 1006 // Get the object to enumerate over.
(...skipping 15 matching lines...)
1009 Operand(FIRST_JS_RECEIVER_TYPE)); 1022 Operand(FIRST_JS_RECEIVER_TYPE));
1010 __ LoadRoot(at, Heap::kNullValueRootIndex); // In delay slot. 1023 __ LoadRoot(at, Heap::kNullValueRootIndex); // In delay slot.
1011 __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at)); 1024 __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
1012 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); // In delay slot. 1025 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); // In delay slot.
1013 __ Branch(&exit, eq, a0, Operand(at)); 1026 __ Branch(&exit, eq, a0, Operand(at));
1014 __ bind(&convert); 1027 __ bind(&convert);
1015 ToObjectStub stub(isolate()); 1028 ToObjectStub stub(isolate());
1016 __ CallStub(&stub); 1029 __ CallStub(&stub);
1017 __ mov(a0, v0); 1030 __ mov(a0, v0);
1018 __ bind(&done_convert); 1031 __ bind(&done_convert);
1019 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG); 1032 PrepareForBailoutForId(stmt->ToObjectId(),
1033 Deoptimizer::BailoutState::TOS_REGISTER);
1020 __ push(a0); 1034 __ push(a0);
1021 1035
1022 // Check cache validity in generated code. If we cannot guarantee cache 1036 // Check cache validity in generated code. If we cannot guarantee cache
1023 // validity, call the runtime system to check cache validity or get the 1037 // validity, call the runtime system to check cache validity or get the
1024 // property names in a fixed array. Note: Proxies never have an enum cache, 1038 // property names in a fixed array. Note: Proxies never have an enum cache,
1025 // so will always take the slow path. 1039 // so will always take the slow path.
1026 Label call_runtime; 1040 Label call_runtime;
1027 __ CheckEnumCache(&call_runtime); 1041 __ CheckEnumCache(&call_runtime);
1028 1042
1029 // The enum cache is valid. Load the map of the object being 1043 // The enum cache is valid. Load the map of the object being
1030 // iterated over and use the cache for the iteration. 1044 // iterated over and use the cache for the iteration.
1031 Label use_cache; 1045 Label use_cache;
1032 __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset)); 1046 __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1033 __ Branch(&use_cache); 1047 __ Branch(&use_cache);
1034 1048
1035 // Get the set of properties to enumerate. 1049 // Get the set of properties to enumerate.
1036 __ bind(&call_runtime); 1050 __ bind(&call_runtime);
1037 __ push(a0); // Duplicate the enumerable object on the stack. 1051 __ push(a0); // Duplicate the enumerable object on the stack.
1038 __ CallRuntime(Runtime::kForInEnumerate); 1052 __ CallRuntime(Runtime::kForInEnumerate);
1039 PrepareForBailoutForId(stmt->EnumId(), TOS_REG); 1053 PrepareForBailoutForId(stmt->EnumId(),
1054 Deoptimizer::BailoutState::TOS_REGISTER);
1040 1055
1041 // If we got a map from the runtime call, we can do a fast 1056 // If we got a map from the runtime call, we can do a fast
1042 // modification check. Otherwise, we got a fixed array, and we have 1057 // modification check. Otherwise, we got a fixed array, and we have
1043 // to do a slow check. 1058 // to do a slow check.
1044 Label fixed_array; 1059 Label fixed_array;
1045 __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); 1060 __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1046 __ LoadRoot(at, Heap::kMetaMapRootIndex); 1061 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1047 __ Branch(&fixed_array, ne, a2, Operand(at)); 1062 __ Branch(&fixed_array, ne, a2, Operand(at));
1048 1063
1049 // We got a map in register v0. Get the enumeration cache from it. 1064 // We got a map in register v0. Get the enumeration cache from it.
(...skipping 17 matching lines...)
1067 __ Drop(1); 1082 __ Drop(1);
1068 __ jmp(&exit); 1083 __ jmp(&exit);
1069 1084
1070 // We got a fixed array in register v0. Iterate through that. 1085 // We got a fixed array in register v0. Iterate through that.
1071 __ bind(&fixed_array); 1086 __ bind(&fixed_array);
1072 1087
1073 __ li(a1, Operand(Smi::FromInt(1))); // Smi(1) indicates slow check 1088 __ li(a1, Operand(Smi::FromInt(1))); // Smi(1) indicates slow check
1074 __ Push(a1, v0); // Smi and array 1089 __ Push(a1, v0); // Smi and array
1075 __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset)); 1090 __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1076 __ Push(a1); // Fixed array length (as smi). 1091 __ Push(a1); // Fixed array length (as smi).
1077 PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS); 1092 PrepareForBailoutForId(stmt->PrepareId(),
1093 Deoptimizer::BailoutState::NO_REGISTERS);
1078 __ li(a0, Operand(Smi::FromInt(0))); 1094 __ li(a0, Operand(Smi::FromInt(0)));
1079 __ Push(a0); // Initial index. 1095 __ Push(a0); // Initial index.
1080 1096
1081 // Generate code for doing the condition check. 1097 // Generate code for doing the condition check.
1082 __ bind(&loop); 1098 __ bind(&loop);
1083 SetExpressionAsStatementPosition(stmt->each()); 1099 SetExpressionAsStatementPosition(stmt->each());
1084 1100
1085 // Load the current count to a0, load the length to a1. 1101 // Load the current count to a0, load the length to a1.
1086 __ lw(a0, MemOperand(sp, 0 * kPointerSize)); 1102 __ lw(a0, MemOperand(sp, 0 * kPointerSize));
1087 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); 1103 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
(...skipping 20 matching lines...)
1108 int const vector_index = SmiFromSlot(slot)->value(); 1124 int const vector_index = SmiFromSlot(slot)->value();
1109 __ EmitLoadTypeFeedbackVector(a0); 1125 __ EmitLoadTypeFeedbackVector(a0);
1110 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate()))); 1126 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1111 __ sw(a2, FieldMemOperand(a0, FixedArray::OffsetOfElementAt(vector_index))); 1127 __ sw(a2, FieldMemOperand(a0, FixedArray::OffsetOfElementAt(vector_index)));
1112 1128
1113 // Convert the entry to a string or (smi) 0 if it isn't a property 1129 // Convert the entry to a string or (smi) 0 if it isn't a property
1114 // any more. If the property has been removed while iterating, we 1130 // any more. If the property has been removed while iterating, we
1115 // just skip it. 1131 // just skip it.
1116 __ Push(a1, a3); // Enumerable and current entry. 1132 __ Push(a1, a3); // Enumerable and current entry.
1117 __ CallRuntime(Runtime::kForInFilter); 1133 __ CallRuntime(Runtime::kForInFilter);
1118 PrepareForBailoutForId(stmt->FilterId(), TOS_REG); 1134 PrepareForBailoutForId(stmt->FilterId(),
1135 Deoptimizer::BailoutState::TOS_REGISTER);
1119 __ mov(a3, result_register()); 1136 __ mov(a3, result_register());
1120 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 1137 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1121 __ Branch(loop_statement.continue_label(), eq, a3, Operand(at)); 1138 __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));
1122 1139
1123 // Update the 'each' property or variable from the possibly filtered 1140 // Update the 'each' property or variable from the possibly filtered
1124 // entry in register a3. 1141 // entry in register a3.
1125 __ bind(&update_each); 1142 __ bind(&update_each);
1126 __ mov(result_register(), a3); 1143 __ mov(result_register(), a3);
1127 // Perform the assignment as if via '='. 1144 // Perform the assignment as if via '='.
1128 { EffectContext context(this); 1145 { EffectContext context(this);
1129 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot()); 1146 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1130 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS); 1147 PrepareForBailoutForId(stmt->AssignmentId(),
1148 Deoptimizer::BailoutState::NO_REGISTERS);
1131 } 1149 }
1132 1150
1133 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body(). 1151 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1134 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); 1152 PrepareForBailoutForId(stmt->BodyId(),
1153 Deoptimizer::BailoutState::NO_REGISTERS);
1135 // Generate code for the body of the loop. 1154 // Generate code for the body of the loop.
1136 Visit(stmt->body()); 1155 Visit(stmt->body());
1137 1156
1138 // Generate code for the going to the next element by incrementing 1157 // Generate code for the going to the next element by incrementing
1139 // the index (smi) stored on top of the stack. 1158 // the index (smi) stored on top of the stack.
1140 __ bind(loop_statement.continue_label()); 1159 __ bind(loop_statement.continue_label());
1141 __ pop(a0); 1160 __ pop(a0);
1142 __ Addu(a0, a0, Operand(Smi::FromInt(1))); 1161 __ Addu(a0, a0, Operand(Smi::FromInt(1)));
1143 __ push(a0); 1162 __ push(a0);
1144 1163
1145 EmitBackEdgeBookkeeping(stmt, &loop); 1164 EmitBackEdgeBookkeeping(stmt, &loop);
1146 __ Branch(&loop); 1165 __ Branch(&loop);
1147 1166
1148 // Remove the pointers stored on the stack. 1167 // Remove the pointers stored on the stack.
1149 __ bind(loop_statement.break_label()); 1168 __ bind(loop_statement.break_label());
1150 DropOperands(5); 1169 DropOperands(5);
1151 1170
1152 // Exit and decrement the loop depth. 1171 // Exit and decrement the loop depth.
1153 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1172 PrepareForBailoutForId(stmt->ExitId(),
1173 Deoptimizer::BailoutState::NO_REGISTERS);
1154 __ bind(&exit); 1174 __ bind(&exit);
1155 decrement_loop_depth(); 1175 decrement_loop_depth();
1156 } 1176 }
1157 1177
1158 1178
1159 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset, 1179 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1160 FeedbackVectorSlot slot) { 1180 FeedbackVectorSlot slot) {
1161 DCHECK(NeedsHomeObject(initializer)); 1181 DCHECK(NeedsHomeObject(initializer));
1162 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); 1182 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1163 __ li(StoreDescriptor::NameRegister(), 1183 __ li(StoreDescriptor::NameRegister(),
(...skipping 138 matching lines...)
1302 __ li(LoadDescriptor::SlotRegister(), 1322 __ li(LoadDescriptor::SlotRegister(),
1303 Operand(SmiFromSlot(proxy->VariableFeedbackSlot()))); 1323 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1304 CallLoadIC(typeof_mode); 1324 CallLoadIC(typeof_mode);
1305 } 1325 }
1306 1326
1307 1327
1308 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy, 1328 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1309 TypeofMode typeof_mode) { 1329 TypeofMode typeof_mode) {
1310 // Record position before possible IC call. 1330 // Record position before possible IC call.
1311 SetExpressionPosition(proxy); 1331 SetExpressionPosition(proxy);
1312 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS); 1332 PrepareForBailoutForId(proxy->BeforeId(),
1333 Deoptimizer::BailoutState::NO_REGISTERS);
1313 Variable* var = proxy->var(); 1334 Variable* var = proxy->var();
1314 1335
1315 // Three cases: global variables, lookup variables, and all other types of 1336 // Three cases: global variables, lookup variables, and all other types of
1316 // variables. 1337 // variables.
1317 switch (var->location()) { 1338 switch (var->location()) {
1318 case VariableLocation::GLOBAL: 1339 case VariableLocation::GLOBAL:
1319 case VariableLocation::UNALLOCATED: { 1340 case VariableLocation::UNALLOCATED: {
1320 Comment cmnt(masm_, "[ Global variable"); 1341 Comment cmnt(masm_, "[ Global variable");
1321 EmitGlobalVariableLoad(proxy, typeof_mode); 1342 EmitGlobalVariableLoad(proxy, typeof_mode);
1322 context()->Plug(v0); 1343 context()->Plug(v0);
(...skipping 86 matching lines...)
1409 __ li(a1, Operand(constant_properties)); 1430 __ li(a1, Operand(constant_properties));
1410 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags()))); 1431 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1411 if (MustCreateObjectLiteralWithRuntime(expr)) { 1432 if (MustCreateObjectLiteralWithRuntime(expr)) {
1412 __ Push(a3, a2, a1, a0); 1433 __ Push(a3, a2, a1, a0);
1413 __ CallRuntime(Runtime::kCreateObjectLiteral); 1434 __ CallRuntime(Runtime::kCreateObjectLiteral);
1414 } else { 1435 } else {
1415 FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); 1436 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1416 __ CallStub(&stub); 1437 __ CallStub(&stub);
1417 RestoreContext(); 1438 RestoreContext();
1418 } 1439 }
1419 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); 1440 PrepareForBailoutForId(expr->CreateLiteralId(),
1441 Deoptimizer::BailoutState::TOS_REGISTER);
1420 1442
1421 // If result_saved is true the result is on top of the stack. If 1443 // If result_saved is true the result is on top of the stack. If
1422 // result_saved is false the result is in v0. 1444 // result_saved is false the result is in v0.
1423 bool result_saved = false; 1445 bool result_saved = false;
1424 1446
1425 AccessorTable accessor_table(zone()); 1447 AccessorTable accessor_table(zone());
1426 int property_index = 0; 1448 int property_index = 0;
1427 for (; property_index < expr->properties()->length(); property_index++) { 1449 for (; property_index < expr->properties()->length(); property_index++) {
1428 ObjectLiteral::Property* property = expr->properties()->at(property_index); 1450 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1429 if (property->is_computed_name()) break; 1451 if (property->is_computed_name()) break;
(...skipping 16 matching lines...)
1446 // contains computed properties with an uninitialized value. 1468 // contains computed properties with an uninitialized value.
1447 if (key->value()->IsInternalizedString()) { 1469 if (key->value()->IsInternalizedString()) {
1448 if (property->emit_store()) { 1470 if (property->emit_store()) {
1449 VisitForAccumulatorValue(value); 1471 VisitForAccumulatorValue(value);
1450 __ mov(StoreDescriptor::ValueRegister(), result_register()); 1472 __ mov(StoreDescriptor::ValueRegister(), result_register());
1451 DCHECK(StoreDescriptor::ValueRegister().is(a0)); 1473 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1452 __ li(StoreDescriptor::NameRegister(), Operand(key->value())); 1474 __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1453 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); 1475 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1454 EmitLoadStoreICSlot(property->GetSlot(0)); 1476 EmitLoadStoreICSlot(property->GetSlot(0));
1455 CallStoreIC(); 1477 CallStoreIC();
1456 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1478 PrepareForBailoutForId(key->id(),
1479 Deoptimizer::BailoutState::NO_REGISTERS);
1457 1480
1458 if (NeedsHomeObject(value)) { 1481 if (NeedsHomeObject(value)) {
1459 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1)); 1482 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1460 } 1483 }
1461 } else { 1484 } else {
1462 VisitForEffect(value); 1485 VisitForEffect(value);
1463 } 1486 }
1464 break; 1487 break;
1465 } 1488 }
1466 // Duplicate receiver on stack. 1489 // Duplicate receiver on stack.
(...skipping 13 matching lines...)
1480 } 1503 }
1481 break; 1504 break;
1482 case ObjectLiteral::Property::PROTOTYPE: 1505 case ObjectLiteral::Property::PROTOTYPE:
1483 // Duplicate receiver on stack. 1506 // Duplicate receiver on stack.
1484 __ lw(a0, MemOperand(sp)); 1507 __ lw(a0, MemOperand(sp));
1485 PushOperand(a0); 1508 PushOperand(a0);
1486 VisitForStackValue(value); 1509 VisitForStackValue(value);
1487 DCHECK(property->emit_store()); 1510 DCHECK(property->emit_store());
1488 CallRuntimeWithOperands(Runtime::kInternalSetPrototype); 1511 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1489 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), 1512 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1490 NO_REGISTERS); 1513 Deoptimizer::BailoutState::NO_REGISTERS);
1491 break; 1514 break;
1492 case ObjectLiteral::Property::GETTER: 1515 case ObjectLiteral::Property::GETTER:
1493 if (property->emit_store()) { 1516 if (property->emit_store()) {
1494 accessor_table.lookup(key)->second->getter = property; 1517 accessor_table.lookup(key)->second->getter = property;
1495 } 1518 }
1496 break; 1519 break;
1497 case ObjectLiteral::Property::SETTER: 1520 case ObjectLiteral::Property::SETTER:
1498 if (property->emit_store()) { 1521 if (property->emit_store()) {
1499 accessor_table.lookup(key)->second->setter = property; 1522 accessor_table.lookup(key)->second->setter = property;
1500 } 1523 }
(...skipping 36 matching lines...)
1537 1560
1538 __ lw(a0, MemOperand(sp)); // Duplicate receiver. 1561 __ lw(a0, MemOperand(sp)); // Duplicate receiver.
1539 PushOperand(a0); 1562 PushOperand(a0);
1540 1563
1541 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) { 1564 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1542 DCHECK(!property->is_computed_name()); 1565 DCHECK(!property->is_computed_name());
1543 VisitForStackValue(value); 1566 VisitForStackValue(value);
1544 DCHECK(property->emit_store()); 1567 DCHECK(property->emit_store());
1545 CallRuntimeWithOperands(Runtime::kInternalSetPrototype); 1568 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1546 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), 1569 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1547 NO_REGISTERS); 1570 Deoptimizer::BailoutState::NO_REGISTERS);
1548 } else { 1571 } else {
1549 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index)); 1572 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1550 VisitForStackValue(value); 1573 VisitForStackValue(value);
1551 if (NeedsHomeObject(value)) { 1574 if (NeedsHomeObject(value)) {
1552 EmitSetHomeObject(value, 2, property->GetSlot()); 1575 EmitSetHomeObject(value, 2, property->GetSlot());
1553 } 1576 }
1554 1577
1555 switch (property->kind()) { 1578 switch (property->kind()) {
1556 case ObjectLiteral::Property::CONSTANT: 1579 case ObjectLiteral::Property::CONSTANT:
1557 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1580 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
(...skipping 51 matching lines...)
1609 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); 1632 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1610 __ li(a1, Operand(constant_elements)); 1633 __ li(a1, Operand(constant_elements));
1611 if (MustCreateArrayLiteralWithRuntime(expr)) { 1634 if (MustCreateArrayLiteralWithRuntime(expr)) {
1612 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags()))); 1635 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1613 __ Push(a3, a2, a1, a0); 1636 __ Push(a3, a2, a1, a0);
1614 __ CallRuntime(Runtime::kCreateArrayLiteral); 1637 __ CallRuntime(Runtime::kCreateArrayLiteral);
1615 } else { 1638 } else {
1616 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); 1639 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1617 __ CallStub(&stub); 1640 __ CallStub(&stub);
1618 } 1641 }
1619 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); 1642 PrepareForBailoutForId(expr->CreateLiteralId(),
1643 Deoptimizer::BailoutState::TOS_REGISTER);
1620 1644
1621 bool result_saved = false; // Is the result saved to the stack? 1645 bool result_saved = false; // Is the result saved to the stack?
1622 ZoneList<Expression*>* subexprs = expr->values(); 1646 ZoneList<Expression*>* subexprs = expr->values();
1623 int length = subexprs->length(); 1647 int length = subexprs->length();
1624 1648
1625 // Emit code to evaluate all the non-constant subexpressions and to store 1649 // Emit code to evaluate all the non-constant subexpressions and to store
1626 // them into the newly cloned array. 1650 // them into the newly cloned array.
1627 int array_index = 0; 1651 int array_index = 0;
1628 for (; array_index < length; array_index++) { 1652 for (; array_index < length; array_index++) {
1629 Expression* subexpr = subexprs->at(array_index); 1653 Expression* subexpr = subexprs->at(array_index);
(...skipping 11 matching lines...) Expand all
1641 VisitForAccumulatorValue(subexpr); 1665 VisitForAccumulatorValue(subexpr);
1642 1666
1643 __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index))); 1667 __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1644 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 1668 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1645 __ mov(StoreDescriptor::ValueRegister(), result_register()); 1669 __ mov(StoreDescriptor::ValueRegister(), result_register());
1646 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot()); 1670 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1647 Handle<Code> ic = 1671 Handle<Code> ic =
1648 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); 1672 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1649 CallIC(ic); 1673 CallIC(ic);
1650 1674
1651 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); 1675 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1676 Deoptimizer::BailoutState::NO_REGISTERS);
1652 } 1677 }
1653 1678
1654 // In case the array literal contains spread expressions it has two parts. The 1679 // In case the array literal contains spread expressions it has two parts. The
1655 // first part is the "static" array which has a literal index is handled 1680 // first part is the "static" array which has a literal index is handled
1656 // above. The second part is the part after the first spread expression 1681 // above. The second part is the part after the first spread expression
1657 // (inclusive) and these elements gets appended to the array. Note that the 1682 // (inclusive) and these elements gets appended to the array. Note that the
1658 // number elements an iterable produces is unknown ahead of time. 1683 // number elements an iterable produces is unknown ahead of time.
1659 if (array_index < length && result_saved) { 1684 if (array_index < length && result_saved) {
1660 PopOperand(v0); 1685 PopOperand(v0);
1661 result_saved = false; 1686 result_saved = false;
1662 } 1687 }
1663 for (; array_index < length; array_index++) { 1688 for (; array_index < length; array_index++) {
1664 Expression* subexpr = subexprs->at(array_index); 1689 Expression* subexpr = subexprs->at(array_index);
1665 1690
1666 PushOperand(v0); 1691 PushOperand(v0);
1667 DCHECK(!subexpr->IsSpread()); 1692 DCHECK(!subexpr->IsSpread());
1668 VisitForStackValue(subexpr); 1693 VisitForStackValue(subexpr);
1669 CallRuntimeWithOperands(Runtime::kAppendElement); 1694 CallRuntimeWithOperands(Runtime::kAppendElement);
1670 1695
1671 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); 1696 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1697 Deoptimizer::BailoutState::NO_REGISTERS);
1672 } 1698 }
1673 1699
1674 if (result_saved) { 1700 if (result_saved) {
1675 context()->PlugTOS(); 1701 context()->PlugTOS();
1676 } else { 1702 } else {
1677 context()->Plug(v0); 1703 context()->Plug(v0);
1678 } 1704 }
1679 } 1705 }
1680 1706
1681 1707
(...skipping 62 matching lines...)
1744 break; 1770 break;
1745 } 1771 }
1746 1772
1747 // For compound assignments we need another deoptimization point after the 1773 // For compound assignments we need another deoptimization point after the
1748 // variable/property load. 1774 // variable/property load.
1749 if (expr->is_compound()) { 1775 if (expr->is_compound()) {
1750 { AccumulatorValueContext context(this); 1776 { AccumulatorValueContext context(this);
1751 switch (assign_type) { 1777 switch (assign_type) {
1752 case VARIABLE: 1778 case VARIABLE:
1753 EmitVariableLoad(expr->target()->AsVariableProxy()); 1779 EmitVariableLoad(expr->target()->AsVariableProxy());
1754 PrepareForBailout(expr->target(), TOS_REG); 1780 PrepareForBailout(expr->target(),
1781 Deoptimizer::BailoutState::TOS_REGISTER);
1755 break; 1782 break;
1756 case NAMED_PROPERTY: 1783 case NAMED_PROPERTY:
1757 EmitNamedPropertyLoad(property); 1784 EmitNamedPropertyLoad(property);
1758 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1785 PrepareForBailoutForId(property->LoadId(),
1786 Deoptimizer::BailoutState::TOS_REGISTER);
1759 break; 1787 break;
1760 case NAMED_SUPER_PROPERTY: 1788 case NAMED_SUPER_PROPERTY:
1761 EmitNamedSuperPropertyLoad(property); 1789 EmitNamedSuperPropertyLoad(property);
1762 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1790 PrepareForBailoutForId(property->LoadId(),
1791 Deoptimizer::BailoutState::TOS_REGISTER);
1763 break; 1792 break;
1764 case KEYED_SUPER_PROPERTY: 1793 case KEYED_SUPER_PROPERTY:
1765 EmitKeyedSuperPropertyLoad(property); 1794 EmitKeyedSuperPropertyLoad(property);
1766 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1795 PrepareForBailoutForId(property->LoadId(),
1796 Deoptimizer::BailoutState::TOS_REGISTER);
1767 break; 1797 break;
1768 case KEYED_PROPERTY: 1798 case KEYED_PROPERTY:
1769 EmitKeyedPropertyLoad(property); 1799 EmitKeyedPropertyLoad(property);
1770 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1800 PrepareForBailoutForId(property->LoadId(),
1801 Deoptimizer::BailoutState::TOS_REGISTER);
1771 break; 1802 break;
1772 } 1803 }
1773 } 1804 }
1774 1805
1775 Token::Value op = expr->binary_op(); 1806 Token::Value op = expr->binary_op();
1776 PushOperand(v0); // Left operand goes on the stack. 1807 PushOperand(v0); // Left operand goes on the stack.
1777 VisitForAccumulatorValue(expr->value()); 1808 VisitForAccumulatorValue(expr->value());
1778 1809
1779 AccumulatorValueContext context(this); 1810 AccumulatorValueContext context(this);
1780 if (ShouldInlineSmiCase(op)) { 1811 if (ShouldInlineSmiCase(op)) {
1781 EmitInlineSmiBinaryOp(expr->binary_operation(), 1812 EmitInlineSmiBinaryOp(expr->binary_operation(),
1782 op, 1813 op,
1783 expr->target(), 1814 expr->target(),
1784 expr->value()); 1815 expr->value());
1785 } else { 1816 } else {
1786 EmitBinaryOp(expr->binary_operation(), op); 1817 EmitBinaryOp(expr->binary_operation(), op);
1787 } 1818 }
1788 1819
1789 // Deoptimization point in case the binary operation may have side effects. 1820 // Deoptimization point in case the binary operation may have side effects.
1790 PrepareForBailout(expr->binary_operation(), TOS_REG); 1821 PrepareForBailout(expr->binary_operation(),
1822 Deoptimizer::BailoutState::TOS_REGISTER);
1791 } else { 1823 } else {
1792 VisitForAccumulatorValue(expr->value()); 1824 VisitForAccumulatorValue(expr->value());
1793 } 1825 }
1794 1826
1795 SetExpressionPosition(expr); 1827 SetExpressionPosition(expr);
1796 1828
1797 // Store the value. 1829 // Store the value.
1798 switch (assign_type) { 1830 switch (assign_type) {
1799 case VARIABLE: 1831 case VARIABLE:
1800 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), 1832 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1801 expr->op(), expr->AssignmentSlot()); 1833 expr->op(), expr->AssignmentSlot());
1802 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 1834 PrepareForBailoutForId(expr->AssignmentId(),
1835 Deoptimizer::BailoutState::TOS_REGISTER);
1803 context()->Plug(v0); 1836 context()->Plug(v0);
1804 break; 1837 break;
1805 case NAMED_PROPERTY: 1838 case NAMED_PROPERTY:
1806 EmitNamedPropertyAssignment(expr); 1839 EmitNamedPropertyAssignment(expr);
1807 break; 1840 break;
1808 case NAMED_SUPER_PROPERTY: 1841 case NAMED_SUPER_PROPERTY:
1809 EmitNamedSuperPropertyStore(property); 1842 EmitNamedSuperPropertyStore(property);
1810 context()->Plug(v0); 1843 context()->Plug(v0);
1811 break; 1844 break;
1812 case KEYED_SUPER_PROPERTY: 1845 case KEYED_SUPER_PROPERTY:
(...skipping 456 matching lines...)
2269 DCHECK(prop != NULL); 2302 DCHECK(prop != NULL);
2270 DCHECK(prop->key()->IsLiteral()); 2303 DCHECK(prop->key()->IsLiteral());
2271 2304
2272 __ mov(StoreDescriptor::ValueRegister(), result_register()); 2305 __ mov(StoreDescriptor::ValueRegister(), result_register());
2273 __ li(StoreDescriptor::NameRegister(), 2306 __ li(StoreDescriptor::NameRegister(),
2274 Operand(prop->key()->AsLiteral()->value())); 2307 Operand(prop->key()->AsLiteral()->value()));
2275 PopOperand(StoreDescriptor::ReceiverRegister()); 2308 PopOperand(StoreDescriptor::ReceiverRegister());
2276 EmitLoadStoreICSlot(expr->AssignmentSlot()); 2309 EmitLoadStoreICSlot(expr->AssignmentSlot());
2277 CallStoreIC(); 2310 CallStoreIC();
2278 2311
2279 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2312 PrepareForBailoutForId(expr->AssignmentId(),
2313 Deoptimizer::BailoutState::TOS_REGISTER);
2280 context()->Plug(v0); 2314 context()->Plug(v0);
2281 } 2315 }
2282 2316
2283 2317
2284 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) { 2318 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2285 // Assignment to named property of super. 2319 // Assignment to named property of super.
2286 // v0 : value 2320 // v0 : value
2287 // stack : receiver ('this'), home_object 2321 // stack : receiver ('this'), home_object
2288 DCHECK(prop != NULL); 2322 DCHECK(prop != NULL);
2289 Literal* key = prop->key()->AsLiteral(); 2323 Literal* key = prop->key()->AsLiteral();
(...skipping 30 matching lines...)
2320 __ mov(StoreDescriptor::ValueRegister(), result_register()); 2354 __ mov(StoreDescriptor::ValueRegister(), result_register());
2321 PopOperands(StoreDescriptor::ReceiverRegister(), 2355 PopOperands(StoreDescriptor::ReceiverRegister(),
2322 StoreDescriptor::NameRegister()); 2356 StoreDescriptor::NameRegister());
2323 DCHECK(StoreDescriptor::ValueRegister().is(a0)); 2357 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2324 2358
2325 Handle<Code> ic = 2359 Handle<Code> ic =
2326 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); 2360 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2327 EmitLoadStoreICSlot(expr->AssignmentSlot()); 2361 EmitLoadStoreICSlot(expr->AssignmentSlot());
2328 CallIC(ic); 2362 CallIC(ic);
2329 2363
2330 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2364 PrepareForBailoutForId(expr->AssignmentId(),
2365 Deoptimizer::BailoutState::TOS_REGISTER);
2331 context()->Plug(v0); 2366 context()->Plug(v0);
2332 } 2367 }
2333 2368
2334 2369
2335 void FullCodeGenerator::CallIC(Handle<Code> code, 2370 void FullCodeGenerator::CallIC(Handle<Code> code,
2336 TypeFeedbackId id) { 2371 TypeFeedbackId id) {
2337 ic_total_count_++; 2372 ic_total_count_++;
2338 __ Call(code, RelocInfo::CODE_TARGET, id); 2373 __ Call(code, RelocInfo::CODE_TARGET, id);
2339 } 2374 }
2340 2375
2341 2376
2342 // Code common for calls using the IC. 2377 // Code common for calls using the IC.
2343 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { 2378 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2344 Expression* callee = expr->expression(); 2379 Expression* callee = expr->expression();
2345 2380
2346 // Get the target function. 2381 // Get the target function.
2347 ConvertReceiverMode convert_mode; 2382 ConvertReceiverMode convert_mode;
2348 if (callee->IsVariableProxy()) { 2383 if (callee->IsVariableProxy()) {
2349 { StackValueContext context(this); 2384 { StackValueContext context(this);
2350 EmitVariableLoad(callee->AsVariableProxy()); 2385 EmitVariableLoad(callee->AsVariableProxy());
2351 PrepareForBailout(callee, NO_REGISTERS); 2386 PrepareForBailout(callee, Deoptimizer::BailoutState::NO_REGISTERS);
2352 } 2387 }
2353 // Push undefined as receiver. This is patched in the method prologue if it 2388 // Push undefined as receiver. This is patched in the method prologue if it
2354 // is a sloppy mode method. 2389 // is a sloppy mode method.
2355 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 2390 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2356 PushOperand(at); 2391 PushOperand(at);
2357 convert_mode = ConvertReceiverMode::kNullOrUndefined; 2392 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2358 } else { 2393 } else {
2359 // Load the function from the receiver. 2394 // Load the function from the receiver.
2360 DCHECK(callee->IsProperty()); 2395 DCHECK(callee->IsProperty());
2361 DCHECK(!callee->AsProperty()->IsSuperAccess()); 2396 DCHECK(!callee->AsProperty()->IsSuperAccess());
2362 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 2397 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2363 EmitNamedPropertyLoad(callee->AsProperty()); 2398 EmitNamedPropertyLoad(callee->AsProperty());
2364 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2399 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2400 Deoptimizer::BailoutState::TOS_REGISTER);
2365 // Push the target function under the receiver. 2401 // Push the target function under the receiver.
2366 __ lw(at, MemOperand(sp, 0)); 2402 __ lw(at, MemOperand(sp, 0));
2367 PushOperand(at); 2403 PushOperand(at);
2368 __ sw(v0, MemOperand(sp, kPointerSize)); 2404 __ sw(v0, MemOperand(sp, kPointerSize));
2369 convert_mode = ConvertReceiverMode::kNotNullOrUndefined; 2405 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2370 } 2406 }
2371 2407
2372 EmitCall(expr, convert_mode); 2408 EmitCall(expr, convert_mode);
2373 } 2409 }
2374 2410
(...skipping 16 matching lines...)
2391 PushOperands(scratch, v0, v0, scratch); 2427 PushOperands(scratch, v0, v0, scratch);
2392 PushOperand(key->value()); 2428 PushOperand(key->value());
2393 2429
2394 // Stack here: 2430 // Stack here:
2395 // - home_object 2431 // - home_object
2396 // - this (receiver) 2432 // - this (receiver)
2397 // - this (receiver) <-- LoadFromSuper will pop here and below. 2433 // - this (receiver) <-- LoadFromSuper will pop here and below.
2398 // - home_object 2434 // - home_object
2399 // - key 2435 // - key
2400 CallRuntimeWithOperands(Runtime::kLoadFromSuper); 2436 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
2401 PrepareForBailoutForId(prop->LoadId(), TOS_REG); 2437 PrepareForBailoutForId(prop->LoadId(),
2438 Deoptimizer::BailoutState::TOS_REGISTER);
2402 2439
2403 // Replace home_object with target function. 2440 // Replace home_object with target function.
2404 __ sw(v0, MemOperand(sp, kPointerSize)); 2441 __ sw(v0, MemOperand(sp, kPointerSize));
2405 2442
2406 // Stack here: 2443 // Stack here:
2407 // - target function 2444 // - target function
2408 // - this (receiver) 2445 // - this (receiver)
2409 EmitCall(expr); 2446 EmitCall(expr);
2410 } 2447 }
2411 2448
2412 2449
2413 // Code common for calls using the IC. 2450 // Code common for calls using the IC.
2414 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, 2451 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2415 Expression* key) { 2452 Expression* key) {
2416 // Load the key. 2453 // Load the key.
2417 VisitForAccumulatorValue(key); 2454 VisitForAccumulatorValue(key);
2418 2455
2419 Expression* callee = expr->expression(); 2456 Expression* callee = expr->expression();
2420 2457
2421 // Load the function from the receiver. 2458 // Load the function from the receiver.
2422 DCHECK(callee->IsProperty()); 2459 DCHECK(callee->IsProperty());
2423 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 2460 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2424 __ Move(LoadDescriptor::NameRegister(), v0); 2461 __ Move(LoadDescriptor::NameRegister(), v0);
2425 EmitKeyedPropertyLoad(callee->AsProperty()); 2462 EmitKeyedPropertyLoad(callee->AsProperty());
2426 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2463 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2464 Deoptimizer::BailoutState::TOS_REGISTER);
2427 2465
2428 // Push the target function under the receiver. 2466 // Push the target function under the receiver.
2429 __ lw(at, MemOperand(sp, 0)); 2467 __ lw(at, MemOperand(sp, 0));
2430 PushOperand(at); 2468 PushOperand(at);
2431 __ sw(v0, MemOperand(sp, kPointerSize)); 2469 __ sw(v0, MemOperand(sp, kPointerSize));
2432 2470
2433 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined); 2471 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2434 } 2472 }
2435 2473
2436 2474
(...skipping 13 matching lines...)
2450 PushOperands(scratch, v0, v0, scratch); 2488 PushOperands(scratch, v0, v0, scratch);
2451 VisitForStackValue(prop->key()); 2489 VisitForStackValue(prop->key());
2452 2490
2453 // Stack here: 2491 // Stack here:
2454 // - home_object 2492 // - home_object
2455 // - this (receiver) 2493 // - this (receiver)
2456 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below. 2494 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2457 // - home_object 2495 // - home_object
2458 // - key 2496 // - key
2459 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper); 2497 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
2460 PrepareForBailoutForId(prop->LoadId(), TOS_REG); 2498 PrepareForBailoutForId(prop->LoadId(),
2499 Deoptimizer::BailoutState::TOS_REGISTER);
2461 2500
2462 // Replace home_object with target function. 2501 // Replace home_object with target function.
2463 __ sw(v0, MemOperand(sp, kPointerSize)); 2502 __ sw(v0, MemOperand(sp, kPointerSize));
2464 2503
2465 // Stack here: 2504 // Stack here:
2466 // - target function 2505 // - target function
2467 // - this (receiver) 2506 // - this (receiver)
2468 EmitCall(expr); 2507 EmitCall(expr);
2469 } 2508 }
2470 2509
2471 2510
2472 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { 2511 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2473 // Load the arguments. 2512 // Load the arguments.
2474 ZoneList<Expression*>* args = expr->arguments(); 2513 ZoneList<Expression*>* args = expr->arguments();
2475 int arg_count = args->length(); 2514 int arg_count = args->length();
2476 for (int i = 0; i < arg_count; i++) { 2515 for (int i = 0; i < arg_count; i++) {
2477 VisitForStackValue(args->at(i)); 2516 VisitForStackValue(args->at(i));
2478 } 2517 }
2479 2518
2480 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); 2519 PrepareForBailoutForId(expr->CallId(),
2520 Deoptimizer::BailoutState::NO_REGISTERS);
2481 // Record source position of the IC call. 2521 // Record source position of the IC call.
2482 SetCallPosition(expr, expr->tail_call_mode()); 2522 SetCallPosition(expr, expr->tail_call_mode());
2483 if (expr->tail_call_mode() == TailCallMode::kAllow) { 2523 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2484 if (FLAG_trace) { 2524 if (FLAG_trace) {
2485 __ CallRuntime(Runtime::kTraceTailCall); 2525 __ CallRuntime(Runtime::kTraceTailCall);
2486 } 2526 }
2487 // Update profiling counters before the tail call since we will 2527 // Update profiling counters before the tail call since we will
2488 // not return to this function. 2528 // not return to this function.
2489 EmitProfilingCounterHandlingForReturnSequence(true); 2529 EmitProfilingCounterHandlingForReturnSequence(true);
2490 } 2530 }
(...skipping 49 matching lines...)
2540 // Generate code for loading from variables potentially shadowed by 2580 // Generate code for loading from variables potentially shadowed by
2541 // eval-introduced variables. 2581 // eval-introduced variables.
2542 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done); 2582 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2543 2583
2544 __ bind(&slow); 2584 __ bind(&slow);
2545 // Call the runtime to find the function to call (returned in v0) 2585 // Call the runtime to find the function to call (returned in v0)
2546 // and the object holding it (returned in v1). 2586 // and the object holding it (returned in v1).
2547 __ Push(callee->name()); 2587 __ Push(callee->name());
2548 __ CallRuntime(Runtime::kLoadLookupSlotForCall); 2588 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2549 PushOperands(v0, v1); // Function, receiver. 2589 PushOperands(v0, v1); // Function, receiver.
2550 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS); 2590 PrepareForBailoutForId(expr->LookupId(),
2591 Deoptimizer::BailoutState::NO_REGISTERS);
2551 2592
2552 // If fast case code has been generated, emit code to push the 2593 // If fast case code has been generated, emit code to push the
2553 // function and receiver and have the slow path jump around this 2594 // function and receiver and have the slow path jump around this
2554 // code. 2595 // code.
2555 if (done.is_linked()) { 2596 if (done.is_linked()) {
2556 Label call; 2597 Label call;
2557 __ Branch(&call); 2598 __ Branch(&call);
2558 __ bind(&done); 2599 __ bind(&done);
2559 // Push function. 2600 // Push function.
2560 __ push(v0); 2601 __ push(v0);
(...skipping 27 matching lines...)
2588 2629
2589 // Push a copy of the function (found below the arguments) and 2630 // Push a copy of the function (found below the arguments) and
2590 // resolve eval. 2631 // resolve eval.
2591 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2632 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2592 __ push(a1); 2633 __ push(a1);
2593 EmitResolvePossiblyDirectEval(expr); 2634 EmitResolvePossiblyDirectEval(expr);
2594 2635
2595 // Touch up the stack with the resolved function. 2636 // Touch up the stack with the resolved function.
2596 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2637 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2597 2638
2598 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS); 2639 PrepareForBailoutForId(expr->EvalId(),
2640 Deoptimizer::BailoutState::NO_REGISTERS);
2599 // Record source position for debugger. 2641 // Record source position for debugger.
2600 SetCallPosition(expr); 2642 SetCallPosition(expr);
2601 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2643 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2602 __ li(a0, Operand(arg_count)); 2644 __ li(a0, Operand(arg_count));
2603 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny, 2645 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2604 expr->tail_call_mode()), 2646 expr->tail_call_mode()),
2605 RelocInfo::CODE_TARGET); 2647 RelocInfo::CODE_TARGET);
2606 OperandStackDepthDecrement(arg_count + 1); 2648 OperandStackDepthDecrement(arg_count + 1);
2607 RecordJSReturnSite(expr); 2649 RecordJSReturnSite(expr);
2608 RestoreContext(); 2650 RestoreContext();
(...skipping 28 matching lines...)
2637 __ li(a0, Operand(arg_count)); 2679 __ li(a0, Operand(arg_count));
2638 __ lw(a1, MemOperand(sp, arg_count * kPointerSize)); 2680 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2639 2681
2640 // Record call targets in unoptimized code. 2682 // Record call targets in unoptimized code.
2641 __ EmitLoadTypeFeedbackVector(a2); 2683 __ EmitLoadTypeFeedbackVector(a2);
2642 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot()))); 2684 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
2643 2685
2644 CallConstructStub stub(isolate()); 2686 CallConstructStub stub(isolate());
2645 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET); 2687 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
2646 OperandStackDepthDecrement(arg_count + 1); 2688 OperandStackDepthDecrement(arg_count + 1);
2647 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2689 PrepareForBailoutForId(expr->ReturnId(),
2690 Deoptimizer::BailoutState::TOS_REGISTER);
2648 RestoreContext(); 2691 RestoreContext();
2649 context()->Plug(v0); 2692 context()->Plug(v0);
2650 } 2693 }
2651 2694
2652 2695
2653 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) { 2696 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2654 SuperCallReference* super_call_ref = 2697 SuperCallReference* super_call_ref =
2655 expr->expression()->AsSuperCallReference(); 2698 expr->expression()->AsSuperCallReference();
2656 DCHECK_NOT_NULL(super_call_ref); 2699 DCHECK_NOT_NULL(super_call_ref);
2657 2700
(...skipping 426 matching lines...)
3084 } 3127 }
3085 3128
3086 3129
3087 void FullCodeGenerator::EmitCall(CallRuntime* expr) { 3130 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3088 ZoneList<Expression*>* args = expr->arguments(); 3131 ZoneList<Expression*>* args = expr->arguments();
3089 DCHECK_LE(2, args->length()); 3132 DCHECK_LE(2, args->length());
3090 // Push target, receiver and arguments onto the stack. 3133 // Push target, receiver and arguments onto the stack.
3091 for (Expression* const arg : *args) { 3134 for (Expression* const arg : *args) {
3092 VisitForStackValue(arg); 3135 VisitForStackValue(arg);
3093 } 3136 }
3094 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); 3137 PrepareForBailoutForId(expr->CallId(),
3138 Deoptimizer::BailoutState::NO_REGISTERS);
3095 // Move target to a1. 3139 // Move target to a1.
3096 int const argc = args->length() - 2; 3140 int const argc = args->length() - 2;
3097 __ lw(a1, MemOperand(sp, (argc + 1) * kPointerSize)); 3141 __ lw(a1, MemOperand(sp, (argc + 1) * kPointerSize));
3098 // Call the target. 3142 // Call the target.
3099 __ li(a0, Operand(argc)); 3143 __ li(a0, Operand(argc));
3100 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 3144 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
3101 OperandStackDepthDecrement(argc + 1); 3145 OperandStackDepthDecrement(argc + 1);
3102 RestoreContext(); 3146 RestoreContext();
3103 // Discard the function left on TOS. 3147 // Discard the function left on TOS.
3104 context()->DropAndPlug(1, v0); 3148 context()->DropAndPlug(1, v0);
(...skipping 189 matching lines...)
3294 // because we need to prepare a pair of extra administrative AST ids 3338 // because we need to prepare a pair of extra administrative AST ids
3295 // for the optimizing compiler. 3339 // for the optimizing compiler.
3296 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue()); 3340 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3297 Label materialize_true, materialize_false, done; 3341 Label materialize_true, materialize_false, done;
3298 VisitForControl(expr->expression(), 3342 VisitForControl(expr->expression(),
3299 &materialize_false, 3343 &materialize_false,
3300 &materialize_true, 3344 &materialize_true,
3301 &materialize_true); 3345 &materialize_true);
3302 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1); 3346 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
3303 __ bind(&materialize_true); 3347 __ bind(&materialize_true);
3304 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); 3348 PrepareForBailoutForId(expr->MaterializeTrueId(),
3349 Deoptimizer::BailoutState::NO_REGISTERS);
3305 __ LoadRoot(v0, Heap::kTrueValueRootIndex); 3350 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3306 if (context()->IsStackValue()) __ push(v0); 3351 if (context()->IsStackValue()) __ push(v0);
3307 __ jmp(&done); 3352 __ jmp(&done);
3308 __ bind(&materialize_false); 3353 __ bind(&materialize_false);
3309 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS); 3354 PrepareForBailoutForId(expr->MaterializeFalseId(),
3355 Deoptimizer::BailoutState::NO_REGISTERS);
3310 __ LoadRoot(v0, Heap::kFalseValueRootIndex); 3356 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3311 if (context()->IsStackValue()) __ push(v0); 3357 if (context()->IsStackValue()) __ push(v0);
3312 __ bind(&done); 3358 __ bind(&done);
3313 } 3359 }
3314 break; 3360 break;
3315 } 3361 }
3316 3362
3317 case Token::TYPEOF: { 3363 case Token::TYPEOF: {
3318 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); 3364 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3319 { 3365 {
(...skipping 79 matching lines...)
3399 } 3445 }
3400 3446
3401 case VARIABLE: 3447 case VARIABLE:
3402 UNREACHABLE(); 3448 UNREACHABLE();
3403 } 3449 }
3404 } 3450 }
3405 3451
3406 // We need a second deoptimization point after loading the value 3452 // We need a second deoptimization point after loading the value
3407 // in case evaluating the property load may have a side effect. 3453
3408 if (assign_type == VARIABLE) { 3454 if (assign_type == VARIABLE) {
3409 PrepareForBailout(expr->expression(), TOS_REG); 3455 PrepareForBailout(expr->expression(),
3456 Deoptimizer::BailoutState::TOS_REGISTER);
3410 } else { 3457 } else {
3411 PrepareForBailoutForId(prop->LoadId(), TOS_REG); 3458 PrepareForBailoutForId(prop->LoadId(),
3459 Deoptimizer::BailoutState::TOS_REGISTER);
3412 } 3460 }
3413 3461
3414 // Inline smi case if we are in a loop. 3462 // Inline smi case if we are in a loop.
3415 Label stub_call, done; 3463 Label stub_call, done;
3416 JumpPatchSite patch_site(masm_); 3464 JumpPatchSite patch_site(masm_);
3417 3465
3418 int count_value = expr->op() == Token::INC ? 1 : -1; 3466 int count_value = expr->op() == Token::INC ? 1 : -1;
3419 __ mov(a0, v0); 3467 __ mov(a0, v0);
3420 if (ShouldInlineSmiCase(expr->op())) { 3468 if (ShouldInlineSmiCase(expr->op())) {
3421 Label slow; 3469 Label slow;
(...skipping 30 matching lines...)
3452 __ AddBranchNoOvf(v0, v0, Operand(scratch1), &done); 3500 __ AddBranchNoOvf(v0, v0, Operand(scratch1), &done);
3453 // Call stub. Undo operation first. 3501 // Call stub. Undo operation first.
3454 __ Move(v0, a0); 3502 __ Move(v0, a0);
3455 __ jmp(&stub_call); 3503 __ jmp(&stub_call);
3456 __ bind(&slow); 3504 __ bind(&slow);
3457 } 3505 }
3458 3506
3459 // Convert old value into a number. 3507 // Convert old value into a number.
3460 ToNumberStub convert_stub(isolate()); 3508 ToNumberStub convert_stub(isolate());
3461 __ CallStub(&convert_stub); 3509 __ CallStub(&convert_stub);
3462 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG); 3510 PrepareForBailoutForId(expr->ToNumberId(),
3511 Deoptimizer::BailoutState::TOS_REGISTER);
3463 3512
3464 // Save result for postfix expressions. 3513 // Save result for postfix expressions.
3465 if (expr->is_postfix()) { 3514 if (expr->is_postfix()) {
3466 if (!context()->IsEffect()) { 3515 if (!context()->IsEffect()) {
3467 // Save the result on the stack. If we have a named or keyed property 3516 // Save the result on the stack. If we have a named or keyed property
3468 // we store the result under the receiver that is currently on top 3517 // we store the result under the receiver that is currently on top
3469 // of the stack. 3518 // of the stack.
3470 switch (assign_type) { 3519 switch (assign_type) {
3471 case VARIABLE: 3520 case VARIABLE:
3472 PushOperand(v0); 3521 PushOperand(v0);
(...skipping 25 matching lines...)
3498 patch_site.EmitPatchInfo(); 3547 patch_site.EmitPatchInfo();
3499 __ bind(&done); 3548 __ bind(&done);
3500 3549
3501 // Store the value returned in v0. 3550 // Store the value returned in v0.
3502 switch (assign_type) { 3551 switch (assign_type) {
3503 case VARIABLE: 3552 case VARIABLE:
3504 if (expr->is_postfix()) { 3553 if (expr->is_postfix()) {
3505 { EffectContext context(this); 3554 { EffectContext context(this);
3506 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 3555 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3507 Token::ASSIGN, expr->CountSlot()); 3556 Token::ASSIGN, expr->CountSlot());
3508 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 3557 PrepareForBailoutForId(expr->AssignmentId(),
3558 Deoptimizer::BailoutState::TOS_REGISTER);
3509 context.Plug(v0); 3559 context.Plug(v0);
3510 } 3560 }
3511 // For all contexts except EffectContext we have the result on 3561
3512 // top of the stack. 3562 // top of the stack.
3513 if (!context()->IsEffect()) { 3563 if (!context()->IsEffect()) {
3514 context()->PlugTOS(); 3564 context()->PlugTOS();
3515 } 3565 }
3516 } else { 3566 } else {
3517 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 3567 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3518 Token::ASSIGN, expr->CountSlot()); 3568 Token::ASSIGN, expr->CountSlot());
3519 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 3569 PrepareForBailoutForId(expr->AssignmentId(),
3570 Deoptimizer::BailoutState::TOS_REGISTER);
3520 context()->Plug(v0); 3571 context()->Plug(v0);
3521 } 3572 }
3522 break; 3573 break;
3523 case NAMED_PROPERTY: { 3574 case NAMED_PROPERTY: {
3524 __ mov(StoreDescriptor::ValueRegister(), result_register()); 3575 __ mov(StoreDescriptor::ValueRegister(), result_register());
3525 __ li(StoreDescriptor::NameRegister(), 3576 __ li(StoreDescriptor::NameRegister(),
3526 Operand(prop->key()->AsLiteral()->value())); 3577 Operand(prop->key()->AsLiteral()->value()));
3527 PopOperand(StoreDescriptor::ReceiverRegister()); 3578 PopOperand(StoreDescriptor::ReceiverRegister());
3528 EmitLoadStoreICSlot(expr->CountSlot()); 3579 EmitLoadStoreICSlot(expr->CountSlot());
3529 CallStoreIC(); 3580 CallStoreIC();
3530 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 3581 PrepareForBailoutForId(expr->AssignmentId(),
3582 Deoptimizer::BailoutState::TOS_REGISTER);
3531 if (expr->is_postfix()) { 3583 if (expr->is_postfix()) {
3532 if (!context()->IsEffect()) { 3584 if (!context()->IsEffect()) {
3533 context()->PlugTOS(); 3585 context()->PlugTOS();
3534 } 3586 }
3535 } else { 3587 } else {
3536 context()->Plug(v0); 3588 context()->Plug(v0);
3537 } 3589 }
3538 break; 3590 break;
3539 } 3591 }
3540 case NAMED_SUPER_PROPERTY: { 3592 case NAMED_SUPER_PROPERTY: {
(...skipping 19 matching lines...)
3560 break; 3612 break;
3561 } 3613 }
3562 case KEYED_PROPERTY: { 3614 case KEYED_PROPERTY: {
3563 __ mov(StoreDescriptor::ValueRegister(), result_register()); 3615 __ mov(StoreDescriptor::ValueRegister(), result_register());
3564 PopOperands(StoreDescriptor::ReceiverRegister(), 3616 PopOperands(StoreDescriptor::ReceiverRegister(),
3565 StoreDescriptor::NameRegister()); 3617 StoreDescriptor::NameRegister());
3566 Handle<Code> ic = 3618 Handle<Code> ic =
3567 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); 3619 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3568 EmitLoadStoreICSlot(expr->CountSlot()); 3620 EmitLoadStoreICSlot(expr->CountSlot());
3569 CallIC(ic); 3621 CallIC(ic);
3570 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 3622 PrepareForBailoutForId(expr->AssignmentId(),
3623 Deoptimizer::BailoutState::TOS_REGISTER);
3571 if (expr->is_postfix()) { 3624 if (expr->is_postfix()) {
3572 if (!context()->IsEffect()) { 3625 if (!context()->IsEffect()) {
3573 context()->PlugTOS(); 3626 context()->PlugTOS();
3574 } 3627 }
3575 } else { 3628 } else {
3576 context()->Plug(v0); 3629 context()->Plug(v0);
3577 } 3630 }
3578 break; 3631 break;
3579 } 3632 }
3580 } 3633 }
(...skipping 363 matching lines...)
3944 reinterpret_cast<uint32_t>( 3997 reinterpret_cast<uint32_t>(
3945 isolate->builtins()->OnStackReplacement()->entry())); 3998 isolate->builtins()->OnStackReplacement()->entry()));
3946 return ON_STACK_REPLACEMENT; 3999 return ON_STACK_REPLACEMENT;
3947 } 4000 }
3948 4001
3949 4002
3950 } // namespace internal 4003 } // namespace internal
3951 } // namespace v8 4004 } // namespace v8
3952 4005
3953 #endif // V8_TARGET_ARCH_MIPS 4006 #endif // V8_TARGET_ARCH_MIPS