Chromium Code Reviews

Side by Side Diff: src/full-codegen/mips64/full-codegen-mips64.cc

Issue 1969423002: [Interpreter] Remove InterpreterExitTrampoline and replace with returning to the entry trampoline. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix typo on Arm64 (created 4 years, 7 months ago)
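Most of the hunks visible in this file are one mechanical pattern: call sites of PrepareForBailout / PrepareForBailoutForId that used to pass the bare TOS_REG or NO_REGISTERS constant now pass the scoped Deoptimizer::BailoutState value, which is longer and wraps onto a continuation line in the new column. A minimal, self-contained sketch of that call-site pattern, using simplified stand-in declarations rather than the real V8 headers:

// Simplified stand-ins for illustration only; the real Deoptimizer and
// FullCodeGenerator declarations are much larger.
#include <cstdio>

struct Deoptimizer {
  enum class BailoutState { NO_REGISTERS, TOS_REGISTER };
};

// Hypothetical stand-in for FullCodeGenerator::PrepareForBailoutForId.
void PrepareForBailoutForId(int bailout_id, Deoptimizer::BailoutState state) {
  std::printf("bailout id %d, state %d\n", bailout_id,
              static_cast<int>(state));
}

int main() {
  // Old style (left column): PrepareForBailoutForId(id, NO_REGISTERS);
  // New style (right column) names the scoped enum explicitly:
  PrepareForBailoutForId(1, Deoptimizer::BailoutState::NO_REGISTERS);
  PrepareForBailoutForId(2, Deoptimizer::BailoutState::TOS_REGISTER);
  return 0;
}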
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_MIPS64 5 #if V8_TARGET_ARCH_MIPS64
6 6
7 // Note on Mips implementation: 7 // Note on Mips implementation:
8 // 8 //
9 // The result_register() for mips is the 'v0' register, which is defined 9 // The result_register() for mips is the 'v0' register, which is defined
10 // by the ABI to contain function return values. However, the first 10 // by the ABI to contain function return values. However, the first
(...skipping 167 matching lines...)
178 // Possibly allocate a local context. 178 // Possibly allocate a local context.
179 if (info->scope()->num_heap_slots() > 0) { 179 if (info->scope()->num_heap_slots() > 0) {
180 Comment cmnt(masm_, "[ Allocate context"); 180 Comment cmnt(masm_, "[ Allocate context");
181 // Argument to NewContext is the function, which is still in a1. 181 // Argument to NewContext is the function, which is still in a1.
182 bool need_write_barrier = true; 182 bool need_write_barrier = true;
183 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 183 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
184 if (info->scope()->is_script_scope()) { 184 if (info->scope()->is_script_scope()) {
185 __ push(a1); 185 __ push(a1);
186 __ Push(info->scope()->GetScopeInfo(info->isolate())); 186 __ Push(info->scope()->GetScopeInfo(info->isolate()));
187 __ CallRuntime(Runtime::kNewScriptContext); 187 __ CallRuntime(Runtime::kNewScriptContext);
188 PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG); 188 PrepareForBailoutForId(BailoutId::ScriptContext(),
189 Deoptimizer::BailoutState::TOS_REGISTER);
189 // The new target value is not used, clobbering is safe. 190 // The new target value is not used, clobbering is safe.
190 DCHECK_NULL(info->scope()->new_target_var()); 191 DCHECK_NULL(info->scope()->new_target_var());
191 } else { 192 } else {
192 if (info->scope()->new_target_var() != nullptr) { 193 if (info->scope()->new_target_var() != nullptr) {
193 __ push(a3); // Preserve new target. 194 __ push(a3); // Preserve new target.
194 } 195 }
195 if (slots <= FastNewContextStub::kMaximumSlots) { 196 if (slots <= FastNewContextStub::kMaximumSlots) {
196 FastNewContextStub stub(isolate(), slots); 197 FastNewContextStub stub(isolate(), slots);
197 __ CallStub(&stub); 198 __ CallStub(&stub);
198 // Result of FastNewContextStub is always in new space. 199 // Result of FastNewContextStub is always in new space.
(...skipping 35 matching lines...)
234 __ Abort(kExpectedNewSpaceObject); 235 __ Abort(kExpectedNewSpaceObject);
235 __ bind(&done); 236 __ bind(&done);
236 } 237 }
237 } 238 }
238 } 239 }
239 } 240 }
240 241
241 // Register holding this function and new target are both trashed in case we 242 // Register holding this function and new target are both trashed in case we
242 // bailout here. But since that can happen only when new target is not used 243 // bailout here. But since that can happen only when new target is not used
243 // and we allocate a context, the value of |function_in_register| is correct. 244 // and we allocate a context, the value of |function_in_register| is correct.
244 PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS); 245 PrepareForBailoutForId(BailoutId::FunctionContext(),
246 Deoptimizer::BailoutState::NO_REGISTERS);
245 247
246 // Possibly set up a local binding to the this function which is used in 248 // Possibly set up a local binding to the this function which is used in
247 // derived constructors with super calls. 249 // derived constructors with super calls.
248 Variable* this_function_var = scope()->this_function_var(); 250 Variable* this_function_var = scope()->this_function_var();
249 if (this_function_var != nullptr) { 251 if (this_function_var != nullptr) {
250 Comment cmnt(masm_, "[ This function"); 252 Comment cmnt(masm_, "[ This function");
251 if (!function_in_register_a1) { 253 if (!function_in_register_a1) {
252 __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 254 __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
253 // The write barrier clobbers register again, keep it marked as such. 255 // The write barrier clobbers register again, keep it marked as such.
254 } 256 }
(...skipping 40 matching lines...)
295 } 297 }
296 298
297 SetVar(arguments, v0, a1, a2); 299 SetVar(arguments, v0, a1, a2);
298 } 300 }
299 301
300 if (FLAG_trace) { 302 if (FLAG_trace) {
301 __ CallRuntime(Runtime::kTraceEnter); 303 __ CallRuntime(Runtime::kTraceEnter);
302 } 304 }
303 305
304 // Visit the declarations and body. 306 // Visit the declarations and body.
305 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); 307 PrepareForBailoutForId(BailoutId::FunctionEntry(),
308 Deoptimizer::BailoutState::NO_REGISTERS);
306 { 309 {
307 Comment cmnt(masm_, "[ Declarations"); 310 Comment cmnt(masm_, "[ Declarations");
308 VisitDeclarations(scope()->declarations()); 311 VisitDeclarations(scope()->declarations());
309 } 312 }
310 313
311 // Assert that the declarations do not use ICs. Otherwise the debugger 314 // Assert that the declarations do not use ICs. Otherwise the debugger
312 // won't be able to redirect a PC at an IC to the correct IC in newly 315 // won't be able to redirect a PC at an IC to the correct IC in newly
313 // recompiled code. 316 // recompiled code.
314 DCHECK_EQ(0, ic_total_count_); 317 DCHECK_EQ(0, ic_total_count_);
315 318
316 { 319 {
317 Comment cmnt(masm_, "[ Stack check"); 320 Comment cmnt(masm_, "[ Stack check");
318 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); 321 PrepareForBailoutForId(BailoutId::Declarations(),
322 Deoptimizer::BailoutState::NO_REGISTERS);
319 Label ok; 323 Label ok;
320 __ LoadRoot(at, Heap::kStackLimitRootIndex); 324 __ LoadRoot(at, Heap::kStackLimitRootIndex);
321 __ Branch(&ok, hs, sp, Operand(at)); 325 __ Branch(&ok, hs, sp, Operand(at));
322 Handle<Code> stack_check = isolate()->builtins()->StackCheck(); 326 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
323 PredictableCodeSizeScope predictable( 327 PredictableCodeSizeScope predictable(
324 masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET)); 328 masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
325 __ Call(stack_check, RelocInfo::CODE_TARGET); 329 __ Call(stack_check, RelocInfo::CODE_TARGET);
326 __ bind(&ok); 330 __ bind(&ok);
327 } 331 }
328 332
(...skipping 60 matching lines...)
389 __ beq(at, zero_reg, &ok); 393 __ beq(at, zero_reg, &ok);
390 // Call will emit a li t9 first, so it is safe to use the delay slot. 394 // Call will emit a li t9 first, so it is safe to use the delay slot.
391 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); 395 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
392 // Record a mapping of this PC offset to the OSR id. This is used to find 396 // Record a mapping of this PC offset to the OSR id. This is used to find
393 // the AST id from the unoptimized code in order to use it as a key into 397 // the AST id from the unoptimized code in order to use it as a key into
394 // the deoptimization input data found in the optimized code. 398 // the deoptimization input data found in the optimized code.
395 RecordBackEdge(stmt->OsrEntryId()); 399 RecordBackEdge(stmt->OsrEntryId());
396 EmitProfilingCounterReset(); 400 EmitProfilingCounterReset();
397 401
398 __ bind(&ok); 402 __ bind(&ok);
399 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 403 PrepareForBailoutForId(stmt->EntryId(),
404 Deoptimizer::BailoutState::NO_REGISTERS);
400 // Record a mapping of the OSR id to this PC. This is used if the OSR 405 // Record a mapping of the OSR id to this PC. This is used if the OSR
401 // entry becomes the target of a bailout. We don't expect it to be, but 406 // entry becomes the target of a bailout. We don't expect it to be, but
402 // we want it to work if it is. 407 // we want it to work if it is.
403 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); 408 PrepareForBailoutForId(stmt->OsrEntryId(),
409 Deoptimizer::BailoutState::NO_REGISTERS);
404 } 410 }
405 411
406 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence( 412 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
407 bool is_tail_call) { 413 bool is_tail_call) {
408 // Pretend that the exit is a backwards jump to the entry. 414 // Pretend that the exit is a backwards jump to the entry.
409 int weight = 1; 415 int weight = 1;
410 if (info_->ShouldSelfOptimize()) { 416 if (info_->ShouldSelfOptimize()) {
411 weight = FLAG_interrupt_budget / FLAG_self_opt_count; 417 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
412 } else { 418 } else {
413 int distance = masm_->pc_offset(); 419 int distance = masm_->pc_offset();
(...skipping 305 matching lines...)
719 bool should_normalize, 725 bool should_normalize,
720 Label* if_true, 726 Label* if_true,
721 Label* if_false) { 727 Label* if_false) {
722 // Only prepare for bailouts before splits if we're in a test 728 // Only prepare for bailouts before splits if we're in a test
723 // context. Otherwise, we let the Visit function deal with the 729 // context. Otherwise, we let the Visit function deal with the
724 // preparation to avoid preparing with the same AST id twice. 730 // preparation to avoid preparing with the same AST id twice.
725 if (!context()->IsTest()) return; 731 if (!context()->IsTest()) return;
726 732
727 Label skip; 733 Label skip;
728 if (should_normalize) __ Branch(&skip); 734 if (should_normalize) __ Branch(&skip);
729 PrepareForBailout(expr, TOS_REG); 735 PrepareForBailout(expr, Deoptimizer::BailoutState::TOS_REGISTER);
730 if (should_normalize) { 736 if (should_normalize) {
731 __ LoadRoot(a4, Heap::kTrueValueRootIndex); 737 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
732 Split(eq, a0, Operand(a4), if_true, if_false, NULL); 738 Split(eq, a0, Operand(a4), if_true, if_false, NULL);
733 __ bind(&skip); 739 __ bind(&skip);
734 } 740 }
735 } 741 }
736 742
737 743
738 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { 744 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
739 // The variable in the declaration always resides in the current function 745 // The variable in the declaration always resides in the current function
(...skipping 38 matching lines...)
778 } 784 }
779 break; 785 break;
780 786
781 case VariableLocation::CONTEXT: 787 case VariableLocation::CONTEXT:
782 if (hole_init) { 788 if (hole_init) {
783 Comment cmnt(masm_, "[ VariableDeclaration"); 789 Comment cmnt(masm_, "[ VariableDeclaration");
784 EmitDebugCheckDeclarationContext(variable); 790 EmitDebugCheckDeclarationContext(variable);
785 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 791 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
786 __ sd(at, ContextMemOperand(cp, variable->index())); 792 __ sd(at, ContextMemOperand(cp, variable->index()));
787 // No write barrier since the_hole_value is in old space. 793 // No write barrier since the_hole_value is in old space.
788 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 794 PrepareForBailoutForId(proxy->id(),
795 Deoptimizer::BailoutState::NO_REGISTERS);
789 } 796 }
790 break; 797 break;
791 798
792 case VariableLocation::LOOKUP: { 799 case VariableLocation::LOOKUP: {
793 Comment cmnt(masm_, "[ VariableDeclaration"); 800 Comment cmnt(masm_, "[ VariableDeclaration");
794 __ li(a2, Operand(variable->name())); 801 __ li(a2, Operand(variable->name()));
795 // Declaration nodes are always introduced in one of four modes. 802 // Declaration nodes are always introduced in one of four modes.
796 DCHECK(IsDeclaredVariableMode(mode)); 803 DCHECK(IsDeclaredVariableMode(mode));
797 // Push initial value, if any. 804 // Push initial value, if any.
798 // Note: For variables we must not push an initial value (such as 805 // Note: For variables we must not push an initial value (such as
799 // 'undefined') because we may have a (legal) redeclaration and we 806 // 'undefined') because we may have a (legal) redeclaration and we
800 // must not destroy the current value. 807 // must not destroy the current value.
801 if (hole_init) { 808 if (hole_init) {
802 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex); 809 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
803 } else { 810 } else {
804 DCHECK(Smi::FromInt(0) == 0); 811 DCHECK(Smi::FromInt(0) == 0);
805 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value. 812 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
806 } 813 }
807 __ Push(a2, a0); 814 __ Push(a2, a0);
808 __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes())); 815 __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
809 __ CallRuntime(Runtime::kDeclareLookupSlot); 816 __ CallRuntime(Runtime::kDeclareLookupSlot);
810 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 817 PrepareForBailoutForId(proxy->id(),
818 Deoptimizer::BailoutState::NO_REGISTERS);
811 break; 819 break;
812 } 820 }
813 } 821 }
814 } 822 }
815 823
816 824
817 void FullCodeGenerator::VisitFunctionDeclaration( 825 void FullCodeGenerator::VisitFunctionDeclaration(
818 FunctionDeclaration* declaration) { 826 FunctionDeclaration* declaration) {
819 VariableProxy* proxy = declaration->proxy(); 827 VariableProxy* proxy = declaration->proxy();
820 Variable* variable = proxy->var(); 828 Variable* variable = proxy->var();
(...skipping 25 matching lines...)
846 int offset = Context::SlotOffset(variable->index()); 854 int offset = Context::SlotOffset(variable->index());
847 // We know that we have written a function, which is not a smi. 855 // We know that we have written a function, which is not a smi.
848 __ RecordWriteContextSlot(cp, 856 __ RecordWriteContextSlot(cp,
849 offset, 857 offset,
850 result_register(), 858 result_register(),
851 a2, 859 a2,
852 kRAHasBeenSaved, 860 kRAHasBeenSaved,
853 kDontSaveFPRegs, 861 kDontSaveFPRegs,
854 EMIT_REMEMBERED_SET, 862 EMIT_REMEMBERED_SET,
855 OMIT_SMI_CHECK); 863 OMIT_SMI_CHECK);
856 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 864 PrepareForBailoutForId(proxy->id(),
865 Deoptimizer::BailoutState::NO_REGISTERS);
857 break; 866 break;
858 } 867 }
859 868
860 case VariableLocation::LOOKUP: { 869 case VariableLocation::LOOKUP: {
861 Comment cmnt(masm_, "[ FunctionDeclaration"); 870 Comment cmnt(masm_, "[ FunctionDeclaration");
862 __ li(a2, Operand(variable->name())); 871 __ li(a2, Operand(variable->name()));
863 PushOperand(a2); 872 PushOperand(a2);
864 // Push initial value for function declaration. 873 // Push initial value for function declaration.
865 VisitForStackValue(declaration->fun()); 874 VisitForStackValue(declaration->fun());
866 PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes())); 875 PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
867 CallRuntimeWithOperands(Runtime::kDeclareLookupSlot); 876 CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
868 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 877 PrepareForBailoutForId(proxy->id(),
878 Deoptimizer::BailoutState::NO_REGISTERS);
869 break; 879 break;
870 } 880 }
871 } 881 }
872 } 882 }
873 883
874 884
875 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 885 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
876 // Call the runtime to declare the globals. 886 // Call the runtime to declare the globals.
877 __ li(a1, Operand(pairs)); 887 __ li(a1, Operand(pairs));
878 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags()))); 888 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
(...skipping 11 matching lines...)
890 } 900 }
891 901
892 902
893 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { 903 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
894 Comment cmnt(masm_, "[ SwitchStatement"); 904 Comment cmnt(masm_, "[ SwitchStatement");
895 Breakable nested_statement(this, stmt); 905 Breakable nested_statement(this, stmt);
896 SetStatementPosition(stmt); 906 SetStatementPosition(stmt);
897 907
898 // Keep the switch value on the stack until a case matches. 908 // Keep the switch value on the stack until a case matches.
899 VisitForStackValue(stmt->tag()); 909 VisitForStackValue(stmt->tag());
900 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 910 PrepareForBailoutForId(stmt->EntryId(),
911 Deoptimizer::BailoutState::NO_REGISTERS);
901 912
902 ZoneList<CaseClause*>* clauses = stmt->cases(); 913 ZoneList<CaseClause*>* clauses = stmt->cases();
903 CaseClause* default_clause = NULL; // Can occur anywhere in the list. 914 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
904 915
905 Label next_test; // Recycled for each test. 916 Label next_test; // Recycled for each test.
906 // Compile all the tests with branches to their bodies. 917 // Compile all the tests with branches to their bodies.
907 for (int i = 0; i < clauses->length(); i++) { 918 for (int i = 0; i < clauses->length(); i++) {
908 CaseClause* clause = clauses->at(i); 919 CaseClause* clause = clauses->at(i);
909 clause->body_target()->Unuse(); 920 clause->body_target()->Unuse();
910 921
(...skipping 29 matching lines...)
940 951
941 // Record position before stub call for type feedback. 952 // Record position before stub call for type feedback.
942 SetExpressionPosition(clause); 953 SetExpressionPosition(clause);
943 Handle<Code> ic = 954 Handle<Code> ic =
944 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code(); 955 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
945 CallIC(ic, clause->CompareId()); 956 CallIC(ic, clause->CompareId());
946 patch_site.EmitPatchInfo(); 957 patch_site.EmitPatchInfo();
947 958
948 Label skip; 959 Label skip;
949 __ Branch(&skip); 960 __ Branch(&skip);
950 PrepareForBailout(clause, TOS_REG); 961 PrepareForBailout(clause, Deoptimizer::BailoutState::TOS_REGISTER);
951 __ LoadRoot(at, Heap::kTrueValueRootIndex); 962 __ LoadRoot(at, Heap::kTrueValueRootIndex);
952 __ Branch(&next_test, ne, v0, Operand(at)); 963 __ Branch(&next_test, ne, v0, Operand(at));
953 __ Drop(1); 964 __ Drop(1);
954 __ Branch(clause->body_target()); 965 __ Branch(clause->body_target());
955 __ bind(&skip); 966 __ bind(&skip);
956 967
957 __ Branch(&next_test, ne, v0, Operand(zero_reg)); 968 __ Branch(&next_test, ne, v0, Operand(zero_reg));
958 __ Drop(1); // Switch value is no longer needed. 969 __ Drop(1); // Switch value is no longer needed.
959 __ Branch(clause->body_target()); 970 __ Branch(clause->body_target());
960 } 971 }
961 972
962 // Discard the test value and jump to the default if present, otherwise to 973 // Discard the test value and jump to the default if present, otherwise to
963 // the end of the statement. 974 // the end of the statement.
964 __ bind(&next_test); 975 __ bind(&next_test);
965 DropOperands(1); // Switch value is no longer needed. 976 DropOperands(1); // Switch value is no longer needed.
966 if (default_clause == NULL) { 977 if (default_clause == NULL) {
967 __ Branch(nested_statement.break_label()); 978 __ Branch(nested_statement.break_label());
968 } else { 979 } else {
969 __ Branch(default_clause->body_target()); 980 __ Branch(default_clause->body_target());
970 } 981 }
971 982
972 // Compile all the case bodies. 983 // Compile all the case bodies.
973 for (int i = 0; i < clauses->length(); i++) { 984 for (int i = 0; i < clauses->length(); i++) {
974 Comment cmnt(masm_, "[ Case body"); 985 Comment cmnt(masm_, "[ Case body");
975 CaseClause* clause = clauses->at(i); 986 CaseClause* clause = clauses->at(i);
976 __ bind(clause->body_target()); 987 __ bind(clause->body_target());
977 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS); 988 PrepareForBailoutForId(clause->EntryId(),
989 Deoptimizer::BailoutState::NO_REGISTERS);
978 VisitStatements(clause->statements()); 990 VisitStatements(clause->statements());
979 } 991 }
980 992
981 __ bind(nested_statement.break_label()); 993 __ bind(nested_statement.break_label());
982 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 994 PrepareForBailoutForId(stmt->ExitId(),
995 Deoptimizer::BailoutState::NO_REGISTERS);
983 } 996 }
984 997
985 998
986 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { 999 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
987 Comment cmnt(masm_, "[ ForInStatement"); 1000 Comment cmnt(masm_, "[ ForInStatement");
988 SetStatementPosition(stmt, SKIP_BREAK); 1001 SetStatementPosition(stmt, SKIP_BREAK);
989 1002
990 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot(); 1003 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
991 1004
992 // Get the object to enumerate over. If the object is null or undefined, skip 1005 // Get the object to enumerate over. If the object is null or undefined, skip
(...skipping 16 matching lines...)
1009 Operand(FIRST_JS_RECEIVER_TYPE)); 1022 Operand(FIRST_JS_RECEIVER_TYPE));
1010 __ LoadRoot(at, Heap::kNullValueRootIndex); // In delay slot. 1023 __ LoadRoot(at, Heap::kNullValueRootIndex); // In delay slot.
1011 __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at)); 1024 __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
1012 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); // In delay slot. 1025 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); // In delay slot.
1013 __ Branch(&exit, eq, a0, Operand(at)); 1026 __ Branch(&exit, eq, a0, Operand(at));
1014 __ bind(&convert); 1027 __ bind(&convert);
1015 ToObjectStub stub(isolate()); 1028 ToObjectStub stub(isolate());
1016 __ CallStub(&stub); 1029 __ CallStub(&stub);
1017 __ mov(a0, v0); 1030 __ mov(a0, v0);
1018 __ bind(&done_convert); 1031 __ bind(&done_convert);
1019 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG); 1032 PrepareForBailoutForId(stmt->ToObjectId(),
1033 Deoptimizer::BailoutState::TOS_REGISTER);
1020 __ push(a0); 1034 __ push(a0);
1021 1035
1022 // Check cache validity in generated code. If we cannot guarantee cache 1036 // Check cache validity in generated code. If we cannot guarantee cache
1023 // validity, call the runtime system to check cache validity or get the 1037 // validity, call the runtime system to check cache validity or get the
1024 // property names in a fixed array. Note: Proxies never have an enum cache, 1038 // property names in a fixed array. Note: Proxies never have an enum cache,
1025 // so will always take the slow path. 1039 // so will always take the slow path.
1026 Label call_runtime; 1040 Label call_runtime;
1027 __ CheckEnumCache(&call_runtime); 1041 __ CheckEnumCache(&call_runtime);
1028 1042
1029 // The enum cache is valid. Load the map of the object being 1043 // The enum cache is valid. Load the map of the object being
1030 // iterated over and use the cache for the iteration. 1044 // iterated over and use the cache for the iteration.
1031 Label use_cache; 1045 Label use_cache;
1032 __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset)); 1046 __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1033 __ Branch(&use_cache); 1047 __ Branch(&use_cache);
1034 1048
1035 // Get the set of properties to enumerate. 1049 // Get the set of properties to enumerate.
1036 __ bind(&call_runtime); 1050 __ bind(&call_runtime);
1037 __ push(a0); // Duplicate the enumerable object on the stack. 1051 __ push(a0); // Duplicate the enumerable object on the stack.
1038 __ CallRuntime(Runtime::kForInEnumerate); 1052 __ CallRuntime(Runtime::kForInEnumerate);
1039 PrepareForBailoutForId(stmt->EnumId(), TOS_REG); 1053 PrepareForBailoutForId(stmt->EnumId(),
1054 Deoptimizer::BailoutState::TOS_REGISTER);
1040 1055
1041 // If we got a map from the runtime call, we can do a fast 1056 // If we got a map from the runtime call, we can do a fast
1042 // modification check. Otherwise, we got a fixed array, and we have 1057 // modification check. Otherwise, we got a fixed array, and we have
1043 // to do a slow check. 1058 // to do a slow check.
1044 Label fixed_array; 1059 Label fixed_array;
1045 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); 1060 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1046 __ LoadRoot(at, Heap::kMetaMapRootIndex); 1061 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1047 __ Branch(&fixed_array, ne, a2, Operand(at)); 1062 __ Branch(&fixed_array, ne, a2, Operand(at));
1048 1063
1049 // We got a map in register v0. Get the enumeration cache from it. 1064 // We got a map in register v0. Get the enumeration cache from it.
(...skipping 17 matching lines...)
1067 __ Drop(1); 1082 __ Drop(1);
1068 __ jmp(&exit); 1083 __ jmp(&exit);
1069 1084
1070 // We got a fixed array in register v0. Iterate through that. 1085 // We got a fixed array in register v0. Iterate through that.
1071 __ bind(&fixed_array); 1086 __ bind(&fixed_array);
1072 1087
1073 __ li(a1, Operand(Smi::FromInt(1))); // Smi(1) indicates slow check 1088 __ li(a1, Operand(Smi::FromInt(1))); // Smi(1) indicates slow check
1074 __ Push(a1, v0); // Smi and array 1089 __ Push(a1, v0); // Smi and array
1075 __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset)); 1090 __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1076 __ Push(a1); // Fixed array length (as smi). 1091 __ Push(a1); // Fixed array length (as smi).
1077 PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS); 1092 PrepareForBailoutForId(stmt->PrepareId(),
1093 Deoptimizer::BailoutState::NO_REGISTERS);
1078 __ li(a0, Operand(Smi::FromInt(0))); 1094 __ li(a0, Operand(Smi::FromInt(0)));
1079 __ Push(a0); // Initial index. 1095 __ Push(a0); // Initial index.
1080 1096
1081 // Generate code for doing the condition check. 1097 // Generate code for doing the condition check.
1082 __ bind(&loop); 1098 __ bind(&loop);
1083 SetExpressionAsStatementPosition(stmt->each()); 1099 SetExpressionAsStatementPosition(stmt->each());
1084 1100
1085 // Load the current count to a0, load the length to a1. 1101 // Load the current count to a0, load the length to a1.
1086 __ ld(a0, MemOperand(sp, 0 * kPointerSize)); 1102 __ ld(a0, MemOperand(sp, 0 * kPointerSize));
1087 __ ld(a1, MemOperand(sp, 1 * kPointerSize)); 1103 __ ld(a1, MemOperand(sp, 1 * kPointerSize));
(...skipping 21 matching lines...)
1109 int const vector_index = SmiFromSlot(slot)->value(); 1125 int const vector_index = SmiFromSlot(slot)->value();
1110 __ EmitLoadTypeFeedbackVector(a0); 1126 __ EmitLoadTypeFeedbackVector(a0);
1111 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate()))); 1127 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1112 __ sd(a2, FieldMemOperand(a0, FixedArray::OffsetOfElementAt(vector_index))); 1128 __ sd(a2, FieldMemOperand(a0, FixedArray::OffsetOfElementAt(vector_index)));
1113 1129
1114 // Convert the entry to a string or (smi) 0 if it isn't a property 1130 // Convert the entry to a string or (smi) 0 if it isn't a property
1115 // any more. If the property has been removed while iterating, we 1131 // any more. If the property has been removed while iterating, we
1116 // just skip it. 1132 // just skip it.
1117 __ Push(a1, a3); // Enumerable and current entry. 1133 __ Push(a1, a3); // Enumerable and current entry.
1118 __ CallRuntime(Runtime::kForInFilter); 1134 __ CallRuntime(Runtime::kForInFilter);
1119 PrepareForBailoutForId(stmt->FilterId(), TOS_REG); 1135 PrepareForBailoutForId(stmt->FilterId(),
1136 Deoptimizer::BailoutState::TOS_REGISTER);
1120 __ mov(a3, result_register()); 1137 __ mov(a3, result_register());
1121 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 1138 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1122 __ Branch(loop_statement.continue_label(), eq, a3, Operand(at)); 1139 __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));
1123 1140
1124 // Update the 'each' property or variable from the possibly filtered 1141 // Update the 'each' property or variable from the possibly filtered
1125 // entry in register a3. 1142 // entry in register a3.
1126 __ bind(&update_each); 1143 __ bind(&update_each);
1127 __ mov(result_register(), a3); 1144 __ mov(result_register(), a3);
1128 // Perform the assignment as if via '='. 1145 // Perform the assignment as if via '='.
1129 { EffectContext context(this); 1146 { EffectContext context(this);
1130 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot()); 1147 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1131 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS); 1148 PrepareForBailoutForId(stmt->AssignmentId(),
1149 Deoptimizer::BailoutState::NO_REGISTERS);
1132 } 1150 }
1133 1151
1134 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body(). 1152 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1135 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); 1153 PrepareForBailoutForId(stmt->BodyId(),
1154 Deoptimizer::BailoutState::NO_REGISTERS);
1136 // Generate code for the body of the loop. 1155 // Generate code for the body of the loop.
1137 Visit(stmt->body()); 1156 Visit(stmt->body());
1138 1157
1139 // Generate code for the going to the next element by incrementing 1158 // Generate code for the going to the next element by incrementing
1140 // the index (smi) stored on top of the stack. 1159 // the index (smi) stored on top of the stack.
1141 __ bind(loop_statement.continue_label()); 1160 __ bind(loop_statement.continue_label());
1142 __ pop(a0); 1161 __ pop(a0);
1143 __ Daddu(a0, a0, Operand(Smi::FromInt(1))); 1162 __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
1144 __ push(a0); 1163 __ push(a0);
1145 1164
1146 EmitBackEdgeBookkeeping(stmt, &loop); 1165 EmitBackEdgeBookkeeping(stmt, &loop);
1147 __ Branch(&loop); 1166 __ Branch(&loop);
1148 1167
1149 // Remove the pointers stored on the stack. 1168 // Remove the pointers stored on the stack.
1150 __ bind(loop_statement.break_label()); 1169 __ bind(loop_statement.break_label());
1151 DropOperands(5); 1170 DropOperands(5);
1152 1171
1153 // Exit and decrement the loop depth. 1172 // Exit and decrement the loop depth.
1154 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1173 PrepareForBailoutForId(stmt->ExitId(),
1174 Deoptimizer::BailoutState::NO_REGISTERS);
1155 __ bind(&exit); 1175 __ bind(&exit);
1156 decrement_loop_depth(); 1176 decrement_loop_depth();
1157 } 1177 }
1158 1178
1159 1179
1160 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset, 1180 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1161 FeedbackVectorSlot slot) { 1181 FeedbackVectorSlot slot) {
1162 DCHECK(NeedsHomeObject(initializer)); 1182 DCHECK(NeedsHomeObject(initializer));
1163 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); 1183 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1164 __ li(StoreDescriptor::NameRegister(), 1184 __ li(StoreDescriptor::NameRegister(),
(...skipping 138 matching lines...)
1303 __ li(LoadDescriptor::SlotRegister(), 1323 __ li(LoadDescriptor::SlotRegister(),
1304 Operand(SmiFromSlot(proxy->VariableFeedbackSlot()))); 1324 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1305 CallLoadIC(typeof_mode); 1325 CallLoadIC(typeof_mode);
1306 } 1326 }
1307 1327
1308 1328
1309 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy, 1329 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1310 TypeofMode typeof_mode) { 1330 TypeofMode typeof_mode) {
1311 // Record position before possible IC call. 1331 // Record position before possible IC call.
1312 SetExpressionPosition(proxy); 1332 SetExpressionPosition(proxy);
1313 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS); 1333 PrepareForBailoutForId(proxy->BeforeId(),
1334 Deoptimizer::BailoutState::NO_REGISTERS);
1314 Variable* var = proxy->var(); 1335 Variable* var = proxy->var();
1315 1336
1316 // Three cases: global variables, lookup variables, and all other types of 1337 // Three cases: global variables, lookup variables, and all other types of
1317 // variables. 1338 // variables.
1318 switch (var->location()) { 1339 switch (var->location()) {
1319 case VariableLocation::GLOBAL: 1340 case VariableLocation::GLOBAL:
1320 case VariableLocation::UNALLOCATED: { 1341 case VariableLocation::UNALLOCATED: {
1321 Comment cmnt(masm_, "[ Global variable"); 1342 Comment cmnt(masm_, "[ Global variable");
1322 EmitGlobalVariableLoad(proxy, typeof_mode); 1343 EmitGlobalVariableLoad(proxy, typeof_mode);
1323 context()->Plug(v0); 1344 context()->Plug(v0);
(...skipping 86 matching lines...)
1410 __ li(a1, Operand(constant_properties)); 1431 __ li(a1, Operand(constant_properties));
1411 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags()))); 1432 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1412 if (MustCreateObjectLiteralWithRuntime(expr)) { 1433 if (MustCreateObjectLiteralWithRuntime(expr)) {
1413 __ Push(a3, a2, a1, a0); 1434 __ Push(a3, a2, a1, a0);
1414 __ CallRuntime(Runtime::kCreateObjectLiteral); 1435 __ CallRuntime(Runtime::kCreateObjectLiteral);
1415 } else { 1436 } else {
1416 FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); 1437 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1417 __ CallStub(&stub); 1438 __ CallStub(&stub);
1418 RestoreContext(); 1439 RestoreContext();
1419 } 1440 }
1420 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); 1441 PrepareForBailoutForId(expr->CreateLiteralId(),
1442 Deoptimizer::BailoutState::TOS_REGISTER);
1421 1443
1422 // If result_saved is true the result is on top of the stack. If 1444 // If result_saved is true the result is on top of the stack. If
1423 // result_saved is false the result is in v0. 1445 // result_saved is false the result is in v0.
1424 bool result_saved = false; 1446 bool result_saved = false;
1425 1447
1426 AccessorTable accessor_table(zone()); 1448 AccessorTable accessor_table(zone());
1427 int property_index = 0; 1449 int property_index = 0;
1428 for (; property_index < expr->properties()->length(); property_index++) { 1450 for (; property_index < expr->properties()->length(); property_index++) {
1429 ObjectLiteral::Property* property = expr->properties()->at(property_index); 1451 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1430 if (property->is_computed_name()) break; 1452 if (property->is_computed_name()) break;
(...skipping 16 matching lines...)
1447 // contains computed properties with an uninitialized value. 1469 // contains computed properties with an uninitialized value.
1448 if (key->value()->IsInternalizedString()) { 1470 if (key->value()->IsInternalizedString()) {
1449 if (property->emit_store()) { 1471 if (property->emit_store()) {
1450 VisitForAccumulatorValue(value); 1472 VisitForAccumulatorValue(value);
1451 __ mov(StoreDescriptor::ValueRegister(), result_register()); 1473 __ mov(StoreDescriptor::ValueRegister(), result_register());
1452 DCHECK(StoreDescriptor::ValueRegister().is(a0)); 1474 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1453 __ li(StoreDescriptor::NameRegister(), Operand(key->value())); 1475 __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1454 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); 1476 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1455 EmitLoadStoreICSlot(property->GetSlot(0)); 1477 EmitLoadStoreICSlot(property->GetSlot(0));
1456 CallStoreIC(); 1478 CallStoreIC();
1457 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1479 PrepareForBailoutForId(key->id(),
1480 Deoptimizer::BailoutState::NO_REGISTERS);
1458 1481
1459 if (NeedsHomeObject(value)) { 1482 if (NeedsHomeObject(value)) {
1460 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1)); 1483 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1461 } 1484 }
1462 } else { 1485 } else {
1463 VisitForEffect(value); 1486 VisitForEffect(value);
1464 } 1487 }
1465 break; 1488 break;
1466 } 1489 }
1467 // Duplicate receiver on stack. 1490 // Duplicate receiver on stack.
(...skipping 13 matching lines...)
1481 } 1504 }
1482 break; 1505 break;
1483 case ObjectLiteral::Property::PROTOTYPE: 1506 case ObjectLiteral::Property::PROTOTYPE:
1484 // Duplicate receiver on stack. 1507 // Duplicate receiver on stack.
1485 __ ld(a0, MemOperand(sp)); 1508 __ ld(a0, MemOperand(sp));
1486 PushOperand(a0); 1509 PushOperand(a0);
1487 VisitForStackValue(value); 1510 VisitForStackValue(value);
1488 DCHECK(property->emit_store()); 1511 DCHECK(property->emit_store());
1489 CallRuntimeWithOperands(Runtime::kInternalSetPrototype); 1512 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1490 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), 1513 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1491 NO_REGISTERS); 1514 Deoptimizer::BailoutState::NO_REGISTERS);
1492 break; 1515 break;
1493 case ObjectLiteral::Property::GETTER: 1516 case ObjectLiteral::Property::GETTER:
1494 if (property->emit_store()) { 1517 if (property->emit_store()) {
1495 accessor_table.lookup(key)->second->getter = property; 1518 accessor_table.lookup(key)->second->getter = property;
1496 } 1519 }
1497 break; 1520 break;
1498 case ObjectLiteral::Property::SETTER: 1521 case ObjectLiteral::Property::SETTER:
1499 if (property->emit_store()) { 1522 if (property->emit_store()) {
1500 accessor_table.lookup(key)->second->setter = property; 1523 accessor_table.lookup(key)->second->setter = property;
1501 } 1524 }
(...skipping 36 matching lines...)
1538 1561
1539 __ ld(a0, MemOperand(sp)); // Duplicate receiver. 1562 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1540 PushOperand(a0); 1563 PushOperand(a0);
1541 1564
1542 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) { 1565 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1543 DCHECK(!property->is_computed_name()); 1566 DCHECK(!property->is_computed_name());
1544 VisitForStackValue(value); 1567 VisitForStackValue(value);
1545 DCHECK(property->emit_store()); 1568 DCHECK(property->emit_store());
1546 CallRuntimeWithOperands(Runtime::kInternalSetPrototype); 1569 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1547 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), 1570 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1548 NO_REGISTERS); 1571 Deoptimizer::BailoutState::NO_REGISTERS);
1549 } else { 1572 } else {
1550 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index)); 1573 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1551 VisitForStackValue(value); 1574 VisitForStackValue(value);
1552 if (NeedsHomeObject(value)) { 1575 if (NeedsHomeObject(value)) {
1553 EmitSetHomeObject(value, 2, property->GetSlot()); 1576 EmitSetHomeObject(value, 2, property->GetSlot());
1554 } 1577 }
1555 1578
1556 switch (property->kind()) { 1579 switch (property->kind()) {
1557 case ObjectLiteral::Property::CONSTANT: 1580 case ObjectLiteral::Property::CONSTANT:
1558 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1581 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
(...skipping 51 matching lines...)
1610 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); 1633 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1611 __ li(a1, Operand(constant_elements)); 1634 __ li(a1, Operand(constant_elements));
1612 if (MustCreateArrayLiteralWithRuntime(expr)) { 1635 if (MustCreateArrayLiteralWithRuntime(expr)) {
1613 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags()))); 1636 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1614 __ Push(a3, a2, a1, a0); 1637 __ Push(a3, a2, a1, a0);
1615 __ CallRuntime(Runtime::kCreateArrayLiteral); 1638 __ CallRuntime(Runtime::kCreateArrayLiteral);
1616 } else { 1639 } else {
1617 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); 1640 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1618 __ CallStub(&stub); 1641 __ CallStub(&stub);
1619 } 1642 }
1620 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); 1643 PrepareForBailoutForId(expr->CreateLiteralId(),
1644 Deoptimizer::BailoutState::TOS_REGISTER);
1621 1645
1622 bool result_saved = false; // Is the result saved to the stack? 1646 bool result_saved = false; // Is the result saved to the stack?
1623 ZoneList<Expression*>* subexprs = expr->values(); 1647 ZoneList<Expression*>* subexprs = expr->values();
1624 int length = subexprs->length(); 1648 int length = subexprs->length();
1625 1649
1626 // Emit code to evaluate all the non-constant subexpressions and to store 1650 // Emit code to evaluate all the non-constant subexpressions and to store
1627 // them into the newly cloned array. 1651 // them into the newly cloned array.
1628 int array_index = 0; 1652 int array_index = 0;
1629 for (; array_index < length; array_index++) { 1653 for (; array_index < length; array_index++) {
1630 Expression* subexpr = subexprs->at(array_index); 1654 Expression* subexpr = subexprs->at(array_index);
(...skipping 11 matching lines...)
1642 VisitForAccumulatorValue(subexpr); 1666 VisitForAccumulatorValue(subexpr);
1643 1667
1644 __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index))); 1668 __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1645 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 1669 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1646 __ mov(StoreDescriptor::ValueRegister(), result_register()); 1670 __ mov(StoreDescriptor::ValueRegister(), result_register());
1647 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot()); 1671 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1648 Handle<Code> ic = 1672 Handle<Code> ic =
1649 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); 1673 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1650 CallIC(ic); 1674 CallIC(ic);
1651 1675
1652 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); 1676 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1677 Deoptimizer::BailoutState::NO_REGISTERS);
1653 } 1678 }
1654 1679
1655 // In case the array literal contains spread expressions it has two parts. The 1680 // In case the array literal contains spread expressions it has two parts. The
1656 // first part is the "static" array which has a literal index is handled 1681 // first part is the "static" array which has a literal index is handled
1657 // above. The second part is the part after the first spread expression 1682 // above. The second part is the part after the first spread expression
1658 // (inclusive) and these elements gets appended to the array. Note that the 1683 // (inclusive) and these elements gets appended to the array. Note that the
1659 // number elements an iterable produces is unknown ahead of time. 1684 // number elements an iterable produces is unknown ahead of time.
1660 if (array_index < length && result_saved) { 1685 if (array_index < length && result_saved) {
1661 PopOperand(v0); 1686 PopOperand(v0);
1662 result_saved = false; 1687 result_saved = false;
1663 } 1688 }
1664 for (; array_index < length; array_index++) { 1689 for (; array_index < length; array_index++) {
1665 Expression* subexpr = subexprs->at(array_index); 1690 Expression* subexpr = subexprs->at(array_index);
1666 1691
1667 PushOperand(v0); 1692 PushOperand(v0);
1668 DCHECK(!subexpr->IsSpread()); 1693 DCHECK(!subexpr->IsSpread());
1669 VisitForStackValue(subexpr); 1694 VisitForStackValue(subexpr);
1670 CallRuntimeWithOperands(Runtime::kAppendElement); 1695 CallRuntimeWithOperands(Runtime::kAppendElement);
1671 1696
1672 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); 1697 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1698 Deoptimizer::BailoutState::NO_REGISTERS);
1673 } 1699 }
1674 1700
1675 if (result_saved) { 1701 if (result_saved) {
1676 context()->PlugTOS(); 1702 context()->PlugTOS();
1677 } else { 1703 } else {
1678 context()->Plug(v0); 1704 context()->Plug(v0);
1679 } 1705 }
1680 } 1706 }
1681 1707
1682 1708
(...skipping 62 matching lines...)
1745 break; 1771 break;
1746 } 1772 }
1747 1773
1748 // For compound assignments we need another deoptimization point after the 1774 // For compound assignments we need another deoptimization point after the
1749 // variable/property load. 1775 // variable/property load.
1750 if (expr->is_compound()) { 1776 if (expr->is_compound()) {
1751 { AccumulatorValueContext context(this); 1777 { AccumulatorValueContext context(this);
1752 switch (assign_type) { 1778 switch (assign_type) {
1753 case VARIABLE: 1779 case VARIABLE:
1754 EmitVariableLoad(expr->target()->AsVariableProxy()); 1780 EmitVariableLoad(expr->target()->AsVariableProxy());
1755 PrepareForBailout(expr->target(), TOS_REG); 1781 PrepareForBailout(expr->target(),
1782 Deoptimizer::BailoutState::TOS_REGISTER);
1756 break; 1783 break;
1757 case NAMED_PROPERTY: 1784 case NAMED_PROPERTY:
1758 EmitNamedPropertyLoad(property); 1785 EmitNamedPropertyLoad(property);
1759 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1786 PrepareForBailoutForId(property->LoadId(),
1787 Deoptimizer::BailoutState::TOS_REGISTER);
1760 break; 1788 break;
1761 case NAMED_SUPER_PROPERTY: 1789 case NAMED_SUPER_PROPERTY:
1762 EmitNamedSuperPropertyLoad(property); 1790 EmitNamedSuperPropertyLoad(property);
1763 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1791 PrepareForBailoutForId(property->LoadId(),
1792 Deoptimizer::BailoutState::TOS_REGISTER);
1764 break; 1793 break;
1765 case KEYED_SUPER_PROPERTY: 1794 case KEYED_SUPER_PROPERTY:
1766 EmitKeyedSuperPropertyLoad(property); 1795 EmitKeyedSuperPropertyLoad(property);
1767 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1796 PrepareForBailoutForId(property->LoadId(),
1797 Deoptimizer::BailoutState::TOS_REGISTER);
1768 break; 1798 break;
1769 case KEYED_PROPERTY: 1799 case KEYED_PROPERTY:
1770 EmitKeyedPropertyLoad(property); 1800 EmitKeyedPropertyLoad(property);
1771 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1801 PrepareForBailoutForId(property->LoadId(),
1802 Deoptimizer::BailoutState::TOS_REGISTER);
1772 break; 1803 break;
1773 } 1804 }
1774 } 1805 }
1775 1806
1776 Token::Value op = expr->binary_op(); 1807 Token::Value op = expr->binary_op();
1777 PushOperand(v0); // Left operand goes on the stack. 1808 PushOperand(v0); // Left operand goes on the stack.
1778 VisitForAccumulatorValue(expr->value()); 1809 VisitForAccumulatorValue(expr->value());
1779 1810
1780 AccumulatorValueContext context(this); 1811 AccumulatorValueContext context(this);
1781 if (ShouldInlineSmiCase(op)) { 1812 if (ShouldInlineSmiCase(op)) {
1782 EmitInlineSmiBinaryOp(expr->binary_operation(), 1813 EmitInlineSmiBinaryOp(expr->binary_operation(),
1783 op, 1814 op,
1784 expr->target(), 1815 expr->target(),
1785 expr->value()); 1816 expr->value());
1786 } else { 1817 } else {
1787 EmitBinaryOp(expr->binary_operation(), op); 1818 EmitBinaryOp(expr->binary_operation(), op);
1788 } 1819 }
1789 1820
1790 // Deoptimization point in case the binary operation may have side effects. 1821 // Deoptimization point in case the binary operation may have side effects.
1791 PrepareForBailout(expr->binary_operation(), TOS_REG); 1822 PrepareForBailout(expr->binary_operation(),
1823 Deoptimizer::BailoutState::TOS_REGISTER);
1792 } else { 1824 } else {
1793 VisitForAccumulatorValue(expr->value()); 1825 VisitForAccumulatorValue(expr->value());
1794 } 1826 }
1795 1827
1796 SetExpressionPosition(expr); 1828 SetExpressionPosition(expr);
1797 1829
1798 // Store the value. 1830 // Store the value.
1799 switch (assign_type) { 1831 switch (assign_type) {
1800 case VARIABLE: 1832 case VARIABLE:
1801 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), 1833 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1802 expr->op(), expr->AssignmentSlot()); 1834 expr->op(), expr->AssignmentSlot());
1803 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 1835 PrepareForBailoutForId(expr->AssignmentId(),
1836 Deoptimizer::BailoutState::TOS_REGISTER);
1804 context()->Plug(v0); 1837 context()->Plug(v0);
1805 break; 1838 break;
1806 case NAMED_PROPERTY: 1839 case NAMED_PROPERTY:
1807 EmitNamedPropertyAssignment(expr); 1840 EmitNamedPropertyAssignment(expr);
1808 break; 1841 break;
1809 case NAMED_SUPER_PROPERTY: 1842 case NAMED_SUPER_PROPERTY:
1810 EmitNamedSuperPropertyStore(property); 1843 EmitNamedSuperPropertyStore(property);
1811 context()->Plug(v0); 1844 context()->Plug(v0);
1812 break; 1845 break;
1813 case KEYED_SUPER_PROPERTY: 1846 case KEYED_SUPER_PROPERTY:
(...skipping 454 matching lines...)
2268 DCHECK(prop != NULL); 2301 DCHECK(prop != NULL);
2269 DCHECK(prop->key()->IsLiteral()); 2302 DCHECK(prop->key()->IsLiteral());
2270 2303
2271 __ mov(StoreDescriptor::ValueRegister(), result_register()); 2304 __ mov(StoreDescriptor::ValueRegister(), result_register());
2272 __ li(StoreDescriptor::NameRegister(), 2305 __ li(StoreDescriptor::NameRegister(),
2273 Operand(prop->key()->AsLiteral()->value())); 2306 Operand(prop->key()->AsLiteral()->value()));
2274 PopOperand(StoreDescriptor::ReceiverRegister()); 2307 PopOperand(StoreDescriptor::ReceiverRegister());
2275 EmitLoadStoreICSlot(expr->AssignmentSlot()); 2308 EmitLoadStoreICSlot(expr->AssignmentSlot());
2276 CallStoreIC(); 2309 CallStoreIC();
2277 2310
2278 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2311 PrepareForBailoutForId(expr->AssignmentId(),
2312 Deoptimizer::BailoutState::TOS_REGISTER);
2279 context()->Plug(v0); 2313 context()->Plug(v0);
2280 } 2314 }
2281 2315
2282 2316
2283 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) { 2317 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2284 // Assignment to named property of super. 2318 // Assignment to named property of super.
2285 // v0 : value 2319 // v0 : value
2286 // stack : receiver ('this'), home_object 2320 // stack : receiver ('this'), home_object
2287 DCHECK(prop != NULL); 2321 DCHECK(prop != NULL);
2288 Literal* key = prop->key()->AsLiteral(); 2322 Literal* key = prop->key()->AsLiteral();
(...skipping 30 matching lines...)
2319 __ mov(StoreDescriptor::ValueRegister(), result_register()); 2353 __ mov(StoreDescriptor::ValueRegister(), result_register());
2320 PopOperands(StoreDescriptor::ReceiverRegister(), 2354 PopOperands(StoreDescriptor::ReceiverRegister(),
2321 StoreDescriptor::NameRegister()); 2355 StoreDescriptor::NameRegister());
2322 DCHECK(StoreDescriptor::ValueRegister().is(a0)); 2356 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2323 2357
2324 Handle<Code> ic = 2358 Handle<Code> ic =
2325 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); 2359 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2326 EmitLoadStoreICSlot(expr->AssignmentSlot()); 2360 EmitLoadStoreICSlot(expr->AssignmentSlot());
2327 CallIC(ic); 2361 CallIC(ic);
2328 2362
2329 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2363 PrepareForBailoutForId(expr->AssignmentId(),
2364 Deoptimizer::BailoutState::TOS_REGISTER);
2330 context()->Plug(v0); 2365 context()->Plug(v0);
2331 } 2366 }
2332 2367
2333 2368
2334 void FullCodeGenerator::CallIC(Handle<Code> code, 2369 void FullCodeGenerator::CallIC(Handle<Code> code,
2335 TypeFeedbackId id) { 2370 TypeFeedbackId id) {
2336 ic_total_count_++; 2371 ic_total_count_++;
2337 __ Call(code, RelocInfo::CODE_TARGET, id); 2372 __ Call(code, RelocInfo::CODE_TARGET, id);
2338 } 2373 }
2339 2374
2340 2375
2341 // Code common for calls using the IC. 2376 // Code common for calls using the IC.
2342 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { 2377 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2343 Expression* callee = expr->expression(); 2378 Expression* callee = expr->expression();
2344 2379
2345 // Get the target function. 2380 // Get the target function.
2346 ConvertReceiverMode convert_mode; 2381 ConvertReceiverMode convert_mode;
2347 if (callee->IsVariableProxy()) { 2382 if (callee->IsVariableProxy()) {
2348 { StackValueContext context(this); 2383 { StackValueContext context(this);
2349 EmitVariableLoad(callee->AsVariableProxy()); 2384 EmitVariableLoad(callee->AsVariableProxy());
2350 PrepareForBailout(callee, NO_REGISTERS); 2385 PrepareForBailout(callee, Deoptimizer::BailoutState::NO_REGISTERS);
2351 } 2386 }
2352 // Push undefined as receiver. This is patched in the method prologue if it 2387 // Push undefined as receiver. This is patched in the method prologue if it
2353 // is a sloppy mode method. 2388 // is a sloppy mode method.
2354 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 2389 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2355 PushOperand(at); 2390 PushOperand(at);
2356 convert_mode = ConvertReceiverMode::kNullOrUndefined; 2391 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2357 } else { 2392 } else {
2358 // Load the function from the receiver. 2393 // Load the function from the receiver.
2359 DCHECK(callee->IsProperty()); 2394 DCHECK(callee->IsProperty());
2360 DCHECK(!callee->AsProperty()->IsSuperAccess()); 2395 DCHECK(!callee->AsProperty()->IsSuperAccess());
2361 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 2396 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2362 EmitNamedPropertyLoad(callee->AsProperty()); 2397 EmitNamedPropertyLoad(callee->AsProperty());
2363 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2398 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2399 Deoptimizer::BailoutState::TOS_REGISTER);
2364 // Push the target function under the receiver. 2400 // Push the target function under the receiver.
2365 __ ld(at, MemOperand(sp, 0)); 2401 __ ld(at, MemOperand(sp, 0));
2366 PushOperand(at); 2402 PushOperand(at);
2367 __ sd(v0, MemOperand(sp, kPointerSize)); 2403 __ sd(v0, MemOperand(sp, kPointerSize));
2368 convert_mode = ConvertReceiverMode::kNotNullOrUndefined; 2404 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2369 } 2405 }
2370 2406
2371 EmitCall(expr, convert_mode); 2407 EmitCall(expr, convert_mode);
2372 } 2408 }
2373 2409
(...skipping 16 matching lines...)
2390 PushOperands(scratch, v0, v0, scratch); 2426 PushOperands(scratch, v0, v0, scratch);
2391 PushOperand(key->value()); 2427 PushOperand(key->value());
2392 2428
2393 // Stack here: 2429 // Stack here:
2394 // - home_object 2430 // - home_object
2395 // - this (receiver) 2431 // - this (receiver)
2396 // - this (receiver) <-- LoadFromSuper will pop here and below. 2432 // - this (receiver) <-- LoadFromSuper will pop here and below.
2397 // - home_object 2433 // - home_object
2398 // - key 2434 // - key
2399 CallRuntimeWithOperands(Runtime::kLoadFromSuper); 2435 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
2400 PrepareForBailoutForId(prop->LoadId(), TOS_REG); 2436 PrepareForBailoutForId(prop->LoadId(),
2437 Deoptimizer::BailoutState::TOS_REGISTER);
2401 2438
2402 // Replace home_object with target function. 2439 // Replace home_object with target function.
2403 __ sd(v0, MemOperand(sp, kPointerSize)); 2440 __ sd(v0, MemOperand(sp, kPointerSize));
2404 2441
2405 // Stack here: 2442 // Stack here:
2406 // - target function 2443 // - target function
2407 // - this (receiver) 2444 // - this (receiver)
2408 EmitCall(expr); 2445 EmitCall(expr);
2409 } 2446 }
2410 2447
2411 2448
2412 // Code common for calls using the IC. 2449 // Code common for calls using the IC.
2413 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, 2450 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2414 Expression* key) { 2451 Expression* key) {
2415 // Load the key. 2452 // Load the key.
2416 VisitForAccumulatorValue(key); 2453 VisitForAccumulatorValue(key);
2417 2454
2418 Expression* callee = expr->expression(); 2455 Expression* callee = expr->expression();
2419 2456
2420 // Load the function from the receiver. 2457 // Load the function from the receiver.
2421 DCHECK(callee->IsProperty()); 2458 DCHECK(callee->IsProperty());
2422 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 2459 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2423 __ Move(LoadDescriptor::NameRegister(), v0); 2460 __ Move(LoadDescriptor::NameRegister(), v0);
2424 EmitKeyedPropertyLoad(callee->AsProperty()); 2461 EmitKeyedPropertyLoad(callee->AsProperty());
2425 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2462 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2463 Deoptimizer::BailoutState::TOS_REGISTER);
2426 2464
2427 // Push the target function under the receiver. 2465 // Push the target function under the receiver.
2428 __ ld(at, MemOperand(sp, 0)); 2466 __ ld(at, MemOperand(sp, 0));
2429 PushOperand(at); 2467 PushOperand(at);
2430 __ sd(v0, MemOperand(sp, kPointerSize)); 2468 __ sd(v0, MemOperand(sp, kPointerSize));
2431 2469
2432 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined); 2470 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2433 } 2471 }
2434 2472
2435 2473
(...skipping 13 matching lines...)
2449 PushOperands(scratch, v0, v0, scratch); 2487 PushOperands(scratch, v0, v0, scratch);
2450 VisitForStackValue(prop->key()); 2488 VisitForStackValue(prop->key());
2451 2489
2452 // Stack here: 2490 // Stack here:
2453 // - home_object 2491 // - home_object
2454 // - this (receiver) 2492 // - this (receiver)
2455 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below. 2493 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2456 // - home_object 2494 // - home_object
2457 // - key 2495 // - key
2458 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper); 2496 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
2459 PrepareForBailoutForId(prop->LoadId(), TOS_REG); 2497 PrepareForBailoutForId(prop->LoadId(),
2498 Deoptimizer::BailoutState::TOS_REGISTER);
2460 2499
2461 // Replace home_object with target function. 2500 // Replace home_object with target function.
2462 __ sd(v0, MemOperand(sp, kPointerSize)); 2501 __ sd(v0, MemOperand(sp, kPointerSize));
2463 2502
2464 // Stack here: 2503 // Stack here:
2465 // - target function 2504 // - target function
2466 // - this (receiver) 2505 // - this (receiver)
2467 EmitCall(expr); 2506 EmitCall(expr);
2468 } 2507 }
2469 2508
2470 2509
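The BailoutState type used in the new calls is a scoped enum nested in the Deoptimizer class. Its declaration is not part of this file's diff; the following is only a plausible sketch, assuming just the two members exercised here, and the real declaration in the deoptimizer header may differ:

    // Hypothetical shape of the enum referenced as Deoptimizer::BailoutState;
    // only NO_REGISTERS and TOS_REGISTER appear in this file.
    class Deoptimizer {
     public:
      enum class BailoutState { NO_REGISTERS, TOS_REGISTER };
      // ...
    };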
2471 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { 2510 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2472 // Load the arguments. 2511 // Load the arguments.
2473 ZoneList<Expression*>* args = expr->arguments(); 2512 ZoneList<Expression*>* args = expr->arguments();
2474 int arg_count = args->length(); 2513 int arg_count = args->length();
2475 for (int i = 0; i < arg_count; i++) { 2514 for (int i = 0; i < arg_count; i++) {
2476 VisitForStackValue(args->at(i)); 2515 VisitForStackValue(args->at(i));
2477 } 2516 }
2478 2517
2479 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); 2518 PrepareForBailoutForId(expr->CallId(),
2519 Deoptimizer::BailoutState::NO_REGISTERS);
2480 // Record source position of the IC call. 2520 // Record source position of the IC call.
2481 SetCallPosition(expr, expr->tail_call_mode()); 2521 SetCallPosition(expr, expr->tail_call_mode());
2482 if (expr->tail_call_mode() == TailCallMode::kAllow) { 2522 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2483 if (FLAG_trace) { 2523 if (FLAG_trace) {
2484 __ CallRuntime(Runtime::kTraceTailCall); 2524 __ CallRuntime(Runtime::kTraceTailCall);
2485 } 2525 }
2486 // Update profiling counters before the tail call since we will 2526 // Update profiling counters before the tail call since we will
2487 // not return to this function. 2527 // not return to this function.
2488 EmitProfilingCounterHandlingForReturnSequence(true); 2528 EmitProfilingCounterHandlingForReturnSequence(true);
2489 } 2529 }
(...skipping 49 matching lines...)
2539 // Generate code for loading from variables potentially shadowed by 2579 // Generate code for loading from variables potentially shadowed by
2540 // eval-introduced variables. 2580 // eval-introduced variables.
2541 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done); 2581 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2542 2582
2543 __ bind(&slow); 2583 __ bind(&slow);
2544 // Call the runtime to find the function to call (returned in v0) 2584 // Call the runtime to find the function to call (returned in v0)
2545 // and the object holding it (returned in v1). 2585 // and the object holding it (returned in v1).
2546 __ Push(callee->name()); 2586 __ Push(callee->name());
2547 __ CallRuntime(Runtime::kLoadLookupSlotForCall); 2587 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2548 PushOperands(v0, v1); // Function, receiver. 2588 PushOperands(v0, v1); // Function, receiver.
2549 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS); 2589 PrepareForBailoutForId(expr->LookupId(),
2590 Deoptimizer::BailoutState::NO_REGISTERS);
2550 2591
2551 // If fast case code has been generated, emit code to push the 2592 // If fast case code has been generated, emit code to push the
2552 // function and receiver and have the slow path jump around this 2593 // function and receiver and have the slow path jump around this
2553 // code. 2594 // code.
2554 if (done.is_linked()) { 2595 if (done.is_linked()) {
2555 Label call; 2596 Label call;
2556 __ Branch(&call); 2597 __ Branch(&call);
2557 __ bind(&done); 2598 __ bind(&done);
2558 // Push function. 2599 // Push function.
2559 __ push(v0); 2600 __ push(v0);
(...skipping 27 matching lines...)
2587 2628
2588 // Push a copy of the function (found below the arguments) and 2629 // Push a copy of the function (found below the arguments) and
2589 // resolve eval. 2630 // resolve eval.
2590 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2631 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2591 __ push(a1); 2632 __ push(a1);
2592 EmitResolvePossiblyDirectEval(expr); 2633 EmitResolvePossiblyDirectEval(expr);
2593 2634
2594 // Touch up the stack with the resolved function. 2635 // Touch up the stack with the resolved function.
2595 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2636 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2596 2637
2597 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS); 2638 PrepareForBailoutForId(expr->EvalId(),
2639 Deoptimizer::BailoutState::NO_REGISTERS);
2598 // Record source position for debugger. 2640 // Record source position for debugger.
2599 SetCallPosition(expr); 2641 SetCallPosition(expr);
2600 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2642 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2601 __ li(a0, Operand(arg_count)); 2643 __ li(a0, Operand(arg_count));
2602 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny, 2644 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2603 expr->tail_call_mode()), 2645 expr->tail_call_mode()),
2604 RelocInfo::CODE_TARGET); 2646 RelocInfo::CODE_TARGET);
2605 OperandStackDepthDecrement(arg_count + 1); 2647 OperandStackDepthDecrement(arg_count + 1);
2606 RecordJSReturnSite(expr); 2648 RecordJSReturnSite(expr);
2607 RestoreContext(); 2649 RestoreContext();
(...skipping 28 matching lines...)
2636 __ li(a0, Operand(arg_count)); 2678 __ li(a0, Operand(arg_count));
2637 __ ld(a1, MemOperand(sp, arg_count * kPointerSize)); 2679 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
2638 2680
2639 // Record call targets in unoptimized code. 2681 // Record call targets in unoptimized code.
2640 __ EmitLoadTypeFeedbackVector(a2); 2682 __ EmitLoadTypeFeedbackVector(a2);
2641 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot()))); 2683 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
2642 2684
2643 CallConstructStub stub(isolate()); 2685 CallConstructStub stub(isolate());
2644 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET); 2686 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
2645 OperandStackDepthDecrement(arg_count + 1); 2687 OperandStackDepthDecrement(arg_count + 1);
2646 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2688 PrepareForBailoutForId(expr->ReturnId(),
2689 Deoptimizer::BailoutState::TOS_REGISTER);
2647 RestoreContext(); 2690 RestoreContext();
2648 context()->Plug(v0); 2691 context()->Plug(v0);
2649 } 2692 }
2650 2693
2651 2694
2652 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) { 2695 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2653 SuperCallReference* super_call_ref = 2696 SuperCallReference* super_call_ref =
2654 expr->expression()->AsSuperCallReference(); 2697 expr->expression()->AsSuperCallReference();
2655 DCHECK_NOT_NULL(super_call_ref); 2698 DCHECK_NOT_NULL(super_call_ref);
2656 2699
(...skipping 427 matching lines...)
3084 } 3127 }
3085 3128
3086 3129
3087 void FullCodeGenerator::EmitCall(CallRuntime* expr) { 3130 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3088 ZoneList<Expression*>* args = expr->arguments(); 3131 ZoneList<Expression*>* args = expr->arguments();
3089 DCHECK_LE(2, args->length()); 3132 DCHECK_LE(2, args->length());
3090 // Push target, receiver and arguments onto the stack. 3133 // Push target, receiver and arguments onto the stack.
3091 for (Expression* const arg : *args) { 3134 for (Expression* const arg : *args) {
3092 VisitForStackValue(arg); 3135 VisitForStackValue(arg);
3093 } 3136 }
3094 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); 3137 PrepareForBailoutForId(expr->CallId(),
3138 Deoptimizer::BailoutState::NO_REGISTERS);
3095 // Move target to a1. 3139 // Move target to a1.
3096 int const argc = args->length() - 2; 3140 int const argc = args->length() - 2;
3097 __ ld(a1, MemOperand(sp, (argc + 1) * kPointerSize)); 3141 __ ld(a1, MemOperand(sp, (argc + 1) * kPointerSize));
3098 // Call the target. 3142 // Call the target.
3099 __ li(a0, Operand(argc)); 3143 __ li(a0, Operand(argc));
3100 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 3144 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
3101 OperandStackDepthDecrement(argc + 1); 3145 OperandStackDepthDecrement(argc + 1);
3102 RestoreContext(); 3146 RestoreContext();
3103 // Discard the function left on TOS. 3147 // Discard the function left on TOS.
3104 context()->DropAndPlug(1, v0); 3148 context()->DropAndPlug(1, v0);
(...skipping 190 matching lines...)
3295 // because we need to prepare a pair of extra administrative AST ids 3339 // because we need to prepare a pair of extra administrative AST ids
3296 // for the optimizing compiler. 3340 // for the optimizing compiler.
3297 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue()); 3341 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3298 Label materialize_true, materialize_false, done; 3342 Label materialize_true, materialize_false, done;
3299 VisitForControl(expr->expression(), 3343 VisitForControl(expr->expression(),
3300 &materialize_false, 3344 &materialize_false,
3301 &materialize_true, 3345 &materialize_true,
3302 &materialize_true); 3346 &materialize_true);
3303 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1); 3347 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
3304 __ bind(&materialize_true); 3348 __ bind(&materialize_true);
3305 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); 3349 PrepareForBailoutForId(expr->MaterializeTrueId(),
3350 Deoptimizer::BailoutState::NO_REGISTERS);
3306 __ LoadRoot(v0, Heap::kTrueValueRootIndex); 3351 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3307 if (context()->IsStackValue()) __ push(v0); 3352 if (context()->IsStackValue()) __ push(v0);
3308 __ jmp(&done); 3353 __ jmp(&done);
3309 __ bind(&materialize_false); 3354 __ bind(&materialize_false);
3310 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS); 3355 PrepareForBailoutForId(expr->MaterializeFalseId(),
3356 Deoptimizer::BailoutState::NO_REGISTERS);
3311 __ LoadRoot(v0, Heap::kFalseValueRootIndex); 3357 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3312 if (context()->IsStackValue()) __ push(v0); 3358 if (context()->IsStackValue()) __ push(v0);
3313 __ bind(&done); 3359 __ bind(&done);
3314 } 3360 }
3315 break; 3361 break;
3316 } 3362 }
3317 3363
3318 case Token::TYPEOF: { 3364 case Token::TYPEOF: {
3319 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); 3365 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3320 { 3366 {
(...skipping 79 matching lines...)
3400 } 3446 }
3401 3447
3402 case VARIABLE: 3448 case VARIABLE:
3403 UNREACHABLE(); 3449 UNREACHABLE();
3404 } 3450 }
3405 } 3451 }
3406 3452
3407 // We need a second deoptimization point after loading the value 3453 // We need a second deoptimization point after loading the value
3408 // in case evaluating the property load may have a side effect. 3454 // in case evaluating the property load may have a side effect.
3409 if (assign_type == VARIABLE) { 3455 if (assign_type == VARIABLE) {
3410 PrepareForBailout(expr->expression(), TOS_REG); 3456 PrepareForBailout(expr->expression(),
3457 Deoptimizer::BailoutState::TOS_REGISTER);
3411 } else { 3458 } else {
3412 PrepareForBailoutForId(prop->LoadId(), TOS_REG); 3459 PrepareForBailoutForId(prop->LoadId(),
3460 Deoptimizer::BailoutState::TOS_REGISTER);
3413 } 3461 }
3414 3462
3415 // Inline smi case if we are in a loop. 3463 // Inline smi case if we are in a loop.
3416 Label stub_call, done; 3464 Label stub_call, done;
3417 JumpPatchSite patch_site(masm_); 3465 JumpPatchSite patch_site(masm_);
3418 3466
3419 int count_value = expr->op() == Token::INC ? 1 : -1; 3467 int count_value = expr->op() == Token::INC ? 1 : -1;
3420 __ mov(a0, v0); 3468 __ mov(a0, v0);
3421 if (ShouldInlineSmiCase(expr->op())) { 3469 if (ShouldInlineSmiCase(expr->op())) {
3422 Label slow; 3470 Label slow;
(...skipping 30 matching lines...)
3453 __ DaddBranchNoOvf(v0, v0, Operand(scratch1), &done); 3501 __ DaddBranchNoOvf(v0, v0, Operand(scratch1), &done);
3454 // Call stub. Undo operation first. 3502 // Call stub. Undo operation first.
3455 __ Move(v0, a0); 3503 __ Move(v0, a0);
3456 __ jmp(&stub_call); 3504 __ jmp(&stub_call);
3457 __ bind(&slow); 3505 __ bind(&slow);
3458 } 3506 }
3459 3507
3460 // Convert old value into a number. 3508 // Convert old value into a number.
3461 ToNumberStub convert_stub(isolate()); 3509 ToNumberStub convert_stub(isolate());
3462 __ CallStub(&convert_stub); 3510 __ CallStub(&convert_stub);
3463 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG); 3511 PrepareForBailoutForId(expr->ToNumberId(),
3512 Deoptimizer::BailoutState::TOS_REGISTER);
3464 3513
3465 // Save result for postfix expressions. 3514 // Save result for postfix expressions.
3466 if (expr->is_postfix()) { 3515 if (expr->is_postfix()) {
3467 if (!context()->IsEffect()) { 3516 if (!context()->IsEffect()) {
3468 // Save the result on the stack. If we have a named or keyed property 3517 // Save the result on the stack. If we have a named or keyed property
3469 // we store the result under the receiver that is currently on top 3518 // we store the result under the receiver that is currently on top
3470 // of the stack. 3519 // of the stack.
3471 switch (assign_type) { 3520 switch (assign_type) {
3472 case VARIABLE: 3521 case VARIABLE:
3473 PushOperand(v0); 3522 PushOperand(v0);
(...skipping 25 matching lines...)
3499 patch_site.EmitPatchInfo(); 3548 patch_site.EmitPatchInfo();
3500 __ bind(&done); 3549 __ bind(&done);
3501 3550
3502 // Store the value returned in v0. 3551 // Store the value returned in v0.
3503 switch (assign_type) { 3552 switch (assign_type) {
3504 case VARIABLE: 3553 case VARIABLE:
3505 if (expr->is_postfix()) { 3554 if (expr->is_postfix()) {
3506 { EffectContext context(this); 3555 { EffectContext context(this);
3507 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 3556 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3508 Token::ASSIGN, expr->CountSlot()); 3557 Token::ASSIGN, expr->CountSlot());
3509 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 3558 PrepareForBailoutForId(expr->AssignmentId(),
3559 Deoptimizer::BailoutState::TOS_REGISTER);
3510 context.Plug(v0); 3560 context.Plug(v0);
3511 } 3561 }
3512 // For all contexts except EffectContext we have the result on 3562 // For all contexts except EffectContext we have the result on
3513 // top of the stack. 3563 // top of the stack.
3514 if (!context()->IsEffect()) { 3564 if (!context()->IsEffect()) {
3515 context()->PlugTOS(); 3565 context()->PlugTOS();
3516 } 3566 }
3517 } else { 3567 } else {
3518 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 3568 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3519 Token::ASSIGN, expr->CountSlot()); 3569 Token::ASSIGN, expr->CountSlot());
3520 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 3570 PrepareForBailoutForId(expr->AssignmentId(),
3571 Deoptimizer::BailoutState::TOS_REGISTER);
3521 context()->Plug(v0); 3572 context()->Plug(v0);
3522 } 3573 }
3523 break; 3574 break;
3524 case NAMED_PROPERTY: { 3575 case NAMED_PROPERTY: {
3525 __ mov(StoreDescriptor::ValueRegister(), result_register()); 3576 __ mov(StoreDescriptor::ValueRegister(), result_register());
3526 __ li(StoreDescriptor::NameRegister(), 3577 __ li(StoreDescriptor::NameRegister(),
3527 Operand(prop->key()->AsLiteral()->value())); 3578 Operand(prop->key()->AsLiteral()->value()));
3528 PopOperand(StoreDescriptor::ReceiverRegister()); 3579 PopOperand(StoreDescriptor::ReceiverRegister());
3529 EmitLoadStoreICSlot(expr->CountSlot()); 3580 EmitLoadStoreICSlot(expr->CountSlot());
3530 CallStoreIC(); 3581 CallStoreIC();
3531 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 3582 PrepareForBailoutForId(expr->AssignmentId(),
3583 Deoptimizer::BailoutState::TOS_REGISTER);
3532 if (expr->is_postfix()) { 3584 if (expr->is_postfix()) {
3533 if (!context()->IsEffect()) { 3585 if (!context()->IsEffect()) {
3534 context()->PlugTOS(); 3586 context()->PlugTOS();
3535 } 3587 }
3536 } else { 3588 } else {
3537 context()->Plug(v0); 3589 context()->Plug(v0);
3538 } 3590 }
3539 break; 3591 break;
3540 } 3592 }
3541 case NAMED_SUPER_PROPERTY: { 3593 case NAMED_SUPER_PROPERTY: {
(...skipping 19 matching lines...)
3561 break; 3613 break;
3562 } 3614 }
3563 case KEYED_PROPERTY: { 3615 case KEYED_PROPERTY: {
3564 __ mov(StoreDescriptor::ValueRegister(), result_register()); 3616 __ mov(StoreDescriptor::ValueRegister(), result_register());
3565 PopOperands(StoreDescriptor::ReceiverRegister(), 3617 PopOperands(StoreDescriptor::ReceiverRegister(),
3566 StoreDescriptor::NameRegister()); 3618 StoreDescriptor::NameRegister());
3567 Handle<Code> ic = 3619 Handle<Code> ic =
3568 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); 3620 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
3569 EmitLoadStoreICSlot(expr->CountSlot()); 3621 EmitLoadStoreICSlot(expr->CountSlot());
3570 CallIC(ic); 3622 CallIC(ic);
3571 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 3623 PrepareForBailoutForId(expr->AssignmentId(),
3624 Deoptimizer::BailoutState::TOS_REGISTER);
3572 if (expr->is_postfix()) { 3625 if (expr->is_postfix()) {
3573 if (!context()->IsEffect()) { 3626 if (!context()->IsEffect()) {
3574 context()->PlugTOS(); 3627 context()->PlugTOS();
3575 } 3628 }
3576 } else { 3629 } else {
3577 context()->Plug(v0); 3630 context()->Plug(v0);
3578 } 3631 }
3579 break; 3632 break;
3580 } 3633 }
3581 } 3634 }
(...skipping 370 matching lines...)
3952 reinterpret_cast<uint64_t>( 4005 reinterpret_cast<uint64_t>(
3953 isolate->builtins()->OnStackReplacement()->entry())); 4006 isolate->builtins()->OnStackReplacement()->entry()));
3954 return ON_STACK_REPLACEMENT; 4007 return ON_STACK_REPLACEMENT;
3955 } 4008 }
3956 4009
3957 4010
3958 } // namespace internal 4011 } // namespace internal
3959 } // namespace v8 4012 } // namespace v8
3960 4013
3961 #endif // V8_TARGET_ARCH_MIPS64 4014 #endif // V8_TARGET_ARCH_MIPS64