Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(644)

Side by Side Diff: src/mips/full-codegen-mips.cc

Issue 8404030: Version 3.7.1 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 9 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/mips/deoptimizer-mips.cc ('k') | src/mips/ic-mips.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 260 matching lines...) Expand 10 before | Expand all | Expand 10 after
271 Comment cmnt(masm_, "[ Declarations"); 271 Comment cmnt(masm_, "[ Declarations");
272 scope()->VisitIllegalRedeclaration(this); 272 scope()->VisitIllegalRedeclaration(this);
273 273
274 } else { 274 } else {
275 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); 275 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
276 { Comment cmnt(masm_, "[ Declarations"); 276 { Comment cmnt(masm_, "[ Declarations");
277 // For named function expressions, declare the function name as a 277 // For named function expressions, declare the function name as a
278 // constant. 278 // constant.
279 if (scope()->is_function_scope() && scope()->function() != NULL) { 279 if (scope()->is_function_scope() && scope()->function() != NULL) {
280 int ignored = 0; 280 int ignored = 0;
281 EmitDeclaration(scope()->function(), CONST, NULL, &ignored); 281 VariableProxy* proxy = scope()->function();
282 ASSERT(proxy->var()->mode() == CONST ||
283 proxy->var()->mode() == CONST_HARMONY);
284 EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
282 } 285 }
283 VisitDeclarations(scope()->declarations()); 286 VisitDeclarations(scope()->declarations());
284 } 287 }
285 288
286 { Comment cmnt(masm_, "[ Stack check"); 289 { Comment cmnt(masm_, "[ Stack check");
287 PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS); 290 PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
288 Label ok; 291 Label ok;
289 __ LoadRoot(t0, Heap::kStackLimitRootIndex); 292 __ LoadRoot(t0, Heap::kStackLimitRootIndex);
290 __ Branch(&ok, hs, sp, Operand(t0)); 293 __ Branch(&ok, hs, sp, Operand(t0));
291 StackCheckStub stub; 294 StackCheckStub stub;
(...skipping 429 matching lines...) Expand 10 before | Expand all | Expand 10 after
721 724
722 725
723 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy, 726 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
724 VariableMode mode, 727 VariableMode mode,
725 FunctionLiteral* function, 728 FunctionLiteral* function,
726 int* global_count) { 729 int* global_count) {
727 // If it was not possible to allocate the variable at compile time, we 730 // If it was not possible to allocate the variable at compile time, we
728 // need to "declare" it at runtime to make sure it actually exists in the 731 // need to "declare" it at runtime to make sure it actually exists in the
729 // local context. 732 // local context.
730 Variable* variable = proxy->var(); 733 Variable* variable = proxy->var();
734 bool binding_needs_init =
735 mode == CONST || mode == CONST_HARMONY || mode == LET;
731 switch (variable->location()) { 736 switch (variable->location()) {
732 case Variable::UNALLOCATED: 737 case Variable::UNALLOCATED:
733 ++(*global_count); 738 ++(*global_count);
734 break; 739 break;
735 740
736 case Variable::PARAMETER: 741 case Variable::PARAMETER:
737 case Variable::LOCAL: 742 case Variable::LOCAL:
738 if (function != NULL) { 743 if (function != NULL) {
739 Comment cmnt(masm_, "[ Declaration"); 744 Comment cmnt(masm_, "[ Declaration");
740 VisitForAccumulatorValue(function); 745 VisitForAccumulatorValue(function);
741 __ sw(result_register(), StackOperand(variable)); 746 __ sw(result_register(), StackOperand(variable));
742 } else if (mode == CONST || mode == LET) { 747 } else if (binding_needs_init) {
743 Comment cmnt(masm_, "[ Declaration"); 748 Comment cmnt(masm_, "[ Declaration");
744 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); 749 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
745 __ sw(t0, StackOperand(variable)); 750 __ sw(t0, StackOperand(variable));
746 } 751 }
747 break; 752 break;
748 753
749 case Variable::CONTEXT: 754 case Variable::CONTEXT:
750 // The variable in the decl always resides in the current function 755 // The variable in the decl always resides in the current function
751 // context. 756 // context.
752 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); 757 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
(...skipping 15 matching lines...) Expand all
768 // We know that we have written a function, which is not a smi. 773 // We know that we have written a function, which is not a smi.
769 __ RecordWriteContextSlot(cp, 774 __ RecordWriteContextSlot(cp,
770 offset, 775 offset,
771 result_register(), 776 result_register(),
772 a2, 777 a2,
773 kRAHasBeenSaved, 778 kRAHasBeenSaved,
774 kDontSaveFPRegs, 779 kDontSaveFPRegs,
775 EMIT_REMEMBERED_SET, 780 EMIT_REMEMBERED_SET,
776 OMIT_SMI_CHECK); 781 OMIT_SMI_CHECK);
777 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 782 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
778 } else if (mode == CONST || mode == LET) { 783 } else if (binding_needs_init) {
779 Comment cmnt(masm_, "[ Declaration"); 784 Comment cmnt(masm_, "[ Declaration");
780 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 785 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
781 __ sw(at, ContextOperand(cp, variable->index())); 786 __ sw(at, ContextOperand(cp, variable->index()));
782 // No write barrier since the_hole_value is in old space. 787 // No write barrier since the_hole_value is in old space.
783 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 788 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
784 } 789 }
785 break; 790 break;
786 791
787 case Variable::LOOKUP: { 792 case Variable::LOOKUP: {
788 Comment cmnt(masm_, "[ Declaration"); 793 Comment cmnt(masm_, "[ Declaration");
789 __ li(a2, Operand(variable->name())); 794 __ li(a2, Operand(variable->name()));
790 // Declaration nodes are always introduced in one of three modes. 795 // Declaration nodes are always introduced in one of four modes.
791 ASSERT(mode == VAR || mode == CONST || mode == LET); 796 ASSERT(mode == VAR ||
792 PropertyAttributes attr = (mode == CONST) ? READ_ONLY : NONE; 797 mode == CONST ||
798 mode == CONST_HARMONY ||
799 mode == LET);
800 PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
801 ? READ_ONLY : NONE;
793 __ li(a1, Operand(Smi::FromInt(attr))); 802 __ li(a1, Operand(Smi::FromInt(attr)));
794 // Push initial value, if any. 803 // Push initial value, if any.
795 // Note: For variables we must not push an initial value (such as 804 // Note: For variables we must not push an initial value (such as
796 // 'undefined') because we may have a (legal) redeclaration and we 805 // 'undefined') because we may have a (legal) redeclaration and we
797 // must not destroy the current value. 806 // must not destroy the current value.
798 if (function != NULL) { 807 if (function != NULL) {
799 __ Push(cp, a2, a1); 808 __ Push(cp, a2, a1);
800 // Push initial value for function declaration. 809 // Push initial value for function declaration.
801 VisitForStackValue(function); 810 VisitForStackValue(function);
802 } else if (mode == CONST || mode == LET) { 811 } else if (binding_needs_init) {
803 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex); 812 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
804 __ Push(cp, a2, a1, a0); 813 __ Push(cp, a2, a1, a0);
805 } else { 814 } else {
806 ASSERT(Smi::FromInt(0) == 0); 815 ASSERT(Smi::FromInt(0) == 0);
807 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value. 816 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
808 __ Push(cp, a2, a1, a0); 817 __ Push(cp, a2, a1, a0);
809 } 818 }
810 __ CallRuntime(Runtime::kDeclareContextSlot, 4); 819 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
811 break; 820 break;
812 } 821 }
(...skipping 122 matching lines...) Expand 10 before | Expand all | Expand 10 after
935 __ JumpIfSmi(a0, &convert); 944 __ JumpIfSmi(a0, &convert);
936 __ GetObjectType(a0, a1, a1); 945 __ GetObjectType(a0, a1, a1);
937 __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); 946 __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
938 __ bind(&convert); 947 __ bind(&convert);
939 __ push(a0); 948 __ push(a0);
940 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 949 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
941 __ mov(a0, v0); 950 __ mov(a0, v0);
942 __ bind(&done_convert); 951 __ bind(&done_convert);
943 __ push(a0); 952 __ push(a0);
944 953
954 // Check for proxies.
955 Label call_runtime;
956 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
957 __ GetObjectType(a0, a1, a1);
958 __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
959
945 // Check cache validity in generated code. This is a fast case for 960 // Check cache validity in generated code. This is a fast case for
946 // the JSObject::IsSimpleEnum cache validity checks. If we cannot 961 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
947 // guarantee cache validity, call the runtime system to check cache 962 // guarantee cache validity, call the runtime system to check cache
948 // validity or get the property names in a fixed array. 963 // validity or get the property names in a fixed array.
949 Label next, call_runtime; 964 Label next;
950 // Preload a couple of values used in the loop. 965 // Preload a couple of values used in the loop.
951 Register empty_fixed_array_value = t2; 966 Register empty_fixed_array_value = t2;
952 __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex); 967 __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
953 Register empty_descriptor_array_value = t3; 968 Register empty_descriptor_array_value = t3;
954 __ LoadRoot(empty_descriptor_array_value, 969 __ LoadRoot(empty_descriptor_array_value,
955 Heap::kEmptyDescriptorArrayRootIndex); 970 Heap::kEmptyDescriptorArrayRootIndex);
956 __ mov(a1, a0); 971 __ mov(a1, a0);
957 __ bind(&next); 972 __ bind(&next);
958 973
959 // Check that there are no elements. Register a1 contains the 974 // Check that there are no elements. Register a1 contains the
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after
1013 1028
1014 // Setup the four remaining stack slots. 1029 // Setup the four remaining stack slots.
1015 __ push(v0); // Map. 1030 __ push(v0); // Map.
1016 __ lw(a1, FieldMemOperand(a2, FixedArray::kLengthOffset)); 1031 __ lw(a1, FieldMemOperand(a2, FixedArray::kLengthOffset));
1017 __ li(a0, Operand(Smi::FromInt(0))); 1032 __ li(a0, Operand(Smi::FromInt(0)));
1018 // Push enumeration cache, enumeration cache length (as smi) and zero. 1033 // Push enumeration cache, enumeration cache length (as smi) and zero.
1019 __ Push(a2, a1, a0); 1034 __ Push(a2, a1, a0);
1020 __ jmp(&loop); 1035 __ jmp(&loop);
1021 1036
1022 // We got a fixed array in register v0. Iterate through that. 1037 // We got a fixed array in register v0. Iterate through that.
1038 Label non_proxy;
1023 __ bind(&fixed_array); 1039 __ bind(&fixed_array);
1024 __ li(a1, Operand(Smi::FromInt(0))); // Map (0) - force slow check. 1040 __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1025 __ Push(a1, v0); 1041 __ lw(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1042 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1043 __ GetObjectType(a2, a3, a3);
1044 __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1045 __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1046 __ bind(&non_proxy);
1047 __ Push(a1, v0); // Smi and array
1026 __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset)); 1048 __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1027 __ li(a0, Operand(Smi::FromInt(0))); 1049 __ li(a0, Operand(Smi::FromInt(0)));
1028 __ Push(a1, a0); // Fixed array length (as smi) and initial index. 1050 __ Push(a1, a0); // Fixed array length (as smi) and initial index.
1029 1051
1030 // Generate code for doing the condition check. 1052 // Generate code for doing the condition check.
1031 __ bind(&loop); 1053 __ bind(&loop);
1032 // Load the current count to a0, load the length to a1. 1054 // Load the current count to a0, load the length to a1.
1033 __ lw(a0, MemOperand(sp, 0 * kPointerSize)); 1055 __ lw(a0, MemOperand(sp, 0 * kPointerSize));
1034 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); 1056 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1035 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1)); 1057 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1036 1058
1037 // Get the current entry of the array into register a3. 1059 // Get the current entry of the array into register a3.
1038 __ lw(a2, MemOperand(sp, 2 * kPointerSize)); 1060 __ lw(a2, MemOperand(sp, 2 * kPointerSize));
1039 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 1061 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1040 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize); 1062 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
1041 __ addu(t0, a2, t0); // Array base + scaled (smi) index. 1063 __ addu(t0, a2, t0); // Array base + scaled (smi) index.
1042 __ lw(a3, MemOperand(t0)); // Current entry. 1064 __ lw(a3, MemOperand(t0)); // Current entry.
1043 1065
1044 // Get the expected map from the stack or a zero map in the 1066 // Get the expected map from the stack or a smi in the
1045 // permanent slow case into register a2. 1067 // permanent slow case into register a2.
1046 __ lw(a2, MemOperand(sp, 3 * kPointerSize)); 1068 __ lw(a2, MemOperand(sp, 3 * kPointerSize));
1047 1069
1048 // Check if the expected map still matches that of the enumerable. 1070 // Check if the expected map still matches that of the enumerable.
1049 // If not, we have to filter the key. 1071 // If not, we may have to filter the key.
1050 Label update_each; 1072 Label update_each;
1051 __ lw(a1, MemOperand(sp, 4 * kPointerSize)); 1073 __ lw(a1, MemOperand(sp, 4 * kPointerSize));
1052 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); 1074 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
1053 __ Branch(&update_each, eq, t0, Operand(a2)); 1075 __ Branch(&update_each, eq, t0, Operand(a2));
1054 1076
1077 // For proxies, no filtering is done.
1078 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1079 ASSERT_EQ(Smi::FromInt(0), 0);
1080 __ Branch(&update_each, eq, a2, Operand(zero_reg));
1081
1055 // Convert the entry to a string or (smi) 0 if it isn't a property 1082 // Convert the entry to a string or (smi) 0 if it isn't a property
1056 // any more. If the property has been removed while iterating, we 1083 // any more. If the property has been removed while iterating, we
1057 // just skip it. 1084 // just skip it.
1058 __ push(a1); // Enumerable. 1085 __ push(a1); // Enumerable.
1059 __ push(a3); // Current entry. 1086 __ push(a3); // Current entry.
1060 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); 1087 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1061 __ mov(a3, result_register()); 1088 __ mov(a3, result_register());
1062 __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg)); 1089 __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));
1063 1090
1064 // Update the 'each' property or variable from the possibly filtered 1091 // Update the 'each' property or variable from the possibly filtered
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
1099 // space for nested functions that don't need literals cloning. If 1126 // space for nested functions that don't need literals cloning. If
1100 // we're running with the --always-opt or the --prepare-always-opt 1127 // we're running with the --always-opt or the --prepare-always-opt
1101 // flag, we need to use the runtime function so that the new function 1128 // flag, we need to use the runtime function so that the new function
1102 // we are creating here gets a chance to have its code optimized and 1129 // we are creating here gets a chance to have its code optimized and
1103 // doesn't just get a copy of the existing unoptimized code. 1130 // doesn't just get a copy of the existing unoptimized code.
1104 if (!FLAG_always_opt && 1131 if (!FLAG_always_opt &&
1105 !FLAG_prepare_always_opt && 1132 !FLAG_prepare_always_opt &&
1106 !pretenure && 1133 !pretenure &&
1107 scope()->is_function_scope() && 1134 scope()->is_function_scope() &&
1108 info->num_literals() == 0) { 1135 info->num_literals() == 0) {
1109 FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode); 1136 FastNewClosureStub stub(info->strict_mode_flag());
1110 __ li(a0, Operand(info)); 1137 __ li(a0, Operand(info));
1111 __ push(a0); 1138 __ push(a0);
1112 __ CallStub(&stub); 1139 __ CallStub(&stub);
1113 } else { 1140 } else {
1114 __ li(a0, Operand(info)); 1141 __ li(a0, Operand(info));
1115 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex 1142 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1116 : Heap::kFalseValueRootIndex); 1143 : Heap::kFalseValueRootIndex);
1117 __ Push(cp, a0, a1); 1144 __ Push(cp, a0, a1);
1118 __ CallRuntime(Runtime::kNewClosure, 3); 1145 __ CallRuntime(Runtime::kNewClosure, 3);
1119 } 1146 }
(...skipping 10 matching lines...) Expand all
1130 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, 1157 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1131 TypeofState typeof_state, 1158 TypeofState typeof_state,
1132 Label* slow) { 1159 Label* slow) {
1133 Register current = cp; 1160 Register current = cp;
1134 Register next = a1; 1161 Register next = a1;
1135 Register temp = a2; 1162 Register temp = a2;
1136 1163
1137 Scope* s = scope(); 1164 Scope* s = scope();
1138 while (s != NULL) { 1165 while (s != NULL) {
1139 if (s->num_heap_slots() > 0) { 1166 if (s->num_heap_slots() > 0) {
1140 if (s->calls_eval()) { 1167 if (s->calls_non_strict_eval()) {
1141 // Check that extension is NULL. 1168 // Check that extension is NULL.
1142 __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX)); 1169 __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1143 __ Branch(slow, ne, temp, Operand(zero_reg)); 1170 __ Branch(slow, ne, temp, Operand(zero_reg));
1144 } 1171 }
1145 // Load next context in chain. 1172 // Load next context in chain.
1146 __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX)); 1173 __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1147 // Walk the rest of the chain without clobbering cp. 1174 // Walk the rest of the chain without clobbering cp.
1148 current = next; 1175 current = next;
1149 } 1176 }
1150 // If no outer scope calls eval, we do not need to check more 1177 // If no outer scope calls eval, we do not need to check more
1151 // context extensions. 1178 // context extensions.
1152 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; 1179 if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1153 s = s->outer_scope(); 1180 s = s->outer_scope();
1154 } 1181 }
1155 1182
1156 if (s->is_eval_scope()) { 1183 if (s->is_eval_scope()) {
1157 Label loop, fast; 1184 Label loop, fast;
1158 if (!current.is(next)) { 1185 if (!current.is(next)) {
1159 __ Move(next, current); 1186 __ Move(next, current);
1160 } 1187 }
1161 __ bind(&loop); 1188 __ bind(&loop);
1162 // Terminate at global context. 1189 // Terminate at global context.
(...skipping 21 matching lines...) Expand all
1184 1211
1185 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, 1212 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1186 Label* slow) { 1213 Label* slow) {
1187 ASSERT(var->IsContextSlot()); 1214 ASSERT(var->IsContextSlot());
1188 Register context = cp; 1215 Register context = cp;
1189 Register next = a3; 1216 Register next = a3;
1190 Register temp = t0; 1217 Register temp = t0;
1191 1218
1192 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { 1219 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1193 if (s->num_heap_slots() > 0) { 1220 if (s->num_heap_slots() > 0) {
1194 if (s->calls_eval()) { 1221 if (s->calls_non_strict_eval()) {
1195 // Check that extension is NULL. 1222 // Check that extension is NULL.
1196 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1223 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1197 __ Branch(slow, ne, temp, Operand(zero_reg)); 1224 __ Branch(slow, ne, temp, Operand(zero_reg));
1198 } 1225 }
1199 __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX)); 1226 __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1200 // Walk the rest of the chain without clobbering cp. 1227 // Walk the rest of the chain without clobbering cp.
1201 context = next; 1228 context = next;
1202 } 1229 }
1203 } 1230 }
1204 // Check that last extension is NULL. 1231 // Check that last extension is NULL.
(...skipping 16 matching lines...) Expand all
1221 // introducing variables. In those cases, we do not want to 1248 // introducing variables. In those cases, we do not want to
1222 // perform a runtime call for all variables in the scope 1249 // perform a runtime call for all variables in the scope
1223 // containing the eval. 1250 // containing the eval.
1224 if (var->mode() == DYNAMIC_GLOBAL) { 1251 if (var->mode() == DYNAMIC_GLOBAL) {
1225 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); 1252 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1226 __ Branch(done); 1253 __ Branch(done);
1227 } else if (var->mode() == DYNAMIC_LOCAL) { 1254 } else if (var->mode() == DYNAMIC_LOCAL) {
1228 Variable* local = var->local_if_not_shadowed(); 1255 Variable* local = var->local_if_not_shadowed();
1229 __ lw(v0, ContextSlotOperandCheckExtensions(local, slow)); 1256 __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
1230 if (local->mode() == CONST || 1257 if (local->mode() == CONST ||
1258 local->mode() == CONST_HARMONY ||
1231 local->mode() == LET) { 1259 local->mode() == LET) {
1232 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 1260 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1233 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. 1261 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1234 if (local->mode() == CONST) { 1262 if (local->mode() == CONST) {
1235 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); 1263 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1236 __ movz(v0, a0, at); // Conditional move: return Undefined if TheHole. 1264 __ movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1237 } else { // LET 1265 } else { // LET || CONST_HARMONY
1238 __ Branch(done, ne, at, Operand(zero_reg)); 1266 __ Branch(done, ne, at, Operand(zero_reg));
1239 __ li(a0, Operand(var->name())); 1267 __ li(a0, Operand(var->name()));
1240 __ push(a0); 1268 __ push(a0);
1241 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1269 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1242 } 1270 }
1243 } 1271 }
1244 __ Branch(done); 1272 __ Branch(done);
1245 } 1273 }
1246 } 1274 }
1247 1275
(...skipping 17 matching lines...) Expand all
1265 context()->Plug(v0); 1293 context()->Plug(v0);
1266 break; 1294 break;
1267 } 1295 }
1268 1296
1269 case Variable::PARAMETER: 1297 case Variable::PARAMETER:
1270 case Variable::LOCAL: 1298 case Variable::LOCAL:
1271 case Variable::CONTEXT: { 1299 case Variable::CONTEXT: {
1272 Comment cmnt(masm_, var->IsContextSlot() 1300 Comment cmnt(masm_, var->IsContextSlot()
1273 ? "Context variable" 1301 ? "Context variable"
1274 : "Stack variable"); 1302 : "Stack variable");
1275 if (var->mode() != LET && var->mode() != CONST) { 1303 if (!var->binding_needs_init()) {
1276 context()->Plug(var); 1304 context()->Plug(var);
1277 } else { 1305 } else {
1278 // Let and const need a read barrier. 1306 // Let and const need a read barrier.
1279 GetVar(v0, var); 1307 GetVar(v0, var);
1280 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 1308 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1281 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. 1309 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1282 if (var->mode() == LET) { 1310 if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1311 // Throw a reference error when using an uninitialized let/const
1312 // binding in harmony mode.
1283 Label done; 1313 Label done;
1284 __ Branch(&done, ne, at, Operand(zero_reg)); 1314 __ Branch(&done, ne, at, Operand(zero_reg));
1285 __ li(a0, Operand(var->name())); 1315 __ li(a0, Operand(var->name()));
1286 __ push(a0); 1316 __ push(a0);
1287 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1317 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1288 __ bind(&done); 1318 __ bind(&done);
1289 } else { 1319 } else {
1320 // Uninitalized const bindings outside of harmony mode are unholed.
1321 ASSERT(var->mode() == CONST);
1290 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); 1322 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1291 __ movz(v0, a0, at); // Conditional move: Undefined if TheHole. 1323 __ movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1292 } 1324 }
1293 context()->Plug(v0); 1325 context()->Plug(v0);
1294 } 1326 }
1295 break; 1327 break;
1296 } 1328 }
1297 1329
1298 case Variable::LOOKUP: { 1330 case Variable::LOOKUP: {
1299 Label done, slow; 1331 Label done, slow;
(...skipping 169 matching lines...) Expand 10 before | Expand all | Expand 10 after
1469 context()->Plug(v0); 1501 context()->Plug(v0);
1470 } 1502 }
1471 } 1503 }
1472 1504
1473 1505
1474 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { 1506 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1475 Comment cmnt(masm_, "[ ArrayLiteral"); 1507 Comment cmnt(masm_, "[ ArrayLiteral");
1476 1508
1477 ZoneList<Expression*>* subexprs = expr->values(); 1509 ZoneList<Expression*>* subexprs = expr->values();
1478 int length = subexprs->length(); 1510 int length = subexprs->length();
1511
1512 Handle<FixedArray> constant_elements = expr->constant_elements();
1513 ASSERT_EQ(2, constant_elements->length());
1514 ElementsKind constant_elements_kind =
1515 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1516 Handle<FixedArrayBase> constant_elements_values(
1517 FixedArrayBase::cast(constant_elements->get(1)));
1518
1479 __ mov(a0, result_register()); 1519 __ mov(a0, result_register());
1480 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1520 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1481 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); 1521 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1482 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); 1522 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1483 __ li(a1, Operand(expr->constant_elements())); 1523 __ li(a1, Operand(constant_elements));
1484 __ Push(a3, a2, a1); 1524 __ Push(a3, a2, a1);
1485 if (expr->constant_elements()->map() == 1525 if (constant_elements_values->map() ==
1486 isolate()->heap()->fixed_cow_array_map()) { 1526 isolate()->heap()->fixed_cow_array_map()) {
1487 FastCloneShallowArrayStub stub( 1527 FastCloneShallowArrayStub stub(
1488 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); 1528 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
1489 __ CallStub(&stub); 1529 __ CallStub(&stub);
1490 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1530 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
1491 1, a1, a2); 1531 1, a1, a2);
1492 } else if (expr->depth() > 1) { 1532 } else if (expr->depth() > 1) {
1493 __ CallRuntime(Runtime::kCreateArrayLiteral, 3); 1533 __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1494 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { 1534 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1495 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); 1535 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1496 } else { 1536 } else {
1497 FastCloneShallowArrayStub stub( 1537 ASSERT(constant_elements_kind == FAST_ELEMENTS ||
1498 FastCloneShallowArrayStub::CLONE_ELEMENTS, length); 1538 constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
1539 FLAG_smi_only_arrays);
1540 FastCloneShallowArrayStub::Mode mode =
1541 constant_elements_kind == FAST_DOUBLE_ELEMENTS
1542 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
1543 : FastCloneShallowArrayStub::CLONE_ELEMENTS;
1544 FastCloneShallowArrayStub stub(mode, length);
1499 __ CallStub(&stub); 1545 __ CallStub(&stub);
1500 } 1546 }
1501 1547
1502 bool result_saved = false; // Is the result saved to the stack? 1548 bool result_saved = false; // Is the result saved to the stack?
1503 1549
1504 // Emit code to evaluate all the non-constant subexpressions and to store 1550 // Emit code to evaluate all the non-constant subexpressions and to store
1505 // them into the newly cloned array. 1551 // them into the newly cloned array.
1506 for (int i = 0; i < length; i++) { 1552 for (int i = 0; i < length; i++) {
1507 Expression* subexpr = subexprs->at(i); 1553 Expression* subexpr = subexprs->at(i);
1508 // If the subexpression is a literal or a simple materialized literal it 1554 // If the subexpression is a literal or a simple materialized literal it
1509 // is already set in the cloned array. 1555 // is already set in the cloned array.
1510 if (subexpr->AsLiteral() != NULL || 1556 if (subexpr->AsLiteral() != NULL ||
1511 CompileTimeValue::IsCompileTimeValue(subexpr)) { 1557 CompileTimeValue::IsCompileTimeValue(subexpr)) {
1512 continue; 1558 continue;
1513 } 1559 }
1514 1560
1515 if (!result_saved) { 1561 if (!result_saved) {
1516 __ push(v0); 1562 __ push(v0);
1517 result_saved = true; 1563 result_saved = true;
1518 } 1564 }
1519 VisitForAccumulatorValue(subexpr); 1565 VisitForAccumulatorValue(subexpr);
1520 1566
1521 // Store the subexpression value in the array's elements.
1522 __ lw(t6, MemOperand(sp)); // Copy of array literal. 1567 __ lw(t6, MemOperand(sp)); // Copy of array literal.
1523 __ lw(a1, FieldMemOperand(t6, JSObject::kElementsOffset)); 1568 __ lw(a1, FieldMemOperand(t6, JSObject::kElementsOffset));
1569 __ lw(a2, FieldMemOperand(t6, JSObject::kMapOffset));
1524 int offset = FixedArray::kHeaderSize + (i * kPointerSize); 1570 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1571
1572 Label element_done;
1573 Label double_elements;
1574 Label smi_element;
1575 Label slow_elements;
1576 Label fast_elements;
1577 __ CheckFastElements(a2, a3, &double_elements);
1578
1579 // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
1580 __ JumpIfSmi(result_register(), &smi_element);
1581 __ CheckFastSmiOnlyElements(a2, a3, &fast_elements);
1582
1583 // Store into the array literal requires an elements transition. Call into
1584 // the runtime.
1585 __ bind(&slow_elements);
1586 __ push(t6); // Copy of array literal.
1587 __ li(a1, Operand(Smi::FromInt(i)));
1588 __ li(a2, Operand(Smi::FromInt(NONE))); // PropertyAttributes
1589 __ li(a3, Operand(Smi::FromInt(strict_mode_flag()))); // Strict mode.
1590 __ Push(a1, result_register(), a2, a3);
1591 __ CallRuntime(Runtime::kSetProperty, 5);
1592 __ Branch(&element_done);
1593
1594 // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
1595 __ bind(&double_elements);
1596 __ li(a3, Operand(Smi::FromInt(i)));
1597 __ StoreNumberToDoubleElements(result_register(), a3, t6, a1, t0, t1, t5,
1598 t3, &slow_elements);
1599 __ Branch(&element_done);
1600
1601 // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
1602 __ bind(&fast_elements);
1525 __ sw(result_register(), FieldMemOperand(a1, offset)); 1603 __ sw(result_register(), FieldMemOperand(a1, offset));
1604 // Update the write barrier for the array store.
1526 1605
1527 Label no_map_change;
1528 __ JumpIfSmi(result_register(), &no_map_change);
1529 // Update the write barrier for the array store with v0 as the scratch
1530 // register.
1531 __ RecordWriteField( 1606 __ RecordWriteField(
1532 a1, offset, result_register(), a2, kRAHasBeenSaved, kDontSaveFPRegs, 1607 a1, offset, result_register(), a2, kRAHasBeenSaved, kDontSaveFPRegs,
1533 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); 1608 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1534 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); 1609 __ Branch(&element_done);
1535 __ CheckFastSmiOnlyElements(a3, a2, &no_map_change); 1610
1536 __ push(t6); // Copy of array literal. 1611 // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
1537 __ CallRuntime(Runtime::kNonSmiElementStored, 1); 1612 // FAST_ELEMENTS, and value is Smi.
1538 __ bind(&no_map_change); 1613 __ bind(&smi_element);
1614 __ sw(result_register(), FieldMemOperand(a1, offset));
1615 // Fall through
1616
1617 __ bind(&element_done);
1539 1618
1540 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); 1619 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1541 } 1620 }
1542 1621
1543 if (result_saved) { 1622 if (result_saved) {
1544 context()->PlugTOS(); 1623 context()->PlugTOS();
1545 } else { 1624 } else {
1546 context()->Plug(v0); 1625 context()->Plug(v0);
1547 } 1626 }
1548 } 1627 }
(...skipping 361 matching lines...) Expand 10 before | Expand all | Expand 10 after
1910 __ sw(result_register(), location); 1989 __ sw(result_register(), location);
1911 if (var->IsContextSlot()) { 1990 if (var->IsContextSlot()) {
1912 // RecordWrite may destroy all its register arguments. 1991 // RecordWrite may destroy all its register arguments.
1913 __ mov(a3, result_register()); 1992 __ mov(a3, result_register());
1914 int offset = Context::SlotOffset(var->index()); 1993 int offset = Context::SlotOffset(var->index());
1915 __ RecordWriteContextSlot( 1994 __ RecordWriteContextSlot(
1916 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs); 1995 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
1917 } 1996 }
1918 } 1997 }
1919 1998
1920 } else if (var->mode() != CONST) { 1999 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
1921 // Assignment to var or initializing assignment to let. 2000 // Assignment to var or initializing assignment to let/const
2001 // in harmony mode.
1922 if (var->IsStackAllocated() || var->IsContextSlot()) { 2002 if (var->IsStackAllocated() || var->IsContextSlot()) {
1923 MemOperand location = VarOperand(var, a1); 2003 MemOperand location = VarOperand(var, a1);
1924 if (FLAG_debug_code && op == Token::INIT_LET) { 2004 if (FLAG_debug_code && op == Token::INIT_LET) {
1925 // Check for an uninitialized let binding. 2005 // Check for an uninitialized let binding.
1926 __ lw(a2, location); 2006 __ lw(a2, location);
1927 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); 2007 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
1928 __ Check(eq, "Let binding re-initialization.", a2, Operand(t0)); 2008 __ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
1929 } 2009 }
1930 // Perform the assignment. 2010 // Perform the assignment.
1931 __ sw(v0, location); 2011 __ sw(v0, location);
(...skipping 864 matching lines...) Expand 10 before | Expand all | Expand 10 after
2796 __ CallRuntime(Runtime::kNumberAlloc, 0); 2876 __ CallRuntime(Runtime::kNumberAlloc, 0);
2797 __ mov(s0, v0); // Save result in s0, so it is saved thru CFunc call. 2877 __ mov(s0, v0); // Save result in s0, so it is saved thru CFunc call.
2798 2878
2799 __ bind(&heapnumber_allocated); 2879 __ bind(&heapnumber_allocated);
2800 2880
2801 // Convert 32 random bits in v0 to 0.(32 random bits) in a double 2881 // Convert 32 random bits in v0 to 0.(32 random bits) in a double
2802 // by computing: 2882 // by computing:
2803 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). 2883 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
2804 if (CpuFeatures::IsSupported(FPU)) { 2884 if (CpuFeatures::IsSupported(FPU)) {
2805 __ PrepareCallCFunction(1, a0); 2885 __ PrepareCallCFunction(1, a0);
2806 __ li(a0, Operand(ExternalReference::isolate_address())); 2886 __ lw(a0, ContextOperand(cp, Context::GLOBAL_INDEX));
2887 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset));
2807 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1); 2888 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
2808 2889
2809
2810 CpuFeatures::Scope scope(FPU); 2890 CpuFeatures::Scope scope(FPU);
2811 // 0x41300000 is the top half of 1.0 x 2^20 as a double. 2891 // 0x41300000 is the top half of 1.0 x 2^20 as a double.
2812 __ li(a1, Operand(0x41300000)); 2892 __ li(a1, Operand(0x41300000));
2813 // Move 0x41300000xxxxxxxx (x = random bits in v0) to FPU. 2893 // Move 0x41300000xxxxxxxx (x = random bits in v0) to FPU.
2814 __ Move(f12, v0, a1); 2894 __ Move(f12, v0, a1);
2815 // Move 0x4130000000000000 to FPU. 2895 // Move 0x4130000000000000 to FPU.
2816 __ Move(f14, zero_reg, a1); 2896 __ Move(f14, zero_reg, a1);
2817 // Subtract and store the result in the heap number. 2897 // Subtract and store the result in the heap number.
2818 __ sub_d(f0, f12, f14); 2898 __ sub_d(f0, f12, f14);
2819 __ sdc1(f0, MemOperand(s0, HeapNumber::kValueOffset - kHeapObjectTag)); 2899 __ sdc1(f0, MemOperand(s0, HeapNumber::kValueOffset - kHeapObjectTag));
2820 __ mov(v0, s0); 2900 __ mov(v0, s0);
2821 } else { 2901 } else {
2822 __ PrepareCallCFunction(2, a0); 2902 __ PrepareCallCFunction(2, a0);
2823 __ mov(a0, s0); 2903 __ mov(a0, s0);
2824 __ li(a1, Operand(ExternalReference::isolate_address())); 2904 __ lw(a1, ContextOperand(cp, Context::GLOBAL_INDEX));
2905 __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalContextOffset));
2825 __ CallCFunction( 2906 __ CallCFunction(
2826 ExternalReference::fill_heap_number_with_random_function(isolate()), 2); 2907 ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
2827 } 2908 }
2828 2909
2829 context()->Plug(v0); 2910 context()->Plug(v0);
2830 } 2911 }
2831 2912
2832 2913
2833 void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) { 2914 void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
2834 // Load the arguments on the stack and call the stub. 2915 // Load the arguments on the stack and call the stub.
(...skipping 1258 matching lines...) Expand 10 before | Expand all | Expand 10 after
4093 break; 4174 break;
4094 } 4175 }
4095 4176
4096 default: { 4177 default: {
4097 VisitForAccumulatorValue(expr->right()); 4178 VisitForAccumulatorValue(expr->right());
4098 Condition cc = eq; 4179 Condition cc = eq;
4099 switch (op) { 4180 switch (op) {
4100 case Token::EQ_STRICT: 4181 case Token::EQ_STRICT:
4101 case Token::EQ: 4182 case Token::EQ:
4102 cc = eq; 4183 cc = eq;
4103 __ mov(a0, result_register());
4104 __ pop(a1);
4105 break; 4184 break;
4106 case Token::LT: 4185 case Token::LT:
4107 cc = lt; 4186 cc = lt;
4108 __ mov(a0, result_register());
4109 __ pop(a1);
4110 break; 4187 break;
4111 case Token::GT: 4188 case Token::GT:
4112 // Reverse left and right sides to obtain ECMA-262 conversion order. 4189 cc = gt;
4113 cc = lt;
4114 __ mov(a1, result_register());
4115 __ pop(a0);
4116 break; 4190 break;
4117 case Token::LTE: 4191 case Token::LTE:
4118 // Reverse left and right sides to obtain ECMA-262 conversion order. 4192 cc = le;
4119 cc = ge;
4120 __ mov(a1, result_register());
4121 __ pop(a0);
4122 break; 4193 break;
4123 case Token::GTE: 4194 case Token::GTE:
4124 cc = ge; 4195 cc = ge;
4125 __ mov(a0, result_register());
4126 __ pop(a1);
4127 break; 4196 break;
4128 case Token::IN: 4197 case Token::IN:
4129 case Token::INSTANCEOF: 4198 case Token::INSTANCEOF:
4130 default: 4199 default:
4131 UNREACHABLE(); 4200 UNREACHABLE();
4132 } 4201 }
4202 __ mov(a0, result_register());
4203 __ pop(a1);
4133 4204
4134 bool inline_smi_code = ShouldInlineSmiCase(op); 4205 bool inline_smi_code = ShouldInlineSmiCase(op);
4135 JumpPatchSite patch_site(masm_); 4206 JumpPatchSite patch_site(masm_);
4136 if (inline_smi_code) { 4207 if (inline_smi_code) {
4137 Label slow_case; 4208 Label slow_case;
4138 __ Or(a2, a0, Operand(a1)); 4209 __ Or(a2, a0, Operand(a1));
4139 patch_site.EmitJumpIfNotSmi(a2, &slow_case); 4210 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
4140 Split(cc, a1, Operand(a0), if_true, if_false, NULL); 4211 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
4141 __ bind(&slow_case); 4212 __ bind(&slow_case);
4142 } 4213 }
(...skipping 154 matching lines...) Expand 10 before | Expand all | Expand 10 after
4297 *context_length = 0; 4368 *context_length = 0;
4298 return previous_; 4369 return previous_;
4299 } 4370 }
4300 4371
4301 4372
4302 #undef __ 4373 #undef __
4303 4374
4304 } } // namespace v8::internal 4375 } } // namespace v8::internal
4305 4376
4306 #endif // V8_TARGET_ARCH_MIPS 4377 #endif // V8_TARGET_ARCH_MIPS
OLDNEW
« no previous file with comments | « src/mips/deoptimizer-mips.cc ('k') | src/mips/ic-mips.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698