Chromium Code Reviews

Unified Diff: src/arm/full-codegen-arm.cc

Issue 8404030: Version 3.7.1 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 9 years, 1 month ago
@@ old 1, new 1 @@
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 251 matching lines...)
@@ old 262, new 262 @@
     Comment cmnt(masm_, "[ Declarations");
     scope()->VisitIllegalRedeclaration(this);

   } else {
     PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
     { Comment cmnt(masm_, "[ Declarations");
       // For named function expressions, declare the function name as a
       // constant.
       if (scope()->is_function_scope() && scope()->function() != NULL) {
         int ignored = 0;
-        EmitDeclaration(scope()->function(), CONST, NULL, &ignored);
+        VariableProxy* proxy = scope()->function();
+        ASSERT(proxy->var()->mode() == CONST ||
+               proxy->var()->mode() == CONST_HARMONY);
+        EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
       }
       VisitDeclarations(scope()->declarations());
     }

     { Comment cmnt(masm_, "[ Stack check");
       PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
       Label ok;
       __ LoadRoot(ip, Heap::kStackLimitRootIndex);
       __ cmp(sp, Operand(ip));
       __ b(hs, &ok);
(...skipping 428 matching lines...)
@@ old 711, new 714 @@


 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
                                         VariableMode mode,
                                         FunctionLiteral* function,
                                         int* global_count) {
   // If it was not possible to allocate the variable at compile time, we
   // need to "declare" it at runtime to make sure it actually exists in the
   // local context.
   Variable* variable = proxy->var();
+  bool binding_needs_init =
+      mode == CONST || mode == CONST_HARMONY || mode == LET;
   switch (variable->location()) {
     case Variable::UNALLOCATED:
       ++(*global_count);
       break;

     case Variable::PARAMETER:
     case Variable::LOCAL:
       if (function != NULL) {
         Comment cmnt(masm_, "[ Declaration");
         VisitForAccumulatorValue(function);
         __ str(result_register(), StackOperand(variable));
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
         Comment cmnt(masm_, "[ Declaration");
         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
         __ str(ip, StackOperand(variable));
       }
       break;

     case Variable::CONTEXT:
       // The variable in the decl always resides in the current function
       // context.
       ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
(...skipping 13 matching lines...)
@@ old 756, new 761 @@
         // We know that we have written a function, which is not a smi.
         __ RecordWriteContextSlot(cp,
                                   offset,
                                   result_register(),
                                   r2,
                                   kLRHasBeenSaved,
                                   kDontSaveFPRegs,
                                   EMIT_REMEMBERED_SET,
                                   OMIT_SMI_CHECK);
         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
         Comment cmnt(masm_, "[ Declaration");
         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
         __ str(ip, ContextOperand(cp, variable->index()));
         // No write barrier since the_hole_value is in old space.
         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
       }
       break;

     case Variable::LOOKUP: {
       Comment cmnt(masm_, "[ Declaration");
       __ mov(r2, Operand(variable->name()));
-      // Declaration nodes are always introduced in one of three modes.
-      ASSERT(mode == VAR || mode == CONST || mode == LET);
-      PropertyAttributes attr = (mode == CONST) ? READ_ONLY : NONE;
+      // Declaration nodes are always introduced in one of four modes.
+      ASSERT(mode == VAR ||
+             mode == CONST ||
+             mode == CONST_HARMONY ||
+             mode == LET);
+      PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
+          ? READ_ONLY : NONE;
       __ mov(r1, Operand(Smi::FromInt(attr)));
       // Push initial value, if any.
       // Note: For variables we must not push an initial value (such as
       // 'undefined') because we may have a (legal) redeclaration and we
       // must not destroy the current value.
       if (function != NULL) {
         __ Push(cp, r2, r1);
         // Push initial value for function declaration.
         VisitForStackValue(function);
-      } else if (mode == CONST || mode == LET) {
+      } else if (binding_needs_init) {
         __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
         __ Push(cp, r2, r1, r0);
       } else {
         __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
         __ Push(cp, r2, r1, r0);
       }
       __ CallRuntime(Runtime::kDeclareContextSlot, 4);
       break;
     }
   }
(...skipping 121 matching lines...)
@@ old 922, new 931 @@
   Label convert, done_convert;
   __ JumpIfSmi(r0, &convert);
   __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
   __ b(ge, &done_convert);
   __ bind(&convert);
   __ push(r0);
   __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
   __ bind(&done_convert);
   __ push(r0);

+  // Check for proxies.
+  Label call_runtime;
+  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
+  __ b(le, &call_runtime);
+
   // Check cache validity in generated code. This is a fast case for
   // the JSObject::IsSimpleEnum cache validity checks. If we cannot
   // guarantee cache validity, call the runtime system to check cache
   // validity or get the property names in a fixed array.
-  Label next, call_runtime;
+  Label next;
   // Preload a couple of values used in the loop.
   Register empty_fixed_array_value = r6;
   __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
   Register empty_descriptor_array_value = r7;
   __ LoadRoot(empty_descriptor_array_value,
               Heap::kEmptyDescriptorArrayRootIndex);
   __ mov(r1, r0);
   __ bind(&next);

   // Check that there are no elements. Register r1 contains the
(...skipping 58 matching lines...)
@@ old 1005, new 1020 @@

   // Setup the four remaining stack slots.
   __ push(r0);  // Map.
   __ ldr(r1, FieldMemOperand(r2, FixedArray::kLengthOffset));
   __ mov(r0, Operand(Smi::FromInt(0)));
   // Push enumeration cache, enumeration cache length (as smi) and zero.
   __ Push(r2, r1, r0);
   __ jmp(&loop);

   // We got a fixed array in register r0. Iterate through that.
+  Label non_proxy;
   __ bind(&fixed_array);
-  __ mov(r1, Operand(Smi::FromInt(0)));  // Map (0) - force slow check.
-  __ Push(r1, r0);
+  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
+  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
+  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
+  __ b(gt, &non_proxy);
+  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
+  __ bind(&non_proxy);
+  __ Push(r1, r0);  // Smi and array
   __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
   __ mov(r0, Operand(Smi::FromInt(0)));
   __ Push(r1, r0);  // Fixed array length (as smi) and initial index.

   // Generate code for doing the condition check.
   __ bind(&loop);
   // Load the current count to r0, load the length to r1.
   __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
   __ cmp(r0, r1);  // Compare to the array length.
   __ b(hs, loop_statement.break_label());

   // Get the current entry of the array into register r3.
   __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
   __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));

-  // Get the expected map from the stack or a zero map in the
+  // Get the expected map from the stack or a smi in the
   // permanent slow case into register r2.
   __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

   // Check if the expected map still matches that of the enumerable.
-  // If not, we have to filter the key.
+  // If not, we may have to filter the key.
   Label update_each;
   __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
   __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
   __ cmp(r4, Operand(r2));
   __ b(eq, &update_each);

+  // For proxies, no filtering is done.
+  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
+  __ cmp(r2, Operand(Smi::FromInt(0)));
+  __ b(eq, &update_each);
+
   // Convert the entry to a string or (smi) 0 if it isn't a property
   // any more. If the property has been removed while iterating, we
   // just skip it.
   __ push(r1);  // Enumerable.
   __ push(r3);  // Current entry.
   __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
   __ mov(r3, Operand(r0), SetCC);
   __ b(eq, loop_statement.continue_label());

   // Update the 'each' property or variable from the possibly filtered
(...skipping 34 matching lines...)
@@ old 1090, new 1117 @@
   // space for nested functions that don't need literals cloning. If
   // we're running with the --always-opt or the --prepare-always-opt
   // flag, we need to use the runtime function so that the new function
   // we are creating here gets a chance to have its code optimized and
   // doesn't just get a copy of the existing unoptimized code.
   if (!FLAG_always_opt &&
       !FLAG_prepare_always_opt &&
       !pretenure &&
       scope()->is_function_scope() &&
       info->num_literals() == 0) {
-    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
+    FastNewClosureStub stub(info->strict_mode_flag());
     __ mov(r0, Operand(info));
     __ push(r0);
     __ CallStub(&stub);
   } else {
     __ mov(r0, Operand(info));
     __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                               : Heap::kFalseValueRootIndex);
     __ Push(cp, r0, r1);
     __ CallRuntime(Runtime::kNewClosure, 3);
   }
(...skipping 10 matching lines...)
@@ old 1121, new 1148 @@
 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                       TypeofState typeof_state,
                                                       Label* slow) {
   Register current = cp;
   Register next = r1;
   Register temp = r2;

   Scope* s = scope();
   while (s != NULL) {
     if (s->num_heap_slots() > 0) {
-      if (s->calls_eval()) {
+      if (s->calls_non_strict_eval()) {
         // Check that extension is NULL.
         __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
         __ tst(temp, temp);
         __ b(ne, slow);
       }
       // Load next context in chain.
       __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
       // Walk the rest of the chain without clobbering cp.
       current = next;
     }
     // If no outer scope calls eval, we do not need to check more
     // context extensions.
-    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
     s = s->outer_scope();
   }

   if (s->is_eval_scope()) {
     Label loop, fast;
     if (!current.is(next)) {
       __ Move(next, current);
     }
     __ bind(&loop);
     // Terminate at global context.
(...skipping 23 matching lines...)
@@ old 1178, new 1205 @@

 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                 Label* slow) {
   ASSERT(var->IsContextSlot());
   Register context = cp;
   Register next = r3;
   Register temp = r4;

   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
     if (s->num_heap_slots() > 0) {
-      if (s->calls_eval()) {
+      if (s->calls_non_strict_eval()) {
         // Check that extension is NULL.
         __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
         __ tst(temp, temp);
         __ b(ne, slow);
       }
       __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
       // Walk the rest of the chain without clobbering cp.
       context = next;
     }
   }
(...skipping 18 matching lines...)
@@ old 1217, new 1244 @@
   // introducing variables. In those cases, we do not want to
   // perform a runtime call for all variables in the scope
   // containing the eval.
   if (var->mode() == DYNAMIC_GLOBAL) {
     EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
     __ jmp(done);
   } else if (var->mode() == DYNAMIC_LOCAL) {
     Variable* local = var->local_if_not_shadowed();
     __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
     if (local->mode() == CONST ||
+        local->mode() == CONST_HARMONY ||
         local->mode() == LET) {
       __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
       if (local->mode() == CONST) {
         __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
-      } else {  // LET
+      } else {  // LET || CONST_HARMONY
         __ b(ne, done);
         __ mov(r0, Operand(var->name()));
         __ push(r0);
         __ CallRuntime(Runtime::kThrowReferenceError, 1);
       }
     }
     __ jmp(done);
   }
 }

(...skipping 17 matching lines...)
@@ old 1259, new 1287 @@
       context()->Plug(r0);
       break;
     }

     case Variable::PARAMETER:
     case Variable::LOCAL:
     case Variable::CONTEXT: {
       Comment cmnt(masm_, var->IsContextSlot()
                               ? "Context variable"
                               : "Stack variable");
-      if (var->mode() != LET && var->mode() != CONST) {
+      if (!var->binding_needs_init()) {
         context()->Plug(var);
       } else {
         // Let and const need a read barrier.
         GetVar(r0, var);
         __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
-        if (var->mode() == LET) {
+        if (var->mode() == LET || var->mode() == CONST_HARMONY) {
+          // Throw a reference error when using an uninitialized let/const
+          // binding in harmony mode.
           Label done;
           __ b(ne, &done);
           __ mov(r0, Operand(var->name()));
           __ push(r0);
           __ CallRuntime(Runtime::kThrowReferenceError, 1);
           __ bind(&done);
         } else {
+          // Uninitalized const bindings outside of harmony mode are unholed.
+          ASSERT(var->mode() == CONST);
           __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
         }
         context()->Plug(r0);
       }
       break;
     }

     case Variable::LOOKUP: {
       Label done, slow;
       // Generate code for loading from variables potentially shadowed
(...skipping 167 matching lines...)
@@ old 1460, new 1492 @@
     context()->Plug(r0);
   }
 }


 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   Comment cmnt(masm_, "[ ArrayLiteral");

   ZoneList<Expression*>* subexprs = expr->values();
   int length = subexprs->length();
+  Handle<FixedArray> constant_elements = expr->constant_elements();
+  ASSERT_EQ(2, constant_elements->length());
+  ElementsKind constant_elements_kind =
+      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+  Handle<FixedArrayBase> constant_elements_values(
+      FixedArrayBase::cast(constant_elements->get(1)));

   __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
   __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
-  __ mov(r1, Operand(expr->constant_elements()));
+  __ mov(r1, Operand(constant_elements));
   __ Push(r3, r2, r1);
-  if (expr->constant_elements()->map() ==
+  if (constant_elements_values->map() ==
       isolate()->heap()->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
     __ CallStub(&stub);
     __ IncrementCounter(
         isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
   } else if (expr->depth() > 1) {
     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
   } else {
-    FastCloneShallowArrayStub stub(
-        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
+    ASSERT(constant_elements_kind == FAST_ELEMENTS ||
+           constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+           FLAG_smi_only_arrays);
+    FastCloneShallowArrayStub::Mode mode =
+        constant_elements_kind == FAST_DOUBLE_ELEMENTS
+            ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+            : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+    FastCloneShallowArrayStub stub(mode, length);
     __ CallStub(&stub);
   }

   bool result_saved = false;  // Is the result saved to the stack?

   // Emit code to evaluate all the non-constant subexpressions and to store
   // them into the newly cloned array.
   for (int i = 0; i < length; i++) {
     Expression* subexpr = subexprs->at(i);
     // If the subexpression is a literal or a simple materialized literal it
     // is already set in the cloned array.
     if (subexpr->AsLiteral() != NULL ||
         CompileTimeValue::IsCompileTimeValue(subexpr)) {
       continue;
     }

     if (!result_saved) {
       __ push(r0);
       result_saved = true;
     }
     VisitForAccumulatorValue(subexpr);

-    // Store the subexpression value in the array's elements.
     __ ldr(r6, MemOperand(sp));  // Copy of array literal.
     __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
+    __ ldr(r2, FieldMemOperand(r6, JSObject::kMapOffset));
     int offset = FixedArray::kHeaderSize + (i * kPointerSize);
+
+    Label element_done;
+    Label double_elements;
+    Label smi_element;
+    Label slow_elements;
+    Label fast_elements;
+    __ CheckFastElements(r2, r3, &double_elements);
+
+    // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+    __ JumpIfSmi(result_register(), &smi_element);
+    __ CheckFastSmiOnlyElements(r2, r3, &fast_elements);
+
+    // Store into the array literal requires a elements transition. Call into
+    // the runtime.
+    __ bind(&slow_elements);
+    __ push(r6);  // Copy of array literal.
+    __ mov(r1, Operand(Smi::FromInt(i)));
+    __ mov(r2, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
+    __ mov(r3, Operand(Smi::FromInt(strict_mode_flag())));  // Strict mode.
+    __ Push(r1, result_register(), r2, r3);
+    __ CallRuntime(Runtime::kSetProperty, 5);
+    __ b(&element_done);
+
+    // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
+    __ bind(&double_elements);
+    __ mov(r3, Operand(Smi::FromInt(i)));
+    __ StoreNumberToDoubleElements(result_register(), r3, r6, r1, r4, r5, r9,
+                                   r7, &slow_elements);
+    __ b(&element_done);
+
+    // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+    __ bind(&fast_elements);
     __ str(result_register(), FieldMemOperand(r1, offset));
-
-    Label no_map_change;
-    __ JumpIfSmi(result_register(), &no_map_change);
-    // Update the write barrier for the array store with r0 as the scratch
-    // register.
+    // Update the write barrier for the array store.
     __ RecordWriteField(
         r1, offset, result_register(), r2, kLRHasBeenSaved, kDontSaveFPRegs,
         EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
-    __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
-    __ CheckFastSmiOnlyElements(r3, r2, &no_map_change);
-    __ push(r6);  // Copy of array literal.
-    __ CallRuntime(Runtime::kNonSmiElementStored, 1);
-    __ bind(&no_map_change);
+    __ b(&element_done);
+
+    // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
+    // FAST_ELEMENTS, and value is Smi.
+    __ bind(&smi_element);
+    __ str(result_register(), FieldMemOperand(r1, offset));
+    // Fall through
+
+    __ bind(&element_done);

     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   }

   if (result_saved) {
     context()->PlugTOS();
   } else {
     context()->Plug(r0);
   }
 }
(...skipping 356 matching lines...)
@@ old 1896, new 1972 @@
       __ str(result_register(), location);
       if (var->IsContextSlot()) {
         // RecordWrite may destroy all its register arguments.
         __ mov(r3, result_register());
         int offset = Context::SlotOffset(var->index());
         __ RecordWriteContextSlot(
             r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
       }
     }

-  } else if (var->mode() != CONST) {
-    // Assignment to var or initializing assignment to let.
+  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
+    // Assignment to var or initializing assignment to let/const
+    // in harmony mode.
     if (var->IsStackAllocated() || var->IsContextSlot()) {
       MemOperand location = VarOperand(var, r1);
       if (FLAG_debug_code && op == Token::INIT_LET) {
         // Check for an uninitialized let binding.
         __ ldr(r2, location);
         __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
         __ Check(eq, "Let binding re-initialization.");
       }
       // Perform the assignment.
       __ str(r0, location);
(...skipping 859 matching lines...)
@@ old 2777, new 2854 @@
   __ CallRuntime(Runtime::kNumberAlloc, 0);
   __ mov(r4, Operand(r0));

   __ bind(&heapnumber_allocated);

   // Convert 32 random bits in r0 to 0.(32 random bits) in a double
   // by computing:
   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
   if (CpuFeatures::IsSupported(VFP3)) {
     __ PrepareCallCFunction(1, r0);
-    __ mov(r0, Operand(ExternalReference::isolate_address()));
+    __ ldr(r0, ContextOperand(context_register(), Context::GLOBAL_INDEX));
+    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
     __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

     CpuFeatures::Scope scope(VFP3);
     // 0x41300000 is the top half of 1.0 x 2^20 as a double.
     // Create this constant using mov/orr to avoid PC relative load.
     __ mov(r1, Operand(0x41000000));
     __ orr(r1, r1, Operand(0x300000));
     // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
     __ vmov(d7, r0, r1);
     // Move 0x4130000000000000 to VFP.
     __ mov(r0, Operand(0, RelocInfo::NONE));
     __ vmov(d8, r0, r1);
     // Subtract and store the result in the heap number.
     __ vsub(d7, d7, d8);
     __ sub(r0, r4, Operand(kHeapObjectTag));
     __ vstr(d7, r0, HeapNumber::kValueOffset);
     __ mov(r0, r4);
   } else {
     __ PrepareCallCFunction(2, r0);
+    __ ldr(r1, ContextOperand(context_register(), Context::GLOBAL_INDEX));
     __ mov(r0, Operand(r4));
-    __ mov(r1, Operand(ExternalReference::isolate_address()));
+    __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalContextOffset));
     __ CallCFunction(
         ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
   }

   context()->Plug(r0);
 }


 void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
   // Load the arguments on the stack and call the stub.
(...skipping 1245 matching lines...)
@@ old 4064, new 4143 @@
       break;
     }

     default: {
       VisitForAccumulatorValue(expr->right());
       Condition cond = eq;
       switch (op) {
         case Token::EQ_STRICT:
         case Token::EQ:
           cond = eq;
-          __ pop(r1);
           break;
         case Token::LT:
           cond = lt;
-          __ pop(r1);
           break;
         case Token::GT:
-          // Reverse left and right sides to obtain ECMA-262 conversion order.
-          cond = lt;
-          __ mov(r1, result_register());
-          __ pop(r0);
+          cond = gt;
           break;
         case Token::LTE:
-          // Reverse left and right sides to obtain ECMA-262 conversion order.
-          cond = ge;
-          __ mov(r1, result_register());
-          __ pop(r0);
+          cond = le;
           break;
         case Token::GTE:
           cond = ge;
-          __ pop(r1);
           break;
         case Token::IN:
         case Token::INSTANCEOF:
         default:
           UNREACHABLE();
       }
+      __ pop(r1);

       bool inline_smi_code = ShouldInlineSmiCase(op);
       JumpPatchSite patch_site(masm_);
       if (inline_smi_code) {
         Label slow_case;
         __ orr(r2, r0, Operand(r1));
         patch_site.EmitJumpIfNotSmi(r2, &slow_case);
         __ cmp(r1, r0);
         Split(cond, if_true, if_false, NULL);
         __ bind(&slow_case);
(...skipping 157 matching lines...)
@@ old 4268, new 4339 @@
   *context_length = 0;
   return previous_;
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM
