Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(4)

Side by Side Diff: src/arm/lithium-codegen-arm.cc

Issue 24596002: ARM: Let the register allocator handle the context register. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed first round of comments. Created 7 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 230 matching lines...) Expand 10 before | Expand all | Expand 10 after
241 r3, 241 r3,
242 GetLinkRegisterState(), 242 GetLinkRegisterState(),
243 kSaveFPRegs); 243 kSaveFPRegs);
244 } 244 }
245 } 245 }
246 Comment(";;; End allocate local context"); 246 Comment(";;; End allocate local context");
247 } 247 }
248 248
249 // Trace the call. 249 // Trace the call.
250 if (FLAG_trace && info()->IsOptimizing()) { 250 if (FLAG_trace && info()->IsOptimizing()) {
251 // We have not executed any compiled code yet, so cp still holds the
252 // incoming context.
251 __ CallRuntime(Runtime::kTraceEnter, 0); 253 __ CallRuntime(Runtime::kTraceEnter, 0);
252 } 254 }
253 return !is_aborted(); 255 return !is_aborted();
254 } 256 }
255 257
256 258
257 void LCodeGen::GenerateOsrPrologue() { 259 void LCodeGen::GenerateOsrPrologue() {
258 // Generate the OSR entry prologue at the first unknown OSR value, or if there 260 // Generate the OSR entry prologue at the first unknown OSR value, or if there
259 // are none, at the OSR entrypoint instruction. 261 // are none, at the OSR entrypoint instruction.
260 if (osr_pc_offset_ >= 0) return; 262 if (osr_pc_offset_ >= 0) return;
(...skipping 491 matching lines...) Expand 10 before | Expand all | Expand 10 after
752 ASSERT(instr != NULL); 754 ASSERT(instr != NULL);
753 LPointerMap* pointers = instr->pointer_map(); 755 LPointerMap* pointers = instr->pointer_map();
754 ASSERT(pointers != NULL); 756 ASSERT(pointers != NULL);
755 RecordPosition(pointers->position()); 757 RecordPosition(pointers->position());
756 758
757 __ CallRuntime(function, num_arguments); 759 __ CallRuntime(function, num_arguments);
758 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); 760 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
759 } 761 }
760 762
761 763
// Restores the context register (cp) from a deferred-code context operand.
// The register allocator may have placed the context either in a register
// or in a stack slot; any other operand kind is a codegen bug.
void LCodeGen::LoadContextFromDeferred(LOperand* context) {
  if (context->IsRegister()) {
    __ Move(cp, ToRegister(context));
  } else if (context->IsStackSlot()) {
    __ ldr(cp, ToMemOperand(context));
  } else {
    // Constants and double locations are never valid context operands.
    UNREACHABLE();
  }
}
773
774
// Calls a runtime function from deferred code. The caller supplies the
// context operand explicitly (the allocator owns cp), which is loaded into
// cp before the call. Double registers are saved across the call, and a
// register safepoint with no lazy deopt is recorded for |instr|.
void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr,
                                       LOperand* context) {
  LoadContextFromDeferred(context);
  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
}
769 784
770 785
771 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, 786 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
772 Safepoint::DeoptMode mode) { 787 Safepoint::DeoptMode mode) {
773 if (!environment->HasBeenRegistered()) { 788 if (!environment->HasBeenRegistered()) {
774 // Physical stack frame layout: 789 // Physical stack frame layout:
(...skipping 195 matching lines...) Expand 10 before | Expand all | Expand 10 after
970 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), 985 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
971 kind, arguments, deopt_mode); 986 kind, arguments, deopt_mode);
972 for (int i = 0; i < operands->length(); i++) { 987 for (int i = 0; i < operands->length(); i++) {
973 LOperand* pointer = operands->at(i); 988 LOperand* pointer = operands->at(i);
974 if (pointer->IsStackSlot()) { 989 if (pointer->IsStackSlot()) {
975 safepoint.DefinePointerSlot(pointer->index(), zone()); 990 safepoint.DefinePointerSlot(pointer->index(), zone());
976 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { 991 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
977 safepoint.DefinePointerRegister(ToRegister(pointer), zone()); 992 safepoint.DefinePointerRegister(ToRegister(pointer), zone());
978 } 993 }
979 } 994 }
980 if (kind & Safepoint::kWithRegisters) {
981 // Register cp always contains a pointer to the context.
982 safepoint.DefinePointerRegister(cp, zone());
983 }
984 } 995 }
985 996
986 997
// Records a simple safepoint (no saved registers, zero arguments) for the
// given pointer map.
void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
}
991 1002
992 1003
993 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) { 1004 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after
1066 DoGap(instr); 1077 DoGap(instr);
1067 } 1078 }
1068 1079
1069 1080
// Parameters are already in place on function entry, so no code is emitted.
void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}
1073 1084
1074 1085
1075 void LCodeGen::DoCallStub(LCallStub* instr) { 1086 void LCodeGen::DoCallStub(LCallStub* instr) {
1087 ASSERT(ToRegister(instr->context()).is(cp));
1076 ASSERT(ToRegister(instr->result()).is(r0)); 1088 ASSERT(ToRegister(instr->result()).is(r0));
1077 switch (instr->hydrogen()->major_key()) { 1089 switch (instr->hydrogen()->major_key()) {
1078 case CodeStub::RegExpConstructResult: { 1090 case CodeStub::RegExpConstructResult: {
1079 RegExpConstructResultStub stub; 1091 RegExpConstructResultStub stub;
1080 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 1092 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
1081 break; 1093 break;
1082 } 1094 }
1083 case CodeStub::RegExpExec: { 1095 case CodeStub::RegExpExec: {
1084 RegExpExecStub stub; 1096 RegExpExecStub stub;
1085 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 1097 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
(...skipping 930 matching lines...) Expand 10 before | Expand all | Expand 10 after
2016 __ add(scratch, string, Operand(index, LSL, 1)); 2028 __ add(scratch, string, Operand(index, LSL, 1));
2017 __ strh(value, FieldMemOperand(scratch, SeqString::kHeaderSize)); 2029 __ strh(value, FieldMemOperand(scratch, SeqString::kHeaderSize));
2018 } 2030 }
2019 } 2031 }
2020 } 2032 }
2021 2033
2022 2034
// Throws the instruction's value via Runtime::kThrow. Control never
// returns here on the normal path.
void LCodeGen::DoThrow(LThrow* instr) {
  // Load the value to throw (ip serves as scratch if the operand is not
  // already in a register) and push it as the single runtime argument.
  Register input_reg = EmitLoadRegister(instr->value(), ip);
  __ push(input_reg);
  ASSERT(ToRegister(instr->context()).is(cp));
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    // kThrow does not return; trap in debug builds if it ever does.
    __ stop("Unreachable code.");
  }
}
2032 2045
2033 2046
2034 void LCodeGen::DoAddI(LAddI* instr) { 2047 void LCodeGen::DoAddI(LAddI* instr) {
2035 LOperand* left = instr->left(); 2048 LOperand* left = instr->left();
(...skipping 117 matching lines...) Expand 10 before | Expand all | Expand 10 after
2153 break; 2166 break;
2154 } 2167 }
2155 default: 2168 default:
2156 UNREACHABLE(); 2169 UNREACHABLE();
2157 break; 2170 break;
2158 } 2171 }
2159 } 2172 }
2160 2173
2161 2174
2162 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { 2175 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
2176 ASSERT(ToRegister(instr->context()).is(cp));
2163 ASSERT(ToRegister(instr->left()).is(r1)); 2177 ASSERT(ToRegister(instr->left()).is(r1));
2164 ASSERT(ToRegister(instr->right()).is(r0)); 2178 ASSERT(ToRegister(instr->right()).is(r0));
2165 ASSERT(ToRegister(instr->result()).is(r0)); 2179 ASSERT(ToRegister(instr->result()).is(r0));
2166 2180
2167 BinaryOpStub stub(instr->op(), NO_OVERWRITE); 2181 BinaryOpStub stub(instr->op(), NO_OVERWRITE);
2168 // Block literal pool emission to ensure nop indicating no inlined smi code 2182 // Block literal pool emission to ensure nop indicating no inlined smi code
2169 // is in the correct position. 2183 // is in the correct position.
2170 Assembler::BlockConstPoolScope block_const_pool(masm()); 2184 Assembler::BlockConstPoolScope block_const_pool(masm());
2171 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 2185 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
2172 __ nop(); // Signals no inlined code. 2186 __ nop(); // Signals no inlined code.
(...skipping 391 matching lines...) Expand 10 before | Expand all | Expand 10 after
2564 case Token::GTE: 2578 case Token::GTE:
2565 return ge; 2579 return ge;
2566 default: 2580 default:
2567 UNREACHABLE(); 2581 UNREACHABLE();
2568 return kNoCondition; 2582 return kNoCondition;
2569 } 2583 }
2570 } 2584 }
2571 2585
2572 2586
2573 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { 2587 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
2588 ASSERT(ToRegister(instr->context()).is(cp));
2574 Token::Value op = instr->op(); 2589 Token::Value op = instr->op();
2575 2590
2576 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 2591 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
2577 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2592 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2578 // This instruction also signals no smi code inlined. 2593 // This instruction also signals no smi code inlined.
2579 __ cmp(r0, Operand::Zero()); 2594 __ cmp(r0, Operand::Zero());
2580 2595
2581 Condition condition = ComputeCompareCondition(op); 2596 Condition condition = ComputeCompareCondition(op);
2582 2597
2583 EmitBranch(instr, condition); 2598 EmitBranch(instr, condition);
(...skipping 140 matching lines...) Expand 10 before | Expand all | Expand 10 after
2724 Register reg = ToRegister(instr->value()); 2739 Register reg = ToRegister(instr->value());
2725 Register temp = ToRegister(instr->temp()); 2740 Register temp = ToRegister(instr->temp());
2726 2741
2727 __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset)); 2742 __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
2728 __ cmp(temp, Operand(instr->map())); 2743 __ cmp(temp, Operand(instr->map()));
2729 EmitBranch(instr, eq); 2744 EmitBranch(instr, eq);
2730 } 2745 }
2731 2746
2732 2747
// Implements the generic instanceof operation via InstanceofStub and
// converts the stub's result into the true/false heap values in r0.
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  ASSERT(ToRegister(instr->context()).is(cp));
  ASSERT(ToRegister(instr->left()).is(r0));  // Object is in r0.
  ASSERT(ToRegister(instr->right()).is(r1));  // Function is in r1.

  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);

  // The stub leaves zero in r0 on a positive result; select the matching
  // boolean heap object with conditional moves.
  __ cmp(r0, Operand::Zero());
  __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
  __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
}
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after
2833 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; 2849 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
2834 flags = static_cast<InstanceofStub::Flags>( 2850 flags = static_cast<InstanceofStub::Flags>(
2835 flags | InstanceofStub::kArgsInRegisters); 2851 flags | InstanceofStub::kArgsInRegisters);
2836 flags = static_cast<InstanceofStub::Flags>( 2852 flags = static_cast<InstanceofStub::Flags>(
2837 flags | InstanceofStub::kCallSiteInlineCheck); 2853 flags | InstanceofStub::kCallSiteInlineCheck);
2838 flags = static_cast<InstanceofStub::Flags>( 2854 flags = static_cast<InstanceofStub::Flags>(
2839 flags | InstanceofStub::kReturnTrueFalseObject); 2855 flags | InstanceofStub::kReturnTrueFalseObject);
2840 InstanceofStub stub(flags); 2856 InstanceofStub stub(flags);
2841 2857
2842 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 2858 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
2859 LoadContextFromDeferred(instr->context());
2843 2860
2844 // Get the temp register reserved by the instruction. This needs to be r4 as 2861 // Get the temp register reserved by the instruction. This needs to be r4 as
2845 // its slot of the pushing of safepoint registers is used to communicate the 2862 // its slot of the pushing of safepoint registers is used to communicate the
2846 // offset to the location of the map check. 2863 // offset to the location of the map check.
2847 Register temp = ToRegister(instr->temp()); 2864 Register temp = ToRegister(instr->temp());
2848 ASSERT(temp.is(r4)); 2865 ASSERT(temp.is(r4));
2849 __ LoadHeapObject(InstanceofStub::right(), instr->function()); 2866 __ LoadHeapObject(InstanceofStub::right(), instr->function());
2850 static const int kAdditionalDelta = 5; 2867 static const int kAdditionalDelta = 5;
2851 // Make sure that code size is predictable, since we use specific constant 2868
2852 // offsets in the code to find embedded values. 2869
(...skipping 24 matching lines...) Expand all
2877 2894
// Loads the instance-size byte field from the object's map into the
// result register.
void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());
  __ ldr(result, FieldMemOperand(object, HeapObject::kMapOffset));
  __ ldrb(result, FieldMemOperand(result, Map::kInstanceSizeOffset));
}
2884 2901
2885 2902
// Generic (tagged) comparison: calls the CompareIC for the token and
// materializes the boolean result as a true/false heap value.
void LCodeGen::DoCmpT(LCmpT* instr) {
  ASSERT(ToRegister(instr->context()).is(cp));
  Token::Value op = instr->op();

  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
  // This instruction also signals no smi code inlined.
  __ cmp(r0, Operand::Zero());

  // Conditionally load true when the comparison holds, false otherwise.
  Condition condition = ComputeCompareCondition(op);
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kTrueValueRootIndex,
              condition);
  __ LoadRoot(ToRegister(instr->result()),
              Heap::kFalseValueRootIndex,
              NegateCondition(condition));
}
2902 2920
2903 2921
2904 void LCodeGen::DoReturn(LReturn* instr) { 2922 void LCodeGen::DoReturn(LReturn* instr) {
2905 if (FLAG_trace && info()->IsOptimizing()) { 2923 if (FLAG_trace && info()->IsOptimizing()) {
2906 // Push the return value on the stack as the parameter. 2924 // Push the return value on the stack as the parameter.
2907 // Runtime::TraceExit returns its parameter in r0. 2925 // Runtime::TraceExit returns its parameter in r0. We're leaving the code
2926 // managed by the register allocator and tearing down the frame, it's
2927 // safe to write to the context register.
2908 __ push(r0); 2928 __ push(r0);
2929 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2909 __ CallRuntime(Runtime::kTraceExit, 1); 2930 __ CallRuntime(Runtime::kTraceExit, 1);
2910 } 2931 }
2911 if (info()->saves_caller_doubles()) { 2932 if (info()->saves_caller_doubles()) {
2912 ASSERT(NeedsEagerFrame()); 2933 ASSERT(NeedsEagerFrame());
2913 BitVector* doubles = chunk()->allocated_double_registers(); 2934 BitVector* doubles = chunk()->allocated_double_registers();
2914 BitVector::Iterator save_iterator(doubles); 2935 BitVector::Iterator save_iterator(doubles);
2915 int count = 0; 2936 int count = 0;
2916 while (!save_iterator.Done()) { 2937 while (!save_iterator.Done()) {
2917 __ vldr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()), 2938 __ vldr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
2918 MemOperand(sp, count * kDoubleSize)); 2939 MemOperand(sp, count * kDoubleSize));
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
2953 __ ldr(result, FieldMemOperand(ip, Cell::kValueOffset)); 2974 __ ldr(result, FieldMemOperand(ip, Cell::kValueOffset));
2954 if (instr->hydrogen()->RequiresHoleCheck()) { 2975 if (instr->hydrogen()->RequiresHoleCheck()) {
2955 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 2976 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2956 __ cmp(result, ip); 2977 __ cmp(result, ip);
2957 DeoptimizeIf(eq, instr->environment()); 2978 DeoptimizeIf(eq, instr->environment());
2958 } 2979 }
2959 } 2980 }
2960 2981
2961 2982
// Generic global load through the LoadIC. The global object arrives in r0
// and the result is returned in r0.
void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->context()).is(cp));
  ASSERT(ToRegister(instr->global_object()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  // The LoadIC expects the property name in r2.
  __ mov(r2, Operand(instr->name()));
  // typeof loads use a plain code target; other global loads are marked
  // as context-dependent call sites.
  RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
                                             : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, mode, instr);
}
2972 2994
(...skipping 17 matching lines...) Expand all
2990 DeoptimizeIf(eq, instr->environment()); 3012 DeoptimizeIf(eq, instr->environment());
2991 } 3013 }
2992 3014
2993 // Store the value. 3015 // Store the value.
2994 __ str(value, FieldMemOperand(cell, Cell::kValueOffset)); 3016 __ str(value, FieldMemOperand(cell, Cell::kValueOffset));
2995 // Cells are always rescanned, so no write barrier here. 3017 // Cells are always rescanned, so no write barrier here.
2996 } 3018 }
2997 3019
2998 3020
// Generic global store through the StoreIC: global object in r1, value in
// r0, property name in r2. The strict/non-strict IC stub is chosen from
// the instruction's language-mode flag.
void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
  ASSERT(ToRegister(instr->context()).is(cp));
  ASSERT(ToRegister(instr->global_object()).is(r1));
  ASSERT(ToRegister(instr->value()).is(r0));

  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
}
3009 3032
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after
3083 if (access.IsInobject()) { 3106 if (access.IsInobject()) {
3084 __ ldr(result, FieldMemOperand(object, offset)); 3107 __ ldr(result, FieldMemOperand(object, offset));
3085 } else { 3108 } else {
3086 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); 3109 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
3087 __ ldr(result, FieldMemOperand(result, offset)); 3110 __ ldr(result, FieldMemOperand(result, offset));
3088 } 3111 }
3089 } 3112 }
3090 3113
3091 3114
// Generic named-property load through the LoadIC: receiver in r0, result
// in r0.
void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  ASSERT(ToRegister(instr->context()).is(cp));
  ASSERT(ToRegister(instr->object()).is(r0));
  ASSERT(ToRegister(instr->result()).is(r0));

  // Name is always in r2.
  __ mov(r2, Operand(instr->name()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
}
3101 3125
3102 3126
(...skipping 273 matching lines...) Expand 10 before | Expand all | Expand 10 after
3376 if (shift_size >= 0) { 3400 if (shift_size >= 0) {
3377 return MemOperand(base, scratch0(), LSL, shift_size); 3401 return MemOperand(base, scratch0(), LSL, shift_size);
3378 } else { 3402 } else {
3379 ASSERT_EQ(-1, shift_size); 3403 ASSERT_EQ(-1, shift_size);
3380 return MemOperand(base, scratch0(), LSR, 1); 3404 return MemOperand(base, scratch0(), LSR, 1);
3381 } 3405 }
3382 } 3406 }
3383 3407
3384 3408
// Generic keyed load through the KeyedLoadIC: receiver in r1, key in r0.
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->context()).is(cp));
  ASSERT(ToRegister(instr->object()).is(r1));
  ASSERT(ToRegister(instr->key()).is(r0));

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
}
3392 3417
3393 3418
3394 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { 3419 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
3395 Register scratch = scratch0(); 3420 Register scratch = scratch0();
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after
3520 ASSERT(instr->HasPointerMap()); 3545 ASSERT(instr->HasPointerMap());
3521 LPointerMap* pointers = instr->pointer_map(); 3546 LPointerMap* pointers = instr->pointer_map();
3522 RecordPosition(pointers->position()); 3547 RecordPosition(pointers->position());
3523 SafepointGenerator safepoint_generator( 3548 SafepointGenerator safepoint_generator(
3524 this, pointers, Safepoint::kLazyDeopt); 3549 this, pointers, Safepoint::kLazyDeopt);
3525 // The number of arguments is stored in receiver which is r0, as expected 3550 // The number of arguments is stored in receiver which is r0, as expected
3526 // by InvokeFunction. 3551 // by InvokeFunction.
3527 ParameterCount actual(receiver); 3552 ParameterCount actual(receiver);
3528 __ InvokeFunction(function, actual, CALL_FUNCTION, 3553 __ InvokeFunction(function, actual, CALL_FUNCTION,
3529 safepoint_generator, CALL_AS_METHOD); 3554 safepoint_generator, CALL_AS_METHOD);
3530 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3531 } 3555 }
3532 3556
3533 3557
// Pushes a tagged argument onto the stack. Unboxed doubles are not
// supported as pushed arguments and abort compilation.
void LCodeGen::DoPushArgument(LPushArgument* instr) {
  LOperand* argument = instr->value();
  if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
    Abort(kDoPushArgumentNotImplementedForDoubleType);
  } else {
    // ip serves as scratch when the operand is not already in a register.
    Register argument_reg = EmitLoadRegister(argument, ip);
    __ push(argument_reg);
  }
}
3543 3567
3544 3568
// Removes count() slots from the top of the stack.
void LCodeGen::DoDrop(LDrop* instr) {
  __ Drop(instr->count());
}
3548 3572
3549 3573
// Loads the current JSFunction from the standard frame slot.
void LCodeGen::DoThisFunction(LThisFunction* instr) {
  Register result = ToRegister(instr->result());
  __ ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
}
3554 3578
3555 3579
// Produces the current context in the result register. In optimized code
// the context is reloaded from the frame slot (cp is allocator-managed);
// without a frame it must already be in cp.
void LCodeGen::DoContext(LContext* instr) {
  Register result = ToRegister(instr->result());
  if (info()->IsOptimizing()) {
    __ ldr(result, MemOperand(fp, StandardFrameConstants::kContextOffset));
  } else {
    // If there is no frame, the context must be in cp.
    ASSERT(result.is(cp));
  }
}
3566 3590
3567 3591
// Loads the enclosing (previous) context from the given context's
// PREVIOUS_INDEX slot.
void LCodeGen::DoOuterContext(LOuterContext* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ ldr(result,
         MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
}
3574 3598
3575 3599
// Declares global variables/functions by pushing the context, the
// declaration pairs, and the flags, then calling Runtime::kDeclareGlobals.
void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
  ASSERT(ToRegister(instr->context()).is(cp));
  __ push(cp);  // The context is the first argument.
  __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs());
  __ push(scratch0());
  __ mov(scratch0(), Operand(Smi::FromInt(instr->hydrogen()->flags())));
  __ push(scratch0());
  CallRuntime(Runtime::kDeclareGlobals, 3, instr);
}
3584 3609
3585 3610
// Loads the global object from the instruction's context operand (the
// allocator supplies the context register; cp is not assumed).
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ ldr(result, ContextOperand(context, Context::GLOBAL_OBJECT_INDEX));
}
3590 3616
3591 3617
// Loads the global receiver from the given global object.
void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
  Register global = ToRegister(instr->global_object());
  Register result = ToRegister(instr->result());
  __ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
}
3597 3623
3598 3624
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
3631 3657
3632 // Set up deoptimization. 3658 // Set up deoptimization.
3633 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); 3659 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
3634 } else { 3660 } else {
3635 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); 3661 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3636 ParameterCount count(arity); 3662 ParameterCount count(arity);
3637 ParameterCount expected(formal_parameter_count); 3663 ParameterCount expected(formal_parameter_count);
3638 __ InvokeFunction( 3664 __ InvokeFunction(
3639 function, expected, count, CALL_FUNCTION, generator, call_kind); 3665 function, expected, count, CALL_FUNCTION, generator, call_kind);
3640 } 3666 }
3641
3642 // Restore context.
3643 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3644 } 3667 }
3645 3668
3646 3669
// Calls a statically known function; the result is returned in r0.
// R1_UNINITIALIZED indicates r1 does not yet hold the function object.
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
  ASSERT(ToRegister(instr->result()).is(r0));
  CallKnownFunction(instr->hydrogen()->function(),
                    instr->hydrogen()->formal_parameter_count(),
                    instr->arity(),
                    instr,
                    CALL_AS_METHOD,
                    R1_UNINITIALIZED);
}
3656 3679
3657 3680
3658 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { 3681 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
3682 ASSERT(instr->context() != NULL);
3683 ASSERT(ToRegister(instr->context()).is(cp));
ulan 2013/09/27 12:08:00 I think we don't need context in cp here since it
vincent.belliard.fr 2013/09/27 12:37:04 It is just to be consistent with LChunkBuilder::Do
3659 Register input = ToRegister(instr->value()); 3684 Register input = ToRegister(instr->value());
3660 Register result = ToRegister(instr->result()); 3685 Register result = ToRegister(instr->result());
3661 Register scratch = scratch0(); 3686 Register scratch = scratch0();
3662 3687
3663 // Deoptimize if not a heap number. 3688 // Deoptimize if not a heap number.
3664 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); 3689 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
3665 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); 3690 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3666 __ cmp(scratch, Operand(ip)); 3691 __ cmp(scratch, Operand(ip));
3667 DeoptimizeIf(ne, instr->environment()); 3692 DeoptimizeIf(ne, instr->environment());
3668 3693
(...skipping 23 matching lines...) Expand all
3692 // exponent: floating point exponent value. 3717 // exponent: floating point exponent value.
3693 3718
3694 Label allocated, slow; 3719 Label allocated, slow;
3695 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex); 3720 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
3696 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow); 3721 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
3697 __ b(&allocated); 3722 __ b(&allocated);
3698 3723
3699 // Slow case: Call the runtime system to do the number allocation. 3724 // Slow case: Call the runtime system to do the number allocation.
3700 __ bind(&slow); 3725 __ bind(&slow);
3701 3726
3702 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); 3727 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr,
3728 instr->context());
3703 // Set the pointer to the new heap number in tmp. 3729 // Set the pointer to the new heap number in tmp.
3704 if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0)); 3730 if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0));
3705 // Restore input_reg after call to runtime. 3731 // Restore input_reg after call to runtime.
3706 __ LoadFromSafepointRegisterSlot(input, input); 3732 __ LoadFromSafepointRegisterSlot(input, input);
3707 __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); 3733 __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
3708 3734
3709 __ bind(&allocated); 3735 __ bind(&allocated);
3710 // exponent: floating point exponent value. 3736 // exponent: floating point exponent value.
3711 // tmp1: allocated heap number. 3737 // tmp1: allocated heap number.
3712 __ bic(exponent, exponent, Operand(HeapNumber::kSignMask)); 3738 __ bic(exponent, exponent, Operand(HeapNumber::kSignMask));
(...skipping 250 matching lines...) Expand 10 before | Expand all | Expand 10 after
3963 Register temp2 = ToRegister(instr->temp2()); 3989 Register temp2 = ToRegister(instr->temp2());
3964 3990
3965 MathExpGenerator::EmitMathExp( 3991 MathExpGenerator::EmitMathExp(
3966 masm(), input, result, double_scratch1, double_scratch2, 3992 masm(), input, result, double_scratch1, double_scratch2,
3967 temp1, temp2, scratch0()); 3993 temp1, temp2, scratch0());
3968 } 3994 }
3969 3995
3970 3996
3971 void LCodeGen::DoMathLog(LMathLog* instr) { 3997 void LCodeGen::DoMathLog(LMathLog* instr) {
3972 ASSERT(ToDoubleRegister(instr->result()).is(d2)); 3998 ASSERT(ToDoubleRegister(instr->result()).is(d2));
3999 // Set the context register to a GC-safe fake value. Clobbering it is
4000 // OK because this instruction is marked as a call.
4001 __ mov(cp, Operand::Zero());
3973 TranscendentalCacheStub stub(TranscendentalCache::LOG, 4002 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3974 TranscendentalCacheStub::UNTAGGED); 4003 TranscendentalCacheStub::UNTAGGED);
3975 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4004 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
3976 } 4005 }
3977 4006
3978 4007
3979 void LCodeGen::DoMathTan(LMathTan* instr) { 4008 void LCodeGen::DoMathTan(LMathTan* instr) {
3980 ASSERT(ToDoubleRegister(instr->result()).is(d2)); 4009 ASSERT(ToDoubleRegister(instr->result()).is(d2));
4010 // Set the context register to a GC-safe fake value. Clobbering it is
4011 // OK because this instruction is marked as a call.
4012 __ mov(cp, Operand::Zero());
3981 TranscendentalCacheStub stub(TranscendentalCache::TAN, 4013 TranscendentalCacheStub stub(TranscendentalCache::TAN,
3982 TranscendentalCacheStub::UNTAGGED); 4014 TranscendentalCacheStub::UNTAGGED);
3983 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4015 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
3984 } 4016 }
3985 4017
3986 4018
3987 void LCodeGen::DoMathCos(LMathCos* instr) { 4019 void LCodeGen::DoMathCos(LMathCos* instr) {
3988 ASSERT(ToDoubleRegister(instr->result()).is(d2)); 4020 ASSERT(ToDoubleRegister(instr->result()).is(d2));
4021 // Set the context register to a GC-safe fake value. Clobbering it is
4022 // OK because this instruction is marked as a call.
4023 __ mov(cp, Operand::Zero());
3989 TranscendentalCacheStub stub(TranscendentalCache::COS, 4024 TranscendentalCacheStub stub(TranscendentalCache::COS,
3990 TranscendentalCacheStub::UNTAGGED); 4025 TranscendentalCacheStub::UNTAGGED);
3991 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4026 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
3992 } 4027 }
3993 4028
3994 4029
3995 void LCodeGen::DoMathSin(LMathSin* instr) { 4030 void LCodeGen::DoMathSin(LMathSin* instr) {
3996 ASSERT(ToDoubleRegister(instr->result()).is(d2)); 4031 ASSERT(ToDoubleRegister(instr->result()).is(d2));
4032 // Set the context register to a GC-safe fake value. Clobbering it is
4033 // OK because this instruction is marked as a call.
4034 __ mov(cp, Operand::Zero());
3997 TranscendentalCacheStub stub(TranscendentalCache::SIN, 4035 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3998 TranscendentalCacheStub::UNTAGGED); 4036 TranscendentalCacheStub::UNTAGGED);
3999 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4037 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
4000 } 4038 }
4001 4039
4002 4040
4003 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { 4041 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
4042 ASSERT(ToRegister(instr->context()).is(cp));
4004 ASSERT(ToRegister(instr->function()).is(r1)); 4043 ASSERT(ToRegister(instr->function()).is(r1));
4005 ASSERT(instr->HasPointerMap()); 4044 ASSERT(instr->HasPointerMap());
4006 4045
4007 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); 4046 Handle<JSFunction> known_function = instr->hydrogen()->known_function();
4008 if (known_function.is_null()) { 4047 if (known_function.is_null()) {
4009 LPointerMap* pointers = instr->pointer_map(); 4048 LPointerMap* pointers = instr->pointer_map();
4010 RecordPosition(pointers->position()); 4049 RecordPosition(pointers->position());
4011 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); 4050 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
4012 ParameterCount count(instr->arity()); 4051 ParameterCount count(instr->arity());
4013 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); 4052 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
4014 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4015 } else { 4053 } else {
4016 CallKnownFunction(known_function, 4054 CallKnownFunction(known_function,
4017 instr->hydrogen()->formal_parameter_count(), 4055 instr->hydrogen()->formal_parameter_count(),
4018 instr->arity(), 4056 instr->arity(),
4019 instr, 4057 instr,
4020 CALL_AS_METHOD, 4058 CALL_AS_METHOD,
4021 R1_CONTAINS_TARGET); 4059 R1_CONTAINS_TARGET);
4022 } 4060 }
4023 } 4061 }
4024 4062
4025 4063
4026 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { 4064 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
4065 ASSERT(ToRegister(instr->context()).is(cp));
4027 ASSERT(ToRegister(instr->result()).is(r0)); 4066 ASSERT(ToRegister(instr->result()).is(r0));
4028 4067
4029 int arity = instr->arity(); 4068 int arity = instr->arity();
4030 Handle<Code> ic = 4069 Handle<Code> ic =
4031 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity); 4070 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
4032 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); 4071 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
4033 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4034 } 4072 }
4035 4073
4036 4074
4037 void LCodeGen::DoCallNamed(LCallNamed* instr) { 4075 void LCodeGen::DoCallNamed(LCallNamed* instr) {
4076 ASSERT(ToRegister(instr->context()).is(cp));
4038 ASSERT(ToRegister(instr->result()).is(r0)); 4077 ASSERT(ToRegister(instr->result()).is(r0));
4039 4078
4040 int arity = instr->arity(); 4079 int arity = instr->arity();
4041 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; 4080 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
4042 Handle<Code> ic = 4081 Handle<Code> ic =
4043 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); 4082 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
4044 __ mov(r2, Operand(instr->name())); 4083 __ mov(r2, Operand(instr->name()));
4045 CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS); 4084 CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS);
4046 // Restore context register.
4047 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4048 } 4085 }
4049 4086
4050 4087
4051 void LCodeGen::DoCallFunction(LCallFunction* instr) { 4088 void LCodeGen::DoCallFunction(LCallFunction* instr) {
4089 ASSERT(ToRegister(instr->context()).is(cp));
4052 ASSERT(ToRegister(instr->function()).is(r1)); 4090 ASSERT(ToRegister(instr->function()).is(r1));
4053 ASSERT(ToRegister(instr->result()).is(r0)); 4091 ASSERT(ToRegister(instr->result()).is(r0));
4054 4092
4055 int arity = instr->arity(); 4093 int arity = instr->arity();
4056 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS); 4094 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
4057 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4095 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
4058 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4059 } 4096 }
4060 4097
4061 4098
4062 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { 4099 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
4100 ASSERT(ToRegister(instr->context()).is(cp));
4063 ASSERT(ToRegister(instr->result()).is(r0)); 4101 ASSERT(ToRegister(instr->result()).is(r0));
4064 4102
4065 int arity = instr->arity(); 4103 int arity = instr->arity();
4066 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT; 4104 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
4067 Handle<Code> ic = 4105 Handle<Code> ic =
4068 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); 4106 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
4069 __ mov(r2, Operand(instr->name())); 4107 __ mov(r2, Operand(instr->name()));
4070 CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS); 4108 CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS);
4071 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4072 } 4109 }
4073 4110
4074 4111
4075 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { 4112 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
4076 ASSERT(ToRegister(instr->result()).is(r0)); 4113 ASSERT(ToRegister(instr->result()).is(r0));
4077 CallKnownFunction(instr->hydrogen()->target(), 4114 CallKnownFunction(instr->hydrogen()->target(),
4078 instr->hydrogen()->formal_parameter_count(), 4115 instr->hydrogen()->formal_parameter_count(),
4079 instr->arity(), 4116 instr->arity(),
4080 instr, 4117 instr,
4081 CALL_AS_FUNCTION, 4118 CALL_AS_FUNCTION,
4082 R1_UNINITIALIZED); 4119 R1_UNINITIALIZED);
4083 } 4120 }
4084 4121
4085 4122
4086 void LCodeGen::DoCallNew(LCallNew* instr) { 4123 void LCodeGen::DoCallNew(LCallNew* instr) {
4124 ASSERT(ToRegister(instr->context()).is(cp));
4087 ASSERT(ToRegister(instr->constructor()).is(r1)); 4125 ASSERT(ToRegister(instr->constructor()).is(r1));
4088 ASSERT(ToRegister(instr->result()).is(r0)); 4126 ASSERT(ToRegister(instr->result()).is(r0));
4089 4127
4090 __ mov(r0, Operand(instr->arity())); 4128 __ mov(r0, Operand(instr->arity()));
4091 // No cell in r2 for construct type feedback in optimized code 4129 // No cell in r2 for construct type feedback in optimized code
4092 Handle<Object> undefined_value(isolate()->factory()->undefined_value()); 4130 Handle<Object> undefined_value(isolate()->factory()->undefined_value());
4093 __ mov(r2, Operand(undefined_value)); 4131 __ mov(r2, Operand(undefined_value));
4094 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); 4132 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
4095 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); 4133 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
4096 } 4134 }
4097 4135
4098 4136
4099 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { 4137 void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
4138 ASSERT(ToRegister(instr->context()).is(cp));
4100 ASSERT(ToRegister(instr->constructor()).is(r1)); 4139 ASSERT(ToRegister(instr->constructor()).is(r1));
4101 ASSERT(ToRegister(instr->result()).is(r0)); 4140 ASSERT(ToRegister(instr->result()).is(r0));
4102 4141
4103 __ mov(r0, Operand(instr->arity())); 4142 __ mov(r0, Operand(instr->arity()));
4104 __ mov(r2, Operand(instr->hydrogen()->property_cell())); 4143 __ mov(r2, Operand(instr->hydrogen()->property_cell()));
4105 ElementsKind kind = instr->hydrogen()->elements_kind(); 4144 ElementsKind kind = instr->hydrogen()->elements_kind();
4106 AllocationSiteOverrideMode override_mode = 4145 AllocationSiteOverrideMode override_mode =
4107 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) 4146 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE)
4108 ? DISABLE_ALLOCATION_SITES 4147 ? DISABLE_ALLOCATION_SITES
4109 : DONT_OVERRIDE; 4148 : DONT_OVERRIDE;
(...skipping 131 matching lines...) Expand 10 before | Expand all | Expand 10 after
4241 GetLinkRegisterState(), 4280 GetLinkRegisterState(),
4242 kSaveFPRegs, 4281 kSaveFPRegs,
4243 EMIT_REMEMBERED_SET, 4282 EMIT_REMEMBERED_SET,
4244 check_needed); 4283 check_needed);
4245 } 4284 }
4246 } 4285 }
4247 } 4286 }
4248 4287
4249 4288
4250 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 4289 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
4290 ASSERT(ToRegister(instr->context()).is(cp));
4251 ASSERT(ToRegister(instr->object()).is(r1)); 4291 ASSERT(ToRegister(instr->object()).is(r1));
4252 ASSERT(ToRegister(instr->value()).is(r0)); 4292 ASSERT(ToRegister(instr->value()).is(r0));
4253 4293
4254 // Name is always in r2. 4294 // Name is always in r2.
4255 __ mov(r2, Operand(instr->name())); 4295 __ mov(r2, Operand(instr->name()));
4256 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) 4296 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
4257 ? isolate()->builtins()->StoreIC_Initialize_Strict() 4297 ? isolate()->builtins()->StoreIC_Initialize_Strict()
4258 : isolate()->builtins()->StoreIC_Initialize(); 4298 : isolate()->builtins()->StoreIC_Initialize();
4259 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); 4299 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
4260 } 4300 }
(...skipping 203 matching lines...) Expand 10 before | Expand all | Expand 10 after
4464 DoStoreKeyedExternalArray(instr); 4504 DoStoreKeyedExternalArray(instr);
4465 } else if (instr->hydrogen()->value()->representation().IsDouble()) { 4505 } else if (instr->hydrogen()->value()->representation().IsDouble()) {
4466 DoStoreKeyedFixedDoubleArray(instr); 4506 DoStoreKeyedFixedDoubleArray(instr);
4467 } else { 4507 } else {
4468 DoStoreKeyedFixedArray(instr); 4508 DoStoreKeyedFixedArray(instr);
4469 } 4509 }
4470 } 4510 }
4471 4511
4472 4512
4473 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { 4513 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
4514 ASSERT(ToRegister(instr->context()).is(cp));
4474 ASSERT(ToRegister(instr->object()).is(r2)); 4515 ASSERT(ToRegister(instr->object()).is(r2));
4475 ASSERT(ToRegister(instr->key()).is(r1)); 4516 ASSERT(ToRegister(instr->key()).is(r1));
4476 ASSERT(ToRegister(instr->value()).is(r0)); 4517 ASSERT(ToRegister(instr->value()).is(r0));
4477 4518
4478 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) 4519 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
4479 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() 4520 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
4480 : isolate()->builtins()->KeyedStoreIC_Initialize(); 4521 : isolate()->builtins()->KeyedStoreIC_Initialize();
4481 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); 4522 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
4482 } 4523 }
4483 4524
(...skipping 13 matching lines...) Expand all
4497 __ b(ne, &not_applicable); 4538 __ b(ne, &not_applicable);
4498 4539
4499 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { 4540 if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
4500 Register new_map_reg = ToRegister(instr->new_map_temp()); 4541 Register new_map_reg = ToRegister(instr->new_map_temp());
4501 __ mov(new_map_reg, Operand(to_map)); 4542 __ mov(new_map_reg, Operand(to_map));
4502 __ str(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset)); 4543 __ str(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
4503 // Write barrier. 4544 // Write barrier.
4504 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, 4545 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
4505 scratch, GetLinkRegisterState(), kDontSaveFPRegs); 4546 scratch, GetLinkRegisterState(), kDontSaveFPRegs);
4506 } else { 4547 } else {
4548 ASSERT(ToRegister(instr->context()).is(cp));
4507 PushSafepointRegistersScope scope( 4549 PushSafepointRegistersScope scope(
4508 this, Safepoint::kWithRegistersAndDoubles); 4550 this, Safepoint::kWithRegistersAndDoubles);
4509 __ Move(r0, object_reg); 4551 __ Move(r0, object_reg);
4510 __ Move(r1, to_map); 4552 __ Move(r1, to_map);
4511 TransitionElementsKindStub stub(from_kind, to_kind); 4553 TransitionElementsKindStub stub(from_kind, to_kind);
4512 __ CallStub(&stub); 4554 __ CallStub(&stub);
4513 RecordSafepointWithRegistersAndDoubles( 4555 RecordSafepointWithRegistersAndDoubles(
4514 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); 4556 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4515 } 4557 }
4516 __ bind(&not_applicable); 4558 __ bind(&not_applicable);
4517 } 4559 }
4518 4560
4519 4561
4520 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { 4562 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
4521 Register object = ToRegister(instr->object()); 4563 Register object = ToRegister(instr->object());
4522 Register temp = ToRegister(instr->temp()); 4564 Register temp = ToRegister(instr->temp());
4523 __ TestJSArrayForAllocationMemento(object, temp); 4565 __ TestJSArrayForAllocationMemento(object, temp);
4524 DeoptimizeIf(eq, instr->environment()); 4566 DeoptimizeIf(eq, instr->environment());
4525 } 4567 }
4526 4568
4527 4569
4528 void LCodeGen::DoStringAdd(LStringAdd* instr) { 4570 void LCodeGen::DoStringAdd(LStringAdd* instr) {
4571 ASSERT(ToRegister(instr->context()).is(cp));
4529 __ push(ToRegister(instr->left())); 4572 __ push(ToRegister(instr->left()));
4530 __ push(ToRegister(instr->right())); 4573 __ push(ToRegister(instr->right()));
4531 StringAddStub stub(instr->hydrogen()->flags()); 4574 StringAddStub stub(instr->hydrogen()->flags());
4532 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4575 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
4533 } 4576 }
4534 4577
4535 4578
4536 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { 4579 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
4537 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { 4580 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode {
4538 public: 4581 public:
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
4574 // DoStringCharCodeAt above. 4617 // DoStringCharCodeAt above.
4575 if (instr->index()->IsConstantOperand()) { 4618 if (instr->index()->IsConstantOperand()) {
4576 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); 4619 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
4577 __ mov(scratch, Operand(Smi::FromInt(const_index))); 4620 __ mov(scratch, Operand(Smi::FromInt(const_index)));
4578 __ push(scratch); 4621 __ push(scratch);
4579 } else { 4622 } else {
4580 Register index = ToRegister(instr->index()); 4623 Register index = ToRegister(instr->index());
4581 __ SmiTag(index); 4624 __ SmiTag(index);
4582 __ push(index); 4625 __ push(index);
4583 } 4626 }
4584 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr); 4627 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr,
4628 instr->context());
4585 __ AssertSmi(r0); 4629 __ AssertSmi(r0);
4586 __ SmiUntag(r0); 4630 __ SmiUntag(r0);
4587 __ StoreToSafepointRegisterSlot(r0, result); 4631 __ StoreToSafepointRegisterSlot(r0, result);
4588 } 4632 }
4589 4633
4590 4634
4591 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { 4635 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
4592 class DeferredStringCharFromCode V8_FINAL : public LDeferredCode { 4636 class DeferredStringCharFromCode V8_FINAL : public LDeferredCode {
4593 public: 4637 public:
4594 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr) 4638 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
4626 Register result = ToRegister(instr->result()); 4670 Register result = ToRegister(instr->result());
4627 4671
4628 // TODO(3095996): Get rid of this. For now, we need to make the 4672 // TODO(3095996): Get rid of this. For now, we need to make the
4629 // result register contain a valid pointer because it is already 4673 // result register contain a valid pointer because it is already
4630 // contained in the register pointer map. 4674 // contained in the register pointer map.
4631 __ mov(result, Operand::Zero()); 4675 __ mov(result, Operand::Zero());
4632 4676
4633 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 4677 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4634 __ SmiTag(char_code); 4678 __ SmiTag(char_code);
4635 __ push(char_code); 4679 __ push(char_code);
4636 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr); 4680 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
4637 __ StoreToSafepointRegisterSlot(r0, result); 4681 __ StoreToSafepointRegisterSlot(r0, result);
4638 } 4682 }
4639 4683
4640 4684
4641 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { 4685 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
4642 LOperand* input = instr->value(); 4686 LOperand* input = instr->value();
4643 ASSERT(input->IsRegister() || input->IsStackSlot()); 4687 ASSERT(input->IsRegister() || input->IsStackSlot());
4644 LOperand* output = instr->result(); 4688 LOperand* output = instr->result();
4645 ASSERT(output->IsDoubleRegister()); 4689 ASSERT(output->IsDoubleRegister());
4646 SwVfpRegister single_scratch = double_scratch0().low(); 4690 SwVfpRegister single_scratch = double_scratch0().low();
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after
4765 } 4809 }
4766 4810
4767 // Slow case: Call the runtime system to do the number allocation. 4811 // Slow case: Call the runtime system to do the number allocation.
4768 __ bind(&slow); 4812 __ bind(&slow);
4769 4813
4770 // TODO(3095996): Put a valid pointer value in the stack slot where the result 4814 // TODO(3095996): Put a valid pointer value in the stack slot where the result
4771 // register is stored, as this register is in the pointer map, but contains an 4815 // register is stored, as this register is in the pointer map, but contains an
4772 // integer value. 4816 // integer value.
4773 __ mov(ip, Operand::Zero()); 4817 __ mov(ip, Operand::Zero());
4774 __ StoreToSafepointRegisterSlot(ip, dst); 4818 __ StoreToSafepointRegisterSlot(ip, dst);
4775 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); 4819 // NumberTagI and NumberTagD use the context from the frame, rather than
4820 // the environment's HContext or HInlinedContext value.
4821 // They only call Runtime::kAllocateHeapNumber.
4822 // The corresponding HChange instructions are added in a phase that does
4823 // not have easy access to the local context.
4824 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4825 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4826 RecordSafepointWithRegisters(
4827 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4776 __ Move(dst, r0); 4828 __ Move(dst, r0);
4777 __ sub(dst, dst, Operand(kHeapObjectTag)); 4829 __ sub(dst, dst, Operand(kHeapObjectTag));
4778 4830
4779 // Done. Put the value in dbl_scratch into the value of the allocated heap 4831 // Done. Put the value in dbl_scratch into the value of the allocated heap
4780 // number. 4832 // number.
4781 __ bind(&done); 4833 __ bind(&done);
4782 __ vstr(dbl_scratch, dst, HeapNumber::kValueOffset); 4834 __ vstr(dbl_scratch, dst, HeapNumber::kValueOffset);
4783 __ add(dst, dst, Operand(kHeapObjectTag)); 4835 __ add(dst, dst, Operand(kHeapObjectTag));
4784 __ StoreToSafepointRegisterSlot(dst, dst); 4836 __ StoreToSafepointRegisterSlot(dst, dst);
4785 } 4837 }
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
4821 4873
4822 4874
4823 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { 4875 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
4824 // TODO(3095996): Get rid of this. For now, we need to make the 4876 // TODO(3095996): Get rid of this. For now, we need to make the
4825 // result register contain a valid pointer because it is already 4877 // result register contain a valid pointer because it is already
4826 // contained in the register pointer map. 4878 // contained in the register pointer map.
4827 Register reg = ToRegister(instr->result()); 4879 Register reg = ToRegister(instr->result());
4828 __ mov(reg, Operand::Zero()); 4880 __ mov(reg, Operand::Zero());
4829 4881
4830 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 4882 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4831 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); 4883 // NumberTagI and NumberTagD use the context from the frame, rather than
4884 // the environment's HContext or HInlinedContext value.
4885 // They only call Runtime::kAllocateHeapNumber.
4886 // The corresponding HChange instructions are added in a phase that does
4887 // not have easy access to the local context.
4888 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4889 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4890 RecordSafepointWithRegisters(
4891 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4832 __ sub(r0, r0, Operand(kHeapObjectTag)); 4892 __ sub(r0, r0, Operand(kHeapObjectTag));
4833 __ StoreToSafepointRegisterSlot(r0, reg); 4893 __ StoreToSafepointRegisterSlot(r0, reg);
4834 } 4894 }
4835 4895
4836 4896
4837 void LCodeGen::DoSmiTag(LSmiTag* instr) { 4897 void LCodeGen::DoSmiTag(LSmiTag* instr) {
4838 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); 4898 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
4839 __ SmiTag(ToRegister(instr->result()), ToRegister(instr->value())); 4899 __ SmiTag(ToRegister(instr->result()), ToRegister(instr->value()));
4840 } 4900 }
4841 4901
(...skipping 308 matching lines...) Expand 10 before | Expand all | Expand 10 after
5150 __ cmp(reg, Operand(object)); 5210 __ cmp(reg, Operand(object));
5151 } 5211 }
5152 DeoptimizeIf(ne, instr->environment()); 5212 DeoptimizeIf(ne, instr->environment());
5153 } 5213 }
5154 5214
5155 5215
5156 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { 5216 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
5157 { 5217 {
5158 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 5218 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
5159 __ push(object); 5219 __ push(object);
5160 CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr); 5220 __ mov(cp, Operand::Zero());
5221 __ CallRuntimeSaveDoubles(Runtime::kMigrateInstance);
5222 RecordSafepointWithRegisters(
5223 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
5161 __ StoreToSafepointRegisterSlot(r0, scratch0()); 5224 __ StoreToSafepointRegisterSlot(r0, scratch0());
5162 } 5225 }
5163 __ tst(scratch0(), Operand(kSmiTagMask)); 5226 __ tst(scratch0(), Operand(kSmiTagMask));
5164 DeoptimizeIf(eq, instr->environment()); 5227 DeoptimizeIf(eq, instr->environment());
5165 } 5228 }
5166 5229
5167 5230
5168 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { 5231 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
5169 class DeferredCheckMaps V8_FINAL : public LDeferredCode { 5232 class DeferredCheckMaps V8_FINAL : public LDeferredCode {
5170 public: 5233 public:
(...skipping 182 matching lines...) Expand 10 before | Expand all | Expand 10 after
5353 __ SmiTag(size); 5416 __ SmiTag(size);
5354 __ push(size); 5417 __ push(size);
5355 } else { 5418 } else {
5356 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); 5419 int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
5357 __ Push(Smi::FromInt(size)); 5420 __ Push(Smi::FromInt(size));
5358 } 5421 }
5359 5422
5360 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { 5423 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5361 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); 5424 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
5362 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); 5425 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
5363 CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr); 5426 CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr,
5427 instr->context());
5364 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { 5428 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5365 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); 5429 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
5366 CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr); 5430 CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr,
5431 instr->context());
5367 } else { 5432 } else {
5368 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr); 5433 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr,
5434 instr->context());
5369 } 5435 }
5370 __ StoreToSafepointRegisterSlot(r0, result); 5436 __ StoreToSafepointRegisterSlot(r0, result);
5371 } 5437 }
5372 5438
5373 5439
5374 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { 5440 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
5375 ASSERT(ToRegister(instr->value()).is(r0)); 5441 ASSERT(ToRegister(instr->value()).is(r0));
5376 __ push(r0); 5442 __ push(r0);
5377 CallRuntime(Runtime::kToFastProperties, 1, instr); 5443 CallRuntime(Runtime::kToFastProperties, 1, instr);
5378 } 5444 }
5379 5445
5380 5446
5381 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { 5447 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
5448 ASSERT(ToRegister(instr->context()).is(cp));
5382 Label materialized; 5449 Label materialized;
5383 // Registers will be used as follows: 5450 // Registers will be used as follows:
5384 // r6 = literals array. 5451 // r6 = literals array.
5385 // r1 = regexp literal. 5452 // r1 = regexp literal.
5386 // r0 = regexp literal clone. 5453 // r0 = regexp literal clone.
5387 // r2-5 are used as temporaries. 5454 // r2-5 are used as temporaries.
5388 int literal_offset = 5455 int literal_offset =
5389 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); 5456 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
5390 __ LoadHeapObject(r6, instr->hydrogen()->literals()); 5457 __ LoadHeapObject(r6, instr->hydrogen()->literals());
5391 __ ldr(r1, FieldMemOperand(r6, literal_offset)); 5458 __ ldr(r1, FieldMemOperand(r6, literal_offset));
(...skipping 23 matching lines...) Expand all
5415 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); 5482 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
5416 __ pop(r1); 5483 __ pop(r1);
5417 5484
5418 __ bind(&allocated); 5485 __ bind(&allocated);
5419 // Copy the content into the newly allocated memory. 5486 // Copy the content into the newly allocated memory.
5420 __ CopyFields(r0, r1, double_scratch0(), size / kPointerSize); 5487 __ CopyFields(r0, r1, double_scratch0(), size / kPointerSize);
5421 } 5488 }
5422 5489
5423 5490
5424 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { 5491 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
5492 ASSERT(ToRegister(instr->context()).is(cp));
5425 // Use the fast case closure allocation code that allocates in new 5493 // Use the fast case closure allocation code that allocates in new
5426 // space for nested functions that don't need literals cloning. 5494 // space for nested functions that don't need literals cloning.
5427 bool pretenure = instr->hydrogen()->pretenure(); 5495 bool pretenure = instr->hydrogen()->pretenure();
5428 if (!pretenure && instr->hydrogen()->has_no_literals()) { 5496 if (!pretenure && instr->hydrogen()->has_no_literals()) {
5429 FastNewClosureStub stub(instr->hydrogen()->language_mode(), 5497 FastNewClosureStub stub(instr->hydrogen()->language_mode(),
5430 instr->hydrogen()->is_generator()); 5498 instr->hydrogen()->is_generator());
5431 __ mov(r2, Operand(instr->hydrogen()->shared_info())); 5499 __ mov(r2, Operand(instr->hydrogen()->shared_info()));
5432 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 5500 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
5433 } else { 5501 } else {
5434 __ mov(r2, Operand(instr->hydrogen()->shared_info())); 5502 __ mov(r2, Operand(instr->hydrogen()->shared_info()));
(...skipping 174 matching lines...) Expand 10 before | Expand all | Expand 10 after
5609 } 5677 }
5610 5678
5611 5679
// LDummyUse exists only to keep its operand alive for the register
// allocator; it emits no code.
void LCodeGen::DoDummyUse(LDummyUse* instr) {
  // Nothing to see here, move on!
}
5615 5683
5616 5684
// Out-of-line slow path for LStackCheck: calls Runtime::kStackGuard with all
// registers (including doubles) preserved, and records a safepoint that
// supports lazy deoptimization on return from the runtime call.
void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  // On a deferred path cp may not hold the context, so reload it from the
  // instruction's context operand before the runtime call (this call was
  // added when the allocator took over the context register).
  LoadContextFromDeferred(instr->context());
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  // No arguments were pushed, so record a registers-only safepoint.
  RecordSafepointWithLazyDeopt(
      instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  // Associate this instruction's environment with the lazy-deopt index so
  // the frame can be rebuilt if deoptimization is triggered here.
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
5626 5695
5627 5696
5628 void LCodeGen::DoStackCheck(LStackCheck* instr) { 5697 void LCodeGen::DoStackCheck(LStackCheck* instr) {
(...skipping 13 matching lines...) Expand all
5642 LEnvironment* env = instr->environment(); 5711 LEnvironment* env = instr->environment();
5643 // There is no LLazyBailout instruction for stack-checks. We have to 5712 // There is no LLazyBailout instruction for stack-checks. We have to
5644 // prepare for lazy deoptimization explicitly here. 5713 // prepare for lazy deoptimization explicitly here.
5645 if (instr->hydrogen()->is_function_entry()) { 5714 if (instr->hydrogen()->is_function_entry()) {
5646 // Perform stack overflow check. 5715 // Perform stack overflow check.
5647 Label done; 5716 Label done;
5648 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 5717 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
5649 __ cmp(sp, Operand(ip)); 5718 __ cmp(sp, Operand(ip));
5650 __ b(hs, &done); 5719 __ b(hs, &done);
5651 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); 5720 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
5721 ASSERT(instr->context()->IsRegister());
5722 ASSERT(ToRegister(instr->context()).is(cp));
5652 CallCode(isolate()->builtins()->StackCheck(), 5723 CallCode(isolate()->builtins()->StackCheck(),
5653 RelocInfo::CODE_TARGET, 5724 RelocInfo::CODE_TARGET,
5654 instr); 5725 instr);
5655 EnsureSpaceForLazyDeopt(); 5726 EnsureSpaceForLazyDeopt();
5656 last_lazy_deopt_pc_ = masm()->pc_offset(); 5727 last_lazy_deopt_pc_ = masm()->pc_offset();
5657 __ bind(&done); 5728 __ bind(&done);
5658 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); 5729 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5659 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 5730 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5660 } else { 5731 } else {
5661 ASSERT(instr->hydrogen()->is_backwards_branch()); 5732 ASSERT(instr->hydrogen()->is_backwards_branch());
5662 // Perform stack overflow check if this goto needs it before jumping. 5733 // Perform stack overflow check if this goto needs it before jumping.
5663 DeferredStackCheck* deferred_stack_check = 5734 DeferredStackCheck* deferred_stack_check =
5664 new(zone()) DeferredStackCheck(this, instr); 5735 new(zone()) DeferredStackCheck(this, instr);
(...skipping 117 matching lines...) Expand 10 before | Expand all | Expand 10 after
5782 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); 5853 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index));
5783 __ ldr(result, FieldMemOperand(scratch, 5854 __ ldr(result, FieldMemOperand(scratch,
5784 FixedArray::kHeaderSize - kPointerSize)); 5855 FixedArray::kHeaderSize - kPointerSize));
5785 __ bind(&done); 5856 __ bind(&done);
5786 } 5857 }
5787 5858
5788 5859
5789 #undef __ 5860 #undef __
5790 5861
5791 } } // namespace v8::internal 5862 } } // namespace v8::internal
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698