Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(185)

Side by Side Diff: src/arm/lithium-codegen-arm.cc

Issue 24596002: ARM: Let the register allocator handle the context register. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/arm/lithium-codegen-arm.h ('k') | src/arm/macro-assembler-arm.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 230 matching lines...) Expand 10 before | Expand all | Expand 10 after
241 r3, 241 r3,
242 GetLinkRegisterState(), 242 GetLinkRegisterState(),
243 kSaveFPRegs); 243 kSaveFPRegs);
244 } 244 }
245 } 245 }
246 Comment(";;; End allocate local context"); 246 Comment(";;; End allocate local context");
247 } 247 }
248 248
249 // Trace the call. 249 // Trace the call.
250 if (FLAG_trace && info()->IsOptimizing()) { 250 if (FLAG_trace && info()->IsOptimizing()) {
251 // We have not executed any compiled code yet, so cp still holds the
252 // incoming context.
251 __ CallRuntime(Runtime::kTraceEnter, 0); 253 __ CallRuntime(Runtime::kTraceEnter, 0);
252 } 254 }
253 return !is_aborted(); 255 return !is_aborted();
254 } 256 }
255 257
256 258
257 void LCodeGen::GenerateOsrPrologue() { 259 void LCodeGen::GenerateOsrPrologue() {
258 // Generate the OSR entry prologue at the first unknown OSR value, or if there 260 // Generate the OSR entry prologue at the first unknown OSR value, or if there
259 // are none, at the OSR entrypoint instruction. 261 // are none, at the OSR entrypoint instruction.
260 if (osr_pc_offset_ >= 0) return; 262 if (osr_pc_offset_ >= 0) return;
(...skipping 491 matching lines...) Expand 10 before | Expand all | Expand 10 after
752 ASSERT(instr != NULL); 754 ASSERT(instr != NULL);
753 LPointerMap* pointers = instr->pointer_map(); 755 LPointerMap* pointers = instr->pointer_map();
754 ASSERT(pointers != NULL); 756 ASSERT(pointers != NULL);
755 RecordPosition(pointers->position()); 757 RecordPosition(pointers->position());
756 758
757 __ CallRuntime(function, num_arguments); 759 __ CallRuntime(function, num_arguments);
758 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); 760 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
759 } 761 }
760 762
761 763
764 void LCodeGen::LoadContextFromDeferred(LOperand* context) {
765 if (context->IsRegister()) {
766 __ Move(cp, ToRegister(context));
767 } else if (context->IsStackSlot()) {
768 __ ldr(cp, ToMemOperand(context));
769 } else {
770 UNREACHABLE();
771 }
772 }
773
774
762 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, 775 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
763 int argc, 776 int argc,
764 LInstruction* instr) { 777 LInstruction* instr,
778 LOperand* context) {
779 LoadContextFromDeferred(context);
765 __ CallRuntimeSaveDoubles(id); 780 __ CallRuntimeSaveDoubles(id);
766 RecordSafepointWithRegisters( 781 RecordSafepointWithRegisters(
767 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt); 782 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
768 } 783 }
769 784
770 785
771 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, 786 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
772 Safepoint::DeoptMode mode) { 787 Safepoint::DeoptMode mode) {
773 if (!environment->HasBeenRegistered()) { 788 if (!environment->HasBeenRegistered()) {
774 // Physical stack frame layout: 789 // Physical stack frame layout:
(...skipping 195 matching lines...) Expand 10 before | Expand all | Expand 10 after
970 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), 985 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
971 kind, arguments, deopt_mode); 986 kind, arguments, deopt_mode);
972 for (int i = 0; i < operands->length(); i++) { 987 for (int i = 0; i < operands->length(); i++) {
973 LOperand* pointer = operands->at(i); 988 LOperand* pointer = operands->at(i);
974 if (pointer->IsStackSlot()) { 989 if (pointer->IsStackSlot()) {
975 safepoint.DefinePointerSlot(pointer->index(), zone()); 990 safepoint.DefinePointerSlot(pointer->index(), zone());
976 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { 991 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
977 safepoint.DefinePointerRegister(ToRegister(pointer), zone()); 992 safepoint.DefinePointerRegister(ToRegister(pointer), zone());
978 } 993 }
979 } 994 }
980 if (kind & Safepoint::kWithRegisters) {
981 // Register cp always contains a pointer to the context.
982 safepoint.DefinePointerRegister(cp, zone());
983 }
984 } 995 }
985 996
986 997
987 void LCodeGen::RecordSafepoint(LPointerMap* pointers, 998 void LCodeGen::RecordSafepoint(LPointerMap* pointers,
988 Safepoint::DeoptMode deopt_mode) { 999 Safepoint::DeoptMode deopt_mode) {
989 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode); 1000 RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
990 } 1001 }
991 1002
992 1003
993 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) { 1004 void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after
1066 DoGap(instr); 1077 DoGap(instr);
1067 } 1078 }
1068 1079
1069 1080
1070 void LCodeGen::DoParameter(LParameter* instr) { 1081 void LCodeGen::DoParameter(LParameter* instr) {
1071 // Nothing to do. 1082 // Nothing to do.
1072 } 1083 }
1073 1084
1074 1085
1075 void LCodeGen::DoCallStub(LCallStub* instr) { 1086 void LCodeGen::DoCallStub(LCallStub* instr) {
1087 ASSERT(ToRegister(instr->context()).is(cp));
1076 ASSERT(ToRegister(instr->result()).is(r0)); 1088 ASSERT(ToRegister(instr->result()).is(r0));
1077 switch (instr->hydrogen()->major_key()) { 1089 switch (instr->hydrogen()->major_key()) {
1078 case CodeStub::RegExpConstructResult: { 1090 case CodeStub::RegExpConstructResult: {
1079 RegExpConstructResultStub stub; 1091 RegExpConstructResultStub stub;
1080 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 1092 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
1081 break; 1093 break;
1082 } 1094 }
1083 case CodeStub::RegExpExec: { 1095 case CodeStub::RegExpExec: {
1084 RegExpExecStub stub; 1096 RegExpExecStub stub;
1085 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 1097 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
(...skipping 930 matching lines...) Expand 10 before | Expand all | Expand 10 after
2016 __ add(scratch, string, Operand(index, LSL, 1)); 2028 __ add(scratch, string, Operand(index, LSL, 1));
2017 __ strh(value, FieldMemOperand(scratch, SeqString::kHeaderSize)); 2029 __ strh(value, FieldMemOperand(scratch, SeqString::kHeaderSize));
2018 } 2030 }
2019 } 2031 }
2020 } 2032 }
2021 2033
2022 2034
2023 void LCodeGen::DoThrow(LThrow* instr) { 2035 void LCodeGen::DoThrow(LThrow* instr) {
2024 Register input_reg = EmitLoadRegister(instr->value(), ip); 2036 Register input_reg = EmitLoadRegister(instr->value(), ip);
2025 __ push(input_reg); 2037 __ push(input_reg);
2038 ASSERT(ToRegister(instr->context()).is(cp));
2026 CallRuntime(Runtime::kThrow, 1, instr); 2039 CallRuntime(Runtime::kThrow, 1, instr);
2027 2040
2028 if (FLAG_debug_code) { 2041 if (FLAG_debug_code) {
2029 __ stop("Unreachable code."); 2042 __ stop("Unreachable code.");
2030 } 2043 }
2031 } 2044 }
2032 2045
2033 2046
2034 void LCodeGen::DoAddI(LAddI* instr) { 2047 void LCodeGen::DoAddI(LAddI* instr) {
2035 LOperand* left = instr->left(); 2048 LOperand* left = instr->left();
(...skipping 117 matching lines...) Expand 10 before | Expand all | Expand 10 after
2153 break; 2166 break;
2154 } 2167 }
2155 default: 2168 default:
2156 UNREACHABLE(); 2169 UNREACHABLE();
2157 break; 2170 break;
2158 } 2171 }
2159 } 2172 }
2160 2173
2161 2174
2162 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { 2175 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
2176 ASSERT(ToRegister(instr->context()).is(cp));
2163 ASSERT(ToRegister(instr->left()).is(r1)); 2177 ASSERT(ToRegister(instr->left()).is(r1));
2164 ASSERT(ToRegister(instr->right()).is(r0)); 2178 ASSERT(ToRegister(instr->right()).is(r0));
2165 ASSERT(ToRegister(instr->result()).is(r0)); 2179 ASSERT(ToRegister(instr->result()).is(r0));
2166 2180
2167 BinaryOpStub stub(instr->op(), NO_OVERWRITE); 2181 BinaryOpStub stub(instr->op(), NO_OVERWRITE);
2168 // Block literal pool emission to ensure nop indicating no inlined smi code 2182 // Block literal pool emission to ensure nop indicating no inlined smi code
2169 // is in the correct position. 2183 // is in the correct position.
2170 Assembler::BlockConstPoolScope block_const_pool(masm()); 2184 Assembler::BlockConstPoolScope block_const_pool(masm());
2171 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 2185 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
2172 __ nop(); // Signals no inlined code. 2186 __ nop(); // Signals no inlined code.
(...skipping 390 matching lines...) Expand 10 before | Expand all | Expand 10 after
2563 return le; 2577 return le;
2564 case Token::GTE: 2578 case Token::GTE:
2565 return ge; 2579 return ge;
2566 default: 2580 default:
2567 UNREACHABLE(); 2581 UNREACHABLE();
2568 return kNoCondition; 2582 return kNoCondition;
2569 } 2583 }
2570 } 2584 }
2571 2585
2572 2586
2573 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { 2587 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
ulan 2013/09/26 10:06:33 ASSERT(ToRegister(instr->context()).is(cp));
Rodolph Perfetta 2013/09/26 12:20:12 Done.
2574 Token::Value op = instr->op(); 2588 Token::Value op = instr->op();
2575 2589
2576 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 2590 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
2577 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2591 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2578 // This instruction also signals no smi code inlined. 2592 // This instruction also signals no smi code inlined.
2579 __ cmp(r0, Operand::Zero()); 2593 __ cmp(r0, Operand::Zero());
2580 2594
2581 Condition condition = ComputeCompareCondition(op); 2595 Condition condition = ComputeCompareCondition(op);
2582 2596
2583 EmitBranch(instr, condition); 2597 EmitBranch(instr, condition);
(...skipping 140 matching lines...) Expand 10 before | Expand all | Expand 10 after
2724 Register reg = ToRegister(instr->value()); 2738 Register reg = ToRegister(instr->value());
2725 Register temp = ToRegister(instr->temp()); 2739 Register temp = ToRegister(instr->temp());
2726 2740
2727 __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset)); 2741 __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
2728 __ cmp(temp, Operand(instr->map())); 2742 __ cmp(temp, Operand(instr->map()));
2729 EmitBranch(instr, eq); 2743 EmitBranch(instr, eq);
2730 } 2744 }
2731 2745
2732 2746
2733 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { 2747 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
2748 ASSERT(ToRegister(instr->context()).is(cp));
2734 ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0. 2749 ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0.
2735 ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1. 2750 ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1.
2736 2751
2737 InstanceofStub stub(InstanceofStub::kArgsInRegisters); 2752 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
2738 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 2753 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
2739 2754
2740 __ cmp(r0, Operand::Zero()); 2755 __ cmp(r0, Operand::Zero());
2741 __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne); 2756 __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
2742 __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq); 2757 __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
2743 } 2758 }
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after
2833 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; 2848 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
2834 flags = static_cast<InstanceofStub::Flags>( 2849 flags = static_cast<InstanceofStub::Flags>(
2835 flags | InstanceofStub::kArgsInRegisters); 2850 flags | InstanceofStub::kArgsInRegisters);
2836 flags = static_cast<InstanceofStub::Flags>( 2851 flags = static_cast<InstanceofStub::Flags>(
2837 flags | InstanceofStub::kCallSiteInlineCheck); 2852 flags | InstanceofStub::kCallSiteInlineCheck);
2838 flags = static_cast<InstanceofStub::Flags>( 2853 flags = static_cast<InstanceofStub::Flags>(
2839 flags | InstanceofStub::kReturnTrueFalseObject); 2854 flags | InstanceofStub::kReturnTrueFalseObject);
2840 InstanceofStub stub(flags); 2855 InstanceofStub stub(flags);
2841 2856
2842 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 2857 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
2858 LoadContextFromDeferred(instr->context());
2843 2859
2844 // Get the temp register reserved by the instruction. This needs to be r4 as 2860 // Get the temp register reserved by the instruction. This needs to be r4 as
2845 // its slot of the pushing of safepoint registers is used to communicate the 2861 // its slot of the pushing of safepoint registers is used to communicate the
2846 // offset to the location of the map check. 2862 // offset to the location of the map check.
2847 Register temp = ToRegister(instr->temp()); 2863 Register temp = ToRegister(instr->temp());
2848 ASSERT(temp.is(r4)); 2864 ASSERT(temp.is(r4));
2849 __ LoadHeapObject(InstanceofStub::right(), instr->function()); 2865 __ LoadHeapObject(InstanceofStub::right(), instr->function());
2850 static const int kAdditionalDelta = 5; 2866 static const int kAdditionalDelta = 5;
2851 // Make sure that code size is predictable, since we use specific constant 2867 // Make sure that code size is predictable, since we use specific constant
2852 // offsets in the code to find embedded values. 2868 // offsets in the code to find embedded values.
(...skipping 23 matching lines...) Expand all
2876 2892
2877 2893
2878 void LCodeGen::DoInstanceSize(LInstanceSize* instr) { 2894 void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
2879 Register object = ToRegister(instr->object()); 2895 Register object = ToRegister(instr->object());
2880 Register result = ToRegister(instr->result()); 2896 Register result = ToRegister(instr->result());
2881 __ ldr(result, FieldMemOperand(object, HeapObject::kMapOffset)); 2897 __ ldr(result, FieldMemOperand(object, HeapObject::kMapOffset));
2882 __ ldrb(result, FieldMemOperand(result, Map::kInstanceSizeOffset)); 2898 __ ldrb(result, FieldMemOperand(result, Map::kInstanceSizeOffset));
2883 } 2899 }
2884 2900
2885 2901
2886 void LCodeGen::DoCmpT(LCmpT* instr) { 2902 void LCodeGen::DoCmpT(LCmpT* instr) {
ulan 2013/09/26 10:06:33 ASSERT(ToRegister(instr->context()).is(cp));
Rodolph Perfetta 2013/09/26 12:20:12 Done.
2887 Token::Value op = instr->op(); 2903 Token::Value op = instr->op();
2888 2904
2889 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 2905 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
2890 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2906 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2891 // This instruction also signals no smi code inlined. 2907 // This instruction also signals no smi code inlined.
2892 __ cmp(r0, Operand::Zero()); 2908 __ cmp(r0, Operand::Zero());
2893 2909
2894 Condition condition = ComputeCompareCondition(op); 2910 Condition condition = ComputeCompareCondition(op);
2895 __ LoadRoot(ToRegister(instr->result()), 2911 __ LoadRoot(ToRegister(instr->result()),
2896 Heap::kTrueValueRootIndex, 2912 Heap::kTrueValueRootIndex,
2897 condition); 2913 condition);
2898 __ LoadRoot(ToRegister(instr->result()), 2914 __ LoadRoot(ToRegister(instr->result()),
2899 Heap::kFalseValueRootIndex, 2915 Heap::kFalseValueRootIndex,
2900 NegateCondition(condition)); 2916 NegateCondition(condition));
2901 } 2917 }
2902 2918
2903 2919
2904 void LCodeGen::DoReturn(LReturn* instr) { 2920 void LCodeGen::DoReturn(LReturn* instr) {
2905 if (FLAG_trace && info()->IsOptimizing()) { 2921 if (FLAG_trace && info()->IsOptimizing()) {
2906 // Push the return value on the stack as the parameter. 2922 // Push the return value on the stack as the parameter.
2907 // Runtime::TraceExit returns its parameter in r0. 2923 // Runtime::TraceExit returns its parameter in r0. We're leaving the code
2924 // managed by the register allocator and tearing down the frame, it's
2925 // safe to write to the context register.
2908 __ push(r0); 2926 __ push(r0);
2927 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2909 __ CallRuntime(Runtime::kTraceExit, 1); 2928 __ CallRuntime(Runtime::kTraceExit, 1);
2910 } 2929 }
2911 if (info()->saves_caller_doubles()) { 2930 if (info()->saves_caller_doubles()) {
2912 ASSERT(NeedsEagerFrame()); 2931 ASSERT(NeedsEagerFrame());
2913 BitVector* doubles = chunk()->allocated_double_registers(); 2932 BitVector* doubles = chunk()->allocated_double_registers();
2914 BitVector::Iterator save_iterator(doubles); 2933 BitVector::Iterator save_iterator(doubles);
2915 int count = 0; 2934 int count = 0;
2916 while (!save_iterator.Done()) { 2935 while (!save_iterator.Done()) {
2917 __ vldr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()), 2936 __ vldr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
2918 MemOperand(sp, count * kDoubleSize)); 2937 MemOperand(sp, count * kDoubleSize));
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
2953 __ ldr(result, FieldMemOperand(ip, Cell::kValueOffset)); 2972 __ ldr(result, FieldMemOperand(ip, Cell::kValueOffset));
2954 if (instr->hydrogen()->RequiresHoleCheck()) { 2973 if (instr->hydrogen()->RequiresHoleCheck()) {
2955 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 2974 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2956 __ cmp(result, ip); 2975 __ cmp(result, ip);
2957 DeoptimizeIf(eq, instr->environment()); 2976 DeoptimizeIf(eq, instr->environment());
2958 } 2977 }
2959 } 2978 }
2960 2979
2961 2980
2962 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { 2981 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2982 ASSERT(ToRegister(instr->context()).is(cp));
2963 ASSERT(ToRegister(instr->global_object()).is(r0)); 2983 ASSERT(ToRegister(instr->global_object()).is(r0));
2964 ASSERT(ToRegister(instr->result()).is(r0)); 2984 ASSERT(ToRegister(instr->result()).is(r0));
2965 2985
2966 __ mov(r2, Operand(instr->name())); 2986 __ mov(r2, Operand(instr->name()));
2967 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET 2987 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET
2968 : RelocInfo::CODE_TARGET_CONTEXT; 2988 : RelocInfo::CODE_TARGET_CONTEXT;
2969 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 2989 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2970 CallCode(ic, mode, instr); 2990 CallCode(ic, mode, instr);
2971 } 2991 }
2972 2992
(...skipping 17 matching lines...) Expand all
2990 DeoptimizeIf(eq, instr->environment()); 3010 DeoptimizeIf(eq, instr->environment());
2991 } 3011 }
2992 3012
2993 // Store the value. 3013 // Store the value.
2994 __ str(value, FieldMemOperand(cell, Cell::kValueOffset)); 3014 __ str(value, FieldMemOperand(cell, Cell::kValueOffset));
2995 // Cells are always rescanned, so no write barrier here. 3015 // Cells are always rescanned, so no write barrier here.
2996 } 3016 }
2997 3017
2998 3018
2999 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { 3019 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
3020 ASSERT(ToRegister(instr->context()).is(cp));
3000 ASSERT(ToRegister(instr->global_object()).is(r1)); 3021 ASSERT(ToRegister(instr->global_object()).is(r1));
3001 ASSERT(ToRegister(instr->value()).is(r0)); 3022 ASSERT(ToRegister(instr->value()).is(r0));
3002 3023
3003 __ mov(r2, Operand(instr->name())); 3024 __ mov(r2, Operand(instr->name()));
3004 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) 3025 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
3005 ? isolate()->builtins()->StoreIC_Initialize_Strict() 3026 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3006 : isolate()->builtins()->StoreIC_Initialize(); 3027 : isolate()->builtins()->StoreIC_Initialize();
3007 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); 3028 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
3008 } 3029 }
3009 3030
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after
3083 if (access.IsInobject()) { 3104 if (access.IsInobject()) {
3084 __ ldr(result, FieldMemOperand(object, offset)); 3105 __ ldr(result, FieldMemOperand(object, offset));
3085 } else { 3106 } else {
3086 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); 3107 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
3087 __ ldr(result, FieldMemOperand(result, offset)); 3108 __ ldr(result, FieldMemOperand(result, offset));
3088 } 3109 }
3089 } 3110 }
3090 3111
3091 3112
3092 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { 3113 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
3114 ASSERT(ToRegister(instr->context()).is(cp));
3093 ASSERT(ToRegister(instr->object()).is(r0)); 3115 ASSERT(ToRegister(instr->object()).is(r0));
3094 ASSERT(ToRegister(instr->result()).is(r0)); 3116 ASSERT(ToRegister(instr->result()).is(r0));
3095 3117
3096 // Name is always in r2. 3118 // Name is always in r2.
3097 __ mov(r2, Operand(instr->name())); 3119 __ mov(r2, Operand(instr->name()));
3098 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 3120 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
3099 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); 3121 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
3100 } 3122 }
3101 3123
3102 3124
(...skipping 273 matching lines...) Expand 10 before | Expand all | Expand 10 after
3376 if (shift_size >= 0) { 3398 if (shift_size >= 0) {
3377 return MemOperand(base, scratch0(), LSL, shift_size); 3399 return MemOperand(base, scratch0(), LSL, shift_size);
3378 } else { 3400 } else {
3379 ASSERT_EQ(-1, shift_size); 3401 ASSERT_EQ(-1, shift_size);
3380 return MemOperand(base, scratch0(), LSR, 1); 3402 return MemOperand(base, scratch0(), LSR, 1);
3381 } 3403 }
3382 } 3404 }
3383 3405
3384 3406
3385 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { 3407 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
3408 ASSERT(ToRegister(instr->context()).is(cp));
3386 ASSERT(ToRegister(instr->object()).is(r1)); 3409 ASSERT(ToRegister(instr->object()).is(r1));
3387 ASSERT(ToRegister(instr->key()).is(r0)); 3410 ASSERT(ToRegister(instr->key()).is(r0));
3388 3411
3389 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 3412 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
3390 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); 3413 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
3391 } 3414 }
3392 3415
3393 3416
3394 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { 3417 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
3395 Register scratch = scratch0(); 3418 Register scratch = scratch0();
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after
3520 ASSERT(instr->HasPointerMap()); 3543 ASSERT(instr->HasPointerMap());
3521 LPointerMap* pointers = instr->pointer_map(); 3544 LPointerMap* pointers = instr->pointer_map();
3522 RecordPosition(pointers->position()); 3545 RecordPosition(pointers->position());
3523 SafepointGenerator safepoint_generator( 3546 SafepointGenerator safepoint_generator(
3524 this, pointers, Safepoint::kLazyDeopt); 3547 this, pointers, Safepoint::kLazyDeopt);
3525 // The number of arguments is stored in receiver which is r0, as expected 3548 // The number of arguments is stored in receiver which is r0, as expected
3526 // by InvokeFunction. 3549 // by InvokeFunction.
3527 ParameterCount actual(receiver); 3550 ParameterCount actual(receiver);
3528 __ InvokeFunction(function, actual, CALL_FUNCTION, 3551 __ InvokeFunction(function, actual, CALL_FUNCTION,
3529 safepoint_generator, CALL_AS_METHOD); 3552 safepoint_generator, CALL_AS_METHOD);
3530 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3531 } 3553 }
3532 3554
3533 3555
3534 void LCodeGen::DoPushArgument(LPushArgument* instr) { 3556 void LCodeGen::DoPushArgument(LPushArgument* instr) {
3535 LOperand* argument = instr->value(); 3557 LOperand* argument = instr->value();
3536 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { 3558 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
3537 Abort(kDoPushArgumentNotImplementedForDoubleType); 3559 Abort(kDoPushArgumentNotImplementedForDoubleType);
3538 } else { 3560 } else {
3539 Register argument_reg = EmitLoadRegister(argument, ip); 3561 Register argument_reg = EmitLoadRegister(argument, ip);
3540 __ push(argument_reg); 3562 __ push(argument_reg);
3541 } 3563 }
3542 } 3564 }
3543 3565
3544 3566
3545 void LCodeGen::DoDrop(LDrop* instr) { 3567 void LCodeGen::DoDrop(LDrop* instr) {
3546 __ Drop(instr->count()); 3568 __ Drop(instr->count());
3547 } 3569 }
3548 3570
3549 3571
3550 void LCodeGen::DoThisFunction(LThisFunction* instr) { 3572 void LCodeGen::DoThisFunction(LThisFunction* instr) {
3551 Register result = ToRegister(instr->result()); 3573 Register result = ToRegister(instr->result());
3552 __ ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 3574 __ ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3553 } 3575 }
3554 3576
3555 3577
3556 void LCodeGen::DoContext(LContext* instr) { 3578 void LCodeGen::DoContext(LContext* instr) {
3557 // If there is a non-return use, the context must be moved to a register. 3579 // If there is a non-return use, the context must be moved to a register.
3558 Register result = ToRegister(instr->result()); 3580 Register result = ToRegister(instr->result());
3559 for (HUseIterator it(instr->hydrogen()->uses()); !it.Done(); it.Advance()) { 3581 if (info()->IsOptimizing()) {
3560 if (!it.value()->IsReturn()) { 3582 __ ldr(result, MemOperand(fp, StandardFrameConstants::kContextOffset));
3561 __ mov(result, cp); 3583 } else {
3562 return; 3584 // If there is no frame, the context must be in cp.
3563 } 3585 ASSERT(result.is(cp));
3564 } 3586 }
3565 } 3587 }
3566 3588
3567 3589
3568 void LCodeGen::DoOuterContext(LOuterContext* instr) { 3590 void LCodeGen::DoOuterContext(LOuterContext* instr) {
3569 Register context = ToRegister(instr->context()); 3591 Register context = ToRegister(instr->context());
3570 Register result = ToRegister(instr->result()); 3592 Register result = ToRegister(instr->result());
3571 __ ldr(result, 3593 __ ldr(result,
3572 MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); 3594 MemOperand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
3573 } 3595 }
3574 3596
3575 3597
3576 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { 3598 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
3599 ASSERT(ToRegister(instr->context()).is(cp));
3577 __ push(cp); // The context is the first argument. 3600 __ push(cp); // The context is the first argument.
3578 __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs()); 3601 __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs());
3579 __ push(scratch0()); 3602 __ push(scratch0());
3580 __ mov(scratch0(), Operand(Smi::FromInt(instr->hydrogen()->flags()))); 3603 __ mov(scratch0(), Operand(Smi::FromInt(instr->hydrogen()->flags())));
3581 __ push(scratch0()); 3604 __ push(scratch0());
3582 CallRuntime(Runtime::kDeclareGlobals, 3, instr); 3605 CallRuntime(Runtime::kDeclareGlobals, 3, instr);
3583 } 3606 }
3584 3607
3585 3608
3586 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { 3609 void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
3610 Register context = ToRegister(instr->context());
3587 Register result = ToRegister(instr->result()); 3611 Register result = ToRegister(instr->result());
3588 __ ldr(result, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 3612 __ ldr(result, ContextOperand(context, Context::GLOBAL_OBJECT_INDEX));
3589 } 3613 }
3590 3614
3591 3615
3592 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { 3616 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
3593 Register global = ToRegister(instr->global_object()); 3617 Register global = ToRegister(instr->global_object());
3594 Register result = ToRegister(instr->result()); 3618 Register result = ToRegister(instr->result());
3595 __ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset)); 3619 __ ldr(result, FieldMemOperand(global, GlobalObject::kGlobalReceiverOffset));
3596 } 3620 }
3597 3621
3598 3622
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
3631 3655
3632 // Set up deoptimization. 3656 // Set up deoptimization.
3633 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); 3657 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
3634 } else { 3658 } else {
3635 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); 3659 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3636 ParameterCount count(arity); 3660 ParameterCount count(arity);
3637 ParameterCount expected(formal_parameter_count); 3661 ParameterCount expected(formal_parameter_count);
3638 __ InvokeFunction( 3662 __ InvokeFunction(
3639 function, expected, count, CALL_FUNCTION, generator, call_kind); 3663 function, expected, count, CALL_FUNCTION, generator, call_kind);
3640 } 3664 }
3641
3642 // Restore context.
3643 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3644 } 3665 }
3645 3666
3646 3667
3647 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { 3668 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
3648 ASSERT(ToRegister(instr->result()).is(r0)); 3669 ASSERT(ToRegister(instr->result()).is(r0));
3649 CallKnownFunction(instr->hydrogen()->function(), 3670 CallKnownFunction(instr->hydrogen()->function(),
3650 instr->hydrogen()->formal_parameter_count(), 3671 instr->hydrogen()->formal_parameter_count(),
3651 instr->arity(), 3672 instr->arity(),
3652 instr, 3673 instr,
3653 CALL_AS_METHOD, 3674 CALL_AS_METHOD,
3654 R1_UNINITIALIZED); 3675 R1_UNINITIALIZED);
3655 } 3676 }
3656 3677
3657 3678
3658 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { 3679 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
3680 ASSERT(instr->context() != NULL);
3681 ASSERT(ToRegister(instr->context()).is(cp));
3659 Register input = ToRegister(instr->value()); 3682 Register input = ToRegister(instr->value());
3660 Register result = ToRegister(instr->result()); 3683 Register result = ToRegister(instr->result());
3661 Register scratch = scratch0(); 3684 Register scratch = scratch0();
3662 3685
3663 // Deoptimize if not a heap number. 3686 // Deoptimize if not a heap number.
3664 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); 3687 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
3665 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); 3688 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3666 __ cmp(scratch, Operand(ip)); 3689 __ cmp(scratch, Operand(ip));
3667 DeoptimizeIf(ne, instr->environment()); 3690 DeoptimizeIf(ne, instr->environment());
3668 3691
(...skipping 23 matching lines...) Expand all
3692 // exponent: floating point exponent value. 3715 // exponent: floating point exponent value.
3693 3716
3694 Label allocated, slow; 3717 Label allocated, slow;
3695 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex); 3718 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
3696 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow); 3719 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
3697 __ b(&allocated); 3720 __ b(&allocated);
3698 3721
3699 // Slow case: Call the runtime system to do the number allocation. 3722 // Slow case: Call the runtime system to do the number allocation.
3700 __ bind(&slow); 3723 __ bind(&slow);
3701 3724
3702 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); 3725 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr,
3726 instr->context());
3703 // Set the pointer to the new heap number in tmp. 3727 // Set the pointer to the new heap number in tmp.
3704 if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0)); 3728 if (!tmp1.is(r0)) __ mov(tmp1, Operand(r0));
3705 // Restore input_reg after call to runtime. 3729 // Restore input_reg after call to runtime.
3706 __ LoadFromSafepointRegisterSlot(input, input); 3730 __ LoadFromSafepointRegisterSlot(input, input);
3707 __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); 3731 __ ldr(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset));
3708 3732
3709 __ bind(&allocated); 3733 __ bind(&allocated);
3710 // exponent: floating point exponent value. 3734 // exponent: floating point exponent value.
3711 // tmp1: allocated heap number. 3735 // tmp1: allocated heap number.
3712 __ bic(exponent, exponent, Operand(HeapNumber::kSignMask)); 3736 __ bic(exponent, exponent, Operand(HeapNumber::kSignMask));
(...skipping 249 matching lines...) Expand 10 before | Expand all | Expand 10 after
3962 Register temp1 = ToRegister(instr->temp1()); 3986 Register temp1 = ToRegister(instr->temp1());
3963 Register temp2 = ToRegister(instr->temp2()); 3987 Register temp2 = ToRegister(instr->temp2());
3964 3988
3965 MathExpGenerator::EmitMathExp( 3989 MathExpGenerator::EmitMathExp(
3966 masm(), input, result, double_scratch1, double_scratch2, 3990 masm(), input, result, double_scratch1, double_scratch2,
3967 temp1, temp2, scratch0()); 3991 temp1, temp2, scratch0());
3968 } 3992 }
3969 3993
3970 3994
3971 void LCodeGen::DoMathLog(LMathLog* instr) { 3995 void LCodeGen::DoMathLog(LMathLog* instr) {
3972 ASSERT(ToDoubleRegister(instr->result()).is(d2)); 3996 ASSERT(ToDoubleRegister(instr->result()).is(d2));
ulan 2013/09/26 10:06:33 Set cp as in other DoMath* functions.
Rodolph Perfetta 2013/09/26 12:20:12 Done.
3973 TranscendentalCacheStub stub(TranscendentalCache::LOG, 3997 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3974 TranscendentalCacheStub::UNTAGGED); 3998 TranscendentalCacheStub::UNTAGGED);
3975 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 3999 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
3976 } 4000 }
3977 4001
3978 4002
3979 void LCodeGen::DoMathTan(LMathTan* instr) { 4003 void LCodeGen::DoMathTan(LMathTan* instr) {
3980 ASSERT(ToDoubleRegister(instr->result()).is(d2)); 4004 ASSERT(ToDoubleRegister(instr->result()).is(d2));
4005 // Set the context register to a GC-safe fake value. Clobbering it is
4006 // OK because this instruction is marked as a call.
4007 __ mov(cp, Operand::Zero());
3981 TranscendentalCacheStub stub(TranscendentalCache::TAN, 4008 TranscendentalCacheStub stub(TranscendentalCache::TAN,
3982 TranscendentalCacheStub::UNTAGGED); 4009 TranscendentalCacheStub::UNTAGGED);
3983 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4010 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
3984 } 4011 }
3985 4012
3986 4013
3987 void LCodeGen::DoMathCos(LMathCos* instr) { 4014 void LCodeGen::DoMathCos(LMathCos* instr) {
3988 ASSERT(ToDoubleRegister(instr->result()).is(d2)); 4015 ASSERT(ToDoubleRegister(instr->result()).is(d2));
4016 // Set the context register to a GC-safe fake value. Clobbering it is
4017 // OK because this instruction is marked as a call.
4018 __ mov(cp, Operand::Zero());
3989 TranscendentalCacheStub stub(TranscendentalCache::COS, 4019 TranscendentalCacheStub stub(TranscendentalCache::COS,
3990 TranscendentalCacheStub::UNTAGGED); 4020 TranscendentalCacheStub::UNTAGGED);
3991 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4021 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
3992 } 4022 }
3993 4023
3994 4024
3995 void LCodeGen::DoMathSin(LMathSin* instr) { 4025 void LCodeGen::DoMathSin(LMathSin* instr) {
3996 ASSERT(ToDoubleRegister(instr->result()).is(d2)); 4026 ASSERT(ToDoubleRegister(instr->result()).is(d2));
4027 // Set the context register to a GC-safe fake value. Clobbering it is
4028 // OK because this instruction is marked as a call.
4029 __ mov(cp, Operand::Zero());
3997 TranscendentalCacheStub stub(TranscendentalCache::SIN, 4030 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3998 TranscendentalCacheStub::UNTAGGED); 4031 TranscendentalCacheStub::UNTAGGED);
3999 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4032 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
4000 } 4033 }
4001 4034
4002 4035
4003 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { 4036 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
4037 ASSERT(ToRegister(instr->context()).is(cp));
4004 ASSERT(ToRegister(instr->function()).is(r1)); 4038 ASSERT(ToRegister(instr->function()).is(r1));
4005 ASSERT(instr->HasPointerMap()); 4039 ASSERT(instr->HasPointerMap());
4006 4040
4007 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); 4041 Handle<JSFunction> known_function = instr->hydrogen()->known_function();
4008 if (known_function.is_null()) { 4042 if (known_function.is_null()) {
4009 LPointerMap* pointers = instr->pointer_map(); 4043 LPointerMap* pointers = instr->pointer_map();
4010 RecordPosition(pointers->position()); 4044 RecordPosition(pointers->position());
4011 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); 4045 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
4012 ParameterCount count(instr->arity()); 4046 ParameterCount count(instr->arity());
4013 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD); 4047 __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
4014 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4015 } else { 4048 } else {
4016 CallKnownFunction(known_function, 4049 CallKnownFunction(known_function,
4017 instr->hydrogen()->formal_parameter_count(), 4050 instr->hydrogen()->formal_parameter_count(),
4018 instr->arity(), 4051 instr->arity(),
4019 instr, 4052 instr,
4020 CALL_AS_METHOD, 4053 CALL_AS_METHOD,
4021 R1_CONTAINS_TARGET); 4054 R1_CONTAINS_TARGET);
4022 } 4055 }
4023 } 4056 }
4024 4057
4025 4058
4026 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { 4059 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
4060 ASSERT(ToRegister(instr->context()).is(cp));
4027 ASSERT(ToRegister(instr->result()).is(r0)); 4061 ASSERT(ToRegister(instr->result()).is(r0));
4028 4062
4029 int arity = instr->arity(); 4063 int arity = instr->arity();
4030 Handle<Code> ic = 4064 Handle<Code> ic =
4031 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity); 4065 isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
4032 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); 4066 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
4033 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4034 } 4067 }
4035 4068
4036 4069
4037 void LCodeGen::DoCallNamed(LCallNamed* instr) { 4070 void LCodeGen::DoCallNamed(LCallNamed* instr) {
4071 ASSERT(ToRegister(instr->context()).is(cp));
4038 ASSERT(ToRegister(instr->result()).is(r0)); 4072 ASSERT(ToRegister(instr->result()).is(r0));
4039 4073
4040 int arity = instr->arity(); 4074 int arity = instr->arity();
4041 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; 4075 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
4042 Handle<Code> ic = 4076 Handle<Code> ic =
4043 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); 4077 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
4044 __ mov(r2, Operand(instr->name())); 4078 __ mov(r2, Operand(instr->name()));
4045 CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS); 4079 CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS);
4046 // Restore context register.
4047 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4048 } 4080 }
4049 4081
4050 4082
4051 void LCodeGen::DoCallFunction(LCallFunction* instr) { 4083 void LCodeGen::DoCallFunction(LCallFunction* instr) {
4084 ASSERT(ToRegister(instr->context()).is(cp));
4052 ASSERT(ToRegister(instr->function()).is(r1)); 4085 ASSERT(ToRegister(instr->function()).is(r1));
4053 ASSERT(ToRegister(instr->result()).is(r0)); 4086 ASSERT(ToRegister(instr->result()).is(r0));
4054 4087
4055 int arity = instr->arity(); 4088 int arity = instr->arity();
4056 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS); 4089 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
4057 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4090 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
4058 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4059 } 4091 }
4060 4092
4061 4093
4062 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { 4094 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
4095 ASSERT(ToRegister(instr->context()).is(cp));
4063 ASSERT(ToRegister(instr->result()).is(r0)); 4096 ASSERT(ToRegister(instr->result()).is(r0));
4064 4097
4065 int arity = instr->arity(); 4098 int arity = instr->arity();
4066 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT; 4099 RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
4067 Handle<Code> ic = 4100 Handle<Code> ic =
4068 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); 4101 isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
4069 __ mov(r2, Operand(instr->name())); 4102 __ mov(r2, Operand(instr->name()));
4070 CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS); 4103 CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS);
4071 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4072 } 4104 }
4073 4105
4074 4106
4075 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { 4107 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
4076 ASSERT(ToRegister(instr->result()).is(r0)); 4108 ASSERT(ToRegister(instr->result()).is(r0));
4077 CallKnownFunction(instr->hydrogen()->target(), 4109 CallKnownFunction(instr->hydrogen()->target(),
4078 instr->hydrogen()->formal_parameter_count(), 4110 instr->hydrogen()->formal_parameter_count(),
4079 instr->arity(), 4111 instr->arity(),
4080 instr, 4112 instr,
4081 CALL_AS_FUNCTION, 4113 CALL_AS_FUNCTION,
4082 R1_UNINITIALIZED); 4114 R1_UNINITIALIZED);
4083 } 4115 }
4084 4116
4085 4117
4086 void LCodeGen::DoCallNew(LCallNew* instr) { 4118 void LCodeGen::DoCallNew(LCallNew* instr) {
4119 ASSERT(ToRegister(instr->context()).is(cp));
4087 ASSERT(ToRegister(instr->constructor()).is(r1)); 4120 ASSERT(ToRegister(instr->constructor()).is(r1));
4088 ASSERT(ToRegister(instr->result()).is(r0)); 4121 ASSERT(ToRegister(instr->result()).is(r0));
4089 4122
4090 __ mov(r0, Operand(instr->arity())); 4123 __ mov(r0, Operand(instr->arity()));
4091 // No cell in r2 for construct type feedback in optimized code 4124 // No cell in r2 for construct type feedback in optimized code
4092 Handle<Object> undefined_value(isolate()->factory()->undefined_value()); 4125 Handle<Object> undefined_value(isolate()->factory()->undefined_value());
4093 __ mov(r2, Operand(undefined_value)); 4126 __ mov(r2, Operand(undefined_value));
4094 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); 4127 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
4095 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); 4128 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
4096 } 4129 }
4097 4130
4098 4131
4099 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { 4132 void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
4133 ASSERT(ToRegister(instr->context()).is(cp));
4100 ASSERT(ToRegister(instr->constructor()).is(r1)); 4134 ASSERT(ToRegister(instr->constructor()).is(r1));
4101 ASSERT(ToRegister(instr->result()).is(r0)); 4135 ASSERT(ToRegister(instr->result()).is(r0));
4102 4136
4103 __ mov(r0, Operand(instr->arity())); 4137 __ mov(r0, Operand(instr->arity()));
4104 __ mov(r2, Operand(instr->hydrogen()->property_cell())); 4138 __ mov(r2, Operand(instr->hydrogen()->property_cell()));
4105 ElementsKind kind = instr->hydrogen()->elements_kind(); 4139 ElementsKind kind = instr->hydrogen()->elements_kind();
4106 AllocationSiteOverrideMode override_mode = 4140 AllocationSiteOverrideMode override_mode =
4107 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) 4141 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE)
4108 ? DISABLE_ALLOCATION_SITES 4142 ? DISABLE_ALLOCATION_SITES
4109 : DONT_OVERRIDE; 4143 : DONT_OVERRIDE;
(...skipping 131 matching lines...) Expand 10 before | Expand all | Expand 10 after
4241 GetLinkRegisterState(), 4275 GetLinkRegisterState(),
4242 kSaveFPRegs, 4276 kSaveFPRegs,
4243 EMIT_REMEMBERED_SET, 4277 EMIT_REMEMBERED_SET,
4244 check_needed); 4278 check_needed);
4245 } 4279 }
4246 } 4280 }
4247 } 4281 }
4248 4282
4249 4283
4250 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 4284 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
4285 ASSERT(ToRegister(instr->context()).is(cp));
4251 ASSERT(ToRegister(instr->object()).is(r1)); 4286 ASSERT(ToRegister(instr->object()).is(r1));
4252 ASSERT(ToRegister(instr->value()).is(r0)); 4287 ASSERT(ToRegister(instr->value()).is(r0));
4253 4288
4254 // Name is always in r2. 4289 // Name is always in r2.
4255 __ mov(r2, Operand(instr->name())); 4290 __ mov(r2, Operand(instr->name()));
4256 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) 4291 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
4257 ? isolate()->builtins()->StoreIC_Initialize_Strict() 4292 ? isolate()->builtins()->StoreIC_Initialize_Strict()
4258 : isolate()->builtins()->StoreIC_Initialize(); 4293 : isolate()->builtins()->StoreIC_Initialize();
4259 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); 4294 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
4260 } 4295 }
(...skipping 203 matching lines...) Expand 10 before | Expand all | Expand 10 after
4464 DoStoreKeyedExternalArray(instr); 4499 DoStoreKeyedExternalArray(instr);
4465 } else if (instr->hydrogen()->value()->representation().IsDouble()) { 4500 } else if (instr->hydrogen()->value()->representation().IsDouble()) {
4466 DoStoreKeyedFixedDoubleArray(instr); 4501 DoStoreKeyedFixedDoubleArray(instr);
4467 } else { 4502 } else {
4468 DoStoreKeyedFixedArray(instr); 4503 DoStoreKeyedFixedArray(instr);
4469 } 4504 }
4470 } 4505 }
4471 4506
4472 4507
4473 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { 4508 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
4509 ASSERT(ToRegister(instr->context()).is(cp));
4474 ASSERT(ToRegister(instr->object()).is(r2)); 4510 ASSERT(ToRegister(instr->object()).is(r2));
4475 ASSERT(ToRegister(instr->key()).is(r1)); 4511 ASSERT(ToRegister(instr->key()).is(r1));
4476 ASSERT(ToRegister(instr->value()).is(r0)); 4512 ASSERT(ToRegister(instr->value()).is(r0));
4477 4513
4478 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode) 4514 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
4479 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() 4515 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
4480 : isolate()->builtins()->KeyedStoreIC_Initialize(); 4516 : isolate()->builtins()->KeyedStoreIC_Initialize();
4481 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS); 4517 CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
4482 } 4518 }
4483 4519
(...skipping 13 matching lines...) Expand all
4497 __ b(ne, &not_applicable); 4533 __ b(ne, &not_applicable);
4498 4534
4499 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { 4535 if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
4500 Register new_map_reg = ToRegister(instr->new_map_temp()); 4536 Register new_map_reg = ToRegister(instr->new_map_temp());
4501 __ mov(new_map_reg, Operand(to_map)); 4537 __ mov(new_map_reg, Operand(to_map));
4502 __ str(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset)); 4538 __ str(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
4503 // Write barrier. 4539 // Write barrier.
4504 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, 4540 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
4505 scratch, GetLinkRegisterState(), kDontSaveFPRegs); 4541 scratch, GetLinkRegisterState(), kDontSaveFPRegs);
4506 } else { 4542 } else {
4543 ASSERT(ToRegister(instr->context()).is(cp));
4507 PushSafepointRegistersScope scope( 4544 PushSafepointRegistersScope scope(
4508 this, Safepoint::kWithRegistersAndDoubles); 4545 this, Safepoint::kWithRegistersAndDoubles);
4509 __ Move(r0, object_reg); 4546 __ Move(r0, object_reg);
4510 __ Move(r1, to_map); 4547 __ Move(r1, to_map);
4511 TransitionElementsKindStub stub(from_kind, to_kind); 4548 TransitionElementsKindStub stub(from_kind, to_kind);
4512 __ CallStub(&stub); 4549 __ CallStub(&stub);
4513 RecordSafepointWithRegistersAndDoubles( 4550 RecordSafepointWithRegistersAndDoubles(
4514 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); 4551 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4515 } 4552 }
4516 __ bind(&not_applicable); 4553 __ bind(&not_applicable);
4517 } 4554 }
4518 4555
4519 4556
4520 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { 4557 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
4521 Register object = ToRegister(instr->object()); 4558 Register object = ToRegister(instr->object());
4522 Register temp = ToRegister(instr->temp()); 4559 Register temp = ToRegister(instr->temp());
4523 __ TestJSArrayForAllocationMemento(object, temp); 4560 __ TestJSArrayForAllocationMemento(object, temp);
4524 DeoptimizeIf(eq, instr->environment()); 4561 DeoptimizeIf(eq, instr->environment());
4525 } 4562 }
4526 4563
4527 4564
4528 void LCodeGen::DoStringAdd(LStringAdd* instr) { 4565 void LCodeGen::DoStringAdd(LStringAdd* instr) {
ulan 2013/09/26 10:06:33 ASSERT(ToRegister(instr->context()).is(cp));
Rodolph Perfetta 2013/09/26 12:20:12 Done.
4529 __ push(ToRegister(instr->left())); 4566 __ push(ToRegister(instr->left()));
4530 __ push(ToRegister(instr->right())); 4567 __ push(ToRegister(instr->right()));
4531 StringAddStub stub(instr->hydrogen()->flags()); 4568 StringAddStub stub(instr->hydrogen()->flags());
4532 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 4569 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
4533 } 4570 }
4534 4571
4535 4572
4536 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { 4573 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
4537 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { 4574 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode {
4538 public: 4575 public:
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
4574 // DoStringCharCodeAt above. 4611 // DoStringCharCodeAt above.
4575 if (instr->index()->IsConstantOperand()) { 4612 if (instr->index()->IsConstantOperand()) {
4576 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); 4613 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
4577 __ mov(scratch, Operand(Smi::FromInt(const_index))); 4614 __ mov(scratch, Operand(Smi::FromInt(const_index)));
4578 __ push(scratch); 4615 __ push(scratch);
4579 } else { 4616 } else {
4580 Register index = ToRegister(instr->index()); 4617 Register index = ToRegister(instr->index());
4581 __ SmiTag(index); 4618 __ SmiTag(index);
4582 __ push(index); 4619 __ push(index);
4583 } 4620 }
4584 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr); 4621 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr,
4622 instr->context());
4585 __ AssertSmi(r0); 4623 __ AssertSmi(r0);
4586 __ SmiUntag(r0); 4624 __ SmiUntag(r0);
4587 __ StoreToSafepointRegisterSlot(r0, result); 4625 __ StoreToSafepointRegisterSlot(r0, result);
4588 } 4626 }
4589 4627
4590 4628
4591 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { 4629 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
4592 class DeferredStringCharFromCode V8_FINAL : public LDeferredCode { 4630 class DeferredStringCharFromCode V8_FINAL : public LDeferredCode {
4593 public: 4631 public:
4594 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr) 4632 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
4626 Register result = ToRegister(instr->result()); 4664 Register result = ToRegister(instr->result());
4627 4665
4628 // TODO(3095996): Get rid of this. For now, we need to make the 4666 // TODO(3095996): Get rid of this. For now, we need to make the
4629 // result register contain a valid pointer because it is already 4667 // result register contain a valid pointer because it is already
4630 // contained in the register pointer map. 4668 // contained in the register pointer map.
4631 __ mov(result, Operand::Zero()); 4669 __ mov(result, Operand::Zero());
4632 4670
4633 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 4671 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4634 __ SmiTag(char_code); 4672 __ SmiTag(char_code);
4635 __ push(char_code); 4673 __ push(char_code);
4636 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr); 4674 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
4637 __ StoreToSafepointRegisterSlot(r0, result); 4675 __ StoreToSafepointRegisterSlot(r0, result);
4638 } 4676 }
4639 4677
4640 4678
4641 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { 4679 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
4642 LOperand* input = instr->value(); 4680 LOperand* input = instr->value();
4643 ASSERT(input->IsRegister() || input->IsStackSlot()); 4681 ASSERT(input->IsRegister() || input->IsStackSlot());
4644 LOperand* output = instr->result(); 4682 LOperand* output = instr->result();
4645 ASSERT(output->IsDoubleRegister()); 4683 ASSERT(output->IsDoubleRegister());
4646 SwVfpRegister single_scratch = double_scratch0().low(); 4684 SwVfpRegister single_scratch = double_scratch0().low();
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after
4765 } 4803 }
4766 4804
4767 // Slow case: Call the runtime system to do the number allocation. 4805 // Slow case: Call the runtime system to do the number allocation.
4768 __ bind(&slow); 4806 __ bind(&slow);
4769 4807
4770 // TODO(3095996): Put a valid pointer value in the stack slot where the result 4808 // TODO(3095996): Put a valid pointer value in the stack slot where the result
4771 // register is stored, as this register is in the pointer map, but contains an 4809 // register is stored, as this register is in the pointer map, but contains an
4772 // integer value. 4810 // integer value.
4773 __ mov(ip, Operand::Zero()); 4811 __ mov(ip, Operand::Zero());
4774 __ StoreToSafepointRegisterSlot(ip, dst); 4812 __ StoreToSafepointRegisterSlot(ip, dst);
4775 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); 4813 // NumberTagI and NumberTagD use the context from the frame, rather than
4814 // the environment's HContext or HInlinedContext value.
4815 // They only call Runtime::kAllocateHeapNumber.
4816 // The corresponding HChange instructions are added in a phase that does
4817 // not have easy access to the local context.
4818 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4819 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4820 RecordSafepointWithRegisters(
4821 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4776 __ Move(dst, r0); 4822 __ Move(dst, r0);
4777 __ sub(dst, dst, Operand(kHeapObjectTag)); 4823 __ sub(dst, dst, Operand(kHeapObjectTag));
4778 4824
4779 // Done. Put the value in dbl_scratch into the value of the allocated heap 4825 // Done. Put the value in dbl_scratch into the value of the allocated heap
4780 // number. 4826 // number.
4781 __ bind(&done); 4827 __ bind(&done);
4782 __ vstr(dbl_scratch, dst, HeapNumber::kValueOffset); 4828 __ vstr(dbl_scratch, dst, HeapNumber::kValueOffset);
4783 __ add(dst, dst, Operand(kHeapObjectTag)); 4829 __ add(dst, dst, Operand(kHeapObjectTag));
4784 __ StoreToSafepointRegisterSlot(dst, dst); 4830 __ StoreToSafepointRegisterSlot(dst, dst);
4785 } 4831 }
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
4821 4867
4822 4868
4823 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { 4869 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
4824 // TODO(3095996): Get rid of this. For now, we need to make the 4870 // TODO(3095996): Get rid of this. For now, we need to make the
4825 // result register contain a valid pointer because it is already 4871 // result register contain a valid pointer because it is already
4826 // contained in the register pointer map. 4872 // contained in the register pointer map.
4827 Register reg = ToRegister(instr->result()); 4873 Register reg = ToRegister(instr->result());
4828 __ mov(reg, Operand::Zero()); 4874 __ mov(reg, Operand::Zero());
4829 4875
4830 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 4876 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
4831 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); 4877 // NumberTagI and NumberTagD use the context from the frame, rather than
4878 // the environment's HContext or HInlinedContext value.
4879 // They only call Runtime::kAllocateHeapNumber.
4880 // The corresponding HChange instructions are added in a phase that does
4881 // not have easy access to the local context.
4882 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4883 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4884 RecordSafepointWithRegisters(
4885 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4832 __ sub(r0, r0, Operand(kHeapObjectTag)); 4886 __ sub(r0, r0, Operand(kHeapObjectTag));
4833 __ StoreToSafepointRegisterSlot(r0, reg); 4887 __ StoreToSafepointRegisterSlot(r0, reg);
4834 } 4888 }
4835 4889
4836 4890
4837 void LCodeGen::DoSmiTag(LSmiTag* instr) { 4891 void LCodeGen::DoSmiTag(LSmiTag* instr) {
4838 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); 4892 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
4839 __ SmiTag(ToRegister(instr->result()), ToRegister(instr->value())); 4893 __ SmiTag(ToRegister(instr->result()), ToRegister(instr->value()));
4840 } 4894 }
4841 4895
(...skipping 308 matching lines...) Expand 10 before | Expand all | Expand 10 after
5150 __ cmp(reg, Operand(object)); 5204 __ cmp(reg, Operand(object));
5151 } 5205 }
5152 DeoptimizeIf(ne, instr->environment()); 5206 DeoptimizeIf(ne, instr->environment());
5153 } 5207 }
5154 5208
5155 5209
5156 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { 5210 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
5157 { 5211 {
5158 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 5212 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
5159 __ push(object); 5213 __ push(object);
5160 CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr); 5214 __ mov(cp, Operand::Zero());
5215 __ CallRuntimeSaveDoubles(Runtime::kMigrateInstance);
5216 RecordSafepointWithRegisters(
5217 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
5161 __ StoreToSafepointRegisterSlot(r0, scratch0()); 5218 __ StoreToSafepointRegisterSlot(r0, scratch0());
5162 } 5219 }
5163 __ tst(scratch0(), Operand(kSmiTagMask)); 5220 __ tst(scratch0(), Operand(kSmiTagMask));
5164 DeoptimizeIf(eq, instr->environment()); 5221 DeoptimizeIf(eq, instr->environment());
5165 } 5222 }
5166 5223
5167 5224
5168 void LCodeGen::DoCheckMaps(LCheckMaps* instr) { 5225 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
5169 class DeferredCheckMaps V8_FINAL : public LDeferredCode { 5226 class DeferredCheckMaps V8_FINAL : public LDeferredCode {
5170 public: 5227 public:
(...skipping 182 matching lines...) Expand 10 before | Expand all | Expand 10 after
5353 __ SmiTag(size); 5410 __ SmiTag(size);
5354 __ push(size); 5411 __ push(size);
5355 } else { 5412 } else {
5356 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); 5413 int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
5357 __ Push(Smi::FromInt(size)); 5414 __ Push(Smi::FromInt(size));
5358 } 5415 }
5359 5416
5360 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { 5417 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5361 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); 5418 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
5362 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); 5419 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
5363 CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr); 5420 CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr,
5421 instr->context());
5364 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { 5422 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5365 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); 5423 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
5366 CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr); 5424 CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr,
5425 instr->context());
5367 } else { 5426 } else {
5368 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr); 5427 CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr,
5428 instr->context());
5369 } 5429 }
5370 __ StoreToSafepointRegisterSlot(r0, result); 5430 __ StoreToSafepointRegisterSlot(r0, result);
5371 } 5431 }
5372 5432
5373 5433
5374 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { 5434 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
5375 ASSERT(ToRegister(instr->value()).is(r0)); 5435 ASSERT(ToRegister(instr->value()).is(r0));
5376 __ push(r0); 5436 __ push(r0);
5377 CallRuntime(Runtime::kToFastProperties, 1, instr); 5437 CallRuntime(Runtime::kToFastProperties, 1, instr);
5378 } 5438 }
5379 5439
5380 5440
5381 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { 5441 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
5442 ASSERT(ToRegister(instr->context()).is(cp));
5382 Label materialized; 5443 Label materialized;
5383 // Registers will be used as follows: 5444 // Registers will be used as follows:
5384 // r6 = literals array. 5445 // r6 = literals array.
5385 // r1 = regexp literal. 5446 // r1 = regexp literal.
5386 // r0 = regexp literal clone. 5447 // r0 = regexp literal clone.
5387 // r2-5 are used as temporaries. 5448 // r2-5 are used as temporaries.
5388 int literal_offset = 5449 int literal_offset =
5389 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); 5450 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
5390 __ LoadHeapObject(r6, instr->hydrogen()->literals()); 5451 __ LoadHeapObject(r6, instr->hydrogen()->literals());
5391 __ ldr(r1, FieldMemOperand(r6, literal_offset)); 5452 __ ldr(r1, FieldMemOperand(r6, literal_offset));
(...skipping 23 matching lines...) Expand all
5415 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); 5476 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
5416 __ pop(r1); 5477 __ pop(r1);
5417 5478
5418 __ bind(&allocated); 5479 __ bind(&allocated);
5419 // Copy the content into the newly allocated memory. 5480 // Copy the content into the newly allocated memory.
5420 __ CopyFields(r0, r1, double_scratch0(), size / kPointerSize); 5481 __ CopyFields(r0, r1, double_scratch0(), size / kPointerSize);
5421 } 5482 }
5422 5483
5423 5484
5424 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { 5485 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
5486 ASSERT(ToRegister(instr->context()).is(cp));
5425 // Use the fast case closure allocation code that allocates in new 5487 // Use the fast case closure allocation code that allocates in new
5426 // space for nested functions that don't need literals cloning. 5488 // space for nested functions that don't need literals cloning.
5427 bool pretenure = instr->hydrogen()->pretenure(); 5489 bool pretenure = instr->hydrogen()->pretenure();
5428 if (!pretenure && instr->hydrogen()->has_no_literals()) { 5490 if (!pretenure && instr->hydrogen()->has_no_literals()) {
5429 FastNewClosureStub stub(instr->hydrogen()->language_mode(), 5491 FastNewClosureStub stub(instr->hydrogen()->language_mode(),
5430 instr->hydrogen()->is_generator()); 5492 instr->hydrogen()->is_generator());
5431 __ mov(r2, Operand(instr->hydrogen()->shared_info())); 5493 __ mov(r2, Operand(instr->hydrogen()->shared_info()));
5432 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 5494 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
5433 } else { 5495 } else {
5434 __ mov(r2, Operand(instr->hydrogen()->shared_info())); 5496 __ mov(r2, Operand(instr->hydrogen()->shared_info()));
(...skipping 174 matching lines...) Expand 10 before | Expand all | Expand 10 after
5609 } 5671 }
5610 5672
5611 5673
5612 void LCodeGen::DoDummyUse(LDummyUse* instr) { 5674 void LCodeGen::DoDummyUse(LDummyUse* instr) {
5613 // Nothing to see here, move on! 5675 // Nothing to see here, move on!
5614 } 5676 }
5615 5677
5616 5678
5617 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { 5679 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
5618 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); 5680 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
5681 LoadContextFromDeferred(instr->context());
5619 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); 5682 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
5620 RecordSafepointWithLazyDeopt( 5683 RecordSafepointWithLazyDeopt(
5621 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); 5684 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
5622 ASSERT(instr->HasEnvironment()); 5685 ASSERT(instr->HasEnvironment());
5623 LEnvironment* env = instr->environment(); 5686 LEnvironment* env = instr->environment();
5624 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 5687 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5625 } 5688 }
5626 5689
5627 5690
5628 void LCodeGen::DoStackCheck(LStackCheck* instr) { 5691 void LCodeGen::DoStackCheck(LStackCheck* instr) {
(...skipping 13 matching lines...) Expand all
5642 LEnvironment* env = instr->environment(); 5705 LEnvironment* env = instr->environment();
5643 // There is no LLazyBailout instruction for stack-checks. We have to 5706 // There is no LLazyBailout instruction for stack-checks. We have to
5644 // prepare for lazy deoptimization explicitly here. 5707 // prepare for lazy deoptimization explicitly here.
5645 if (instr->hydrogen()->is_function_entry()) { 5708 if (instr->hydrogen()->is_function_entry()) {
5646 // Perform stack overflow check. 5709 // Perform stack overflow check.
5647 Label done; 5710 Label done;
5648 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 5711 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
5649 __ cmp(sp, Operand(ip)); 5712 __ cmp(sp, Operand(ip));
5650 __ b(hs, &done); 5713 __ b(hs, &done);
5651 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); 5714 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
5715 ASSERT(instr->context()->IsRegister());
5716 ASSERT(ToRegister(instr->context()).is(cp));
5652 CallCode(isolate()->builtins()->StackCheck(), 5717 CallCode(isolate()->builtins()->StackCheck(),
5653 RelocInfo::CODE_TARGET, 5718 RelocInfo::CODE_TARGET,
5654 instr); 5719 instr);
5655 EnsureSpaceForLazyDeopt(); 5720 EnsureSpaceForLazyDeopt();
5656 last_lazy_deopt_pc_ = masm()->pc_offset(); 5721 last_lazy_deopt_pc_ = masm()->pc_offset();
5657 __ bind(&done); 5722 __ bind(&done);
5658 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); 5723 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5659 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 5724 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5660 } else { 5725 } else {
5661 ASSERT(instr->hydrogen()->is_backwards_branch()); 5726 ASSERT(instr->hydrogen()->is_backwards_branch());
ulan 2013/09/26 10:06:33 Indentation is off.
Rodolph Perfetta 2013/09/26 12:20:12 Done.
5662 // Perform stack overflow check if this goto needs it before jumping. 5727 // Perform stack overflow check if this goto needs it before jumping.
5663 DeferredStackCheck* deferred_stack_check = 5728 DeferredStackCheck* deferred_stack_check =
5664 new(zone()) DeferredStackCheck(this, instr); 5729 new(zone()) DeferredStackCheck(this, instr);
5665 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 5730 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
5666 __ cmp(sp, Operand(ip)); 5731 __ cmp(sp, Operand(ip));
5667 __ b(lo, deferred_stack_check->entry()); 5732 __ b(lo, deferred_stack_check->entry());
5668 EnsureSpaceForLazyDeopt(); 5733 EnsureSpaceForLazyDeopt();
5669 last_lazy_deopt_pc_ = masm()->pc_offset(); 5734 last_lazy_deopt_pc_ = masm()->pc_offset();
5670 __ bind(instr->done_label()); 5735 __ bind(instr->done_label());
5671 deferred_stack_check->SetExit(instr->done_label()); 5736 deferred_stack_check->SetExit(instr->done_label());
5672 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); 5737 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5673 // Don't record a deoptimization index for the safepoint here. 5738 // Don't record a deoptimization index for the safepoint here.
5674 // This will be done explicitly when emitting call and the safepoint in 5739 // This will be done explicitly when emitting call and the safepoint in
5675 // the deferred code. 5740 // the deferred code.
5676 } 5741 }
5677 } 5742 }
5678 5743
5679 5744
5680 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { 5745 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
5681 // This is a pseudo-instruction that ensures that the environment here is 5746 // This is a pseudo-instruction that ensures that the environment here is
5682 // properly registered for deoptimization and records the assembler's PC 5747 // properly registered for deoptimization and records the assembler's PC
5683 // offset. 5748 // offset.
5684 LEnvironment* environment = instr->environment(); 5749 LEnvironment* environment = instr->environment();
5685 5750
(...skipping 96 matching lines...) Expand 10 before | Expand all | Expand 10 after
5782 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); 5847 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index));
5783 __ ldr(result, FieldMemOperand(scratch, 5848 __ ldr(result, FieldMemOperand(scratch,
5784 FixedArray::kHeaderSize - kPointerSize)); 5849 FixedArray::kHeaderSize - kPointerSize));
5785 __ bind(&done); 5850 __ bind(&done);
5786 } 5851 }
5787 5852
5788 5853
5789 #undef __ 5854 #undef __
5790 5855
5791 } } // namespace v8::internal 5856 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/arm/lithium-codegen-arm.h ('k') | src/arm/macro-assembler-arm.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698