Chromium Code Reviews

Unified Diff: src/ppc/builtins-ppc.cc ('-' lines: the ARM builtins code used as the comparison baseline; '+' lines: the new PPC port)

Issue 714093002: PowerPC specific sub-directories. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 1 month ago
-// Copyright 2012 the V8 project authors. All rights reserved.
+// Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

-#if V8_TARGET_ARCH_ARM
+#if V8_TARGET_ARCH_PPC

 #include "src/codegen.h"
 #include "src/debug.h"
 #include "src/deoptimizer.h"
 #include "src/full-codegen.h"
 #include "src/runtime/runtime.h"

 namespace v8 {
 namespace internal {


 #define __ ACCESS_MASM(masm)

-void Builtins::Generate_Adaptor(MacroAssembler* masm,
-                                CFunctionId id,
+void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                 BuiltinExtraArguments extra_args) {
   // ----------- S t a t e -------------
-  //  -- r0                 : number of arguments excluding receiver
-  //  -- r1                 : called function (only guaranteed when
+  //  -- r3                 : number of arguments excluding receiver
+  //  -- r4                 : called function (only guaranteed when
   //                          extra_args requires it)
   //  -- cp                 : context
   //  -- sp[0]              : last argument
   //  -- ...
-  //  -- sp[4 * (argc - 1)] : first argument (argc == r0)
+  //  -- sp[4 * (argc - 1)] : first argument (argc == r3)
   //  -- sp[4 * argc]       : receiver
   // -----------------------------------

   // Insert extra arguments.
   int num_extra_args = 0;
   if (extra_args == NEEDS_CALLED_FUNCTION) {
     num_extra_args = 1;
-    __ push(r1);
+    __ push(r4);
   } else {
     DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
   }

-  // JumpToExternalReference expects r0 to contain the number of arguments
+  // JumpToExternalReference expects r3 to contain the number of arguments
   // including the receiver and the extra arguments.
-  __ add(r0, r0, Operand(num_extra_args + 1));
+  __ addi(r3, r3, Operand(num_extra_args + 1));
   __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
 }

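Note on the pattern that runs through the whole file: the PPC ABI treats r0 as special in many instructions (it reads as zero in addressing forms) and passes arguments in r3-r10, so every ARM register in these stubs shifts by three (r0 becomes r3, r1 becomes r4, and so on), with r0, r11 and ip kept as scratch. An illustrative side table of the renaming, not part of the patch itself:

    // Illustrative only -- the register renaming applied throughout this
    // port (not code from the patch). On PPC, r3 is both the first
    // argument register and the return-value register.
    struct RegisterMapping { const char* arm; const char* ppc; };
    static const RegisterMapping kPortMap[] = {
        {"r0", "r3"},  // argc / result
        {"r1", "r4"},  // called or constructor function
        {"r2", "r5"},  // allocation site, scratch
        {"r3", "r6"},  // scratch
        {"r4", "r7"},  // scratch / JSObject
        {"r5", "r8"},  // scratch
        {"r6", "r9"},  // scratch
        // r0, r11 and ip stay true scratch registers on the PPC side.
    };
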
 // Load the built-in InternalArray function from the current context.
 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                               Register result) {
   // Load the native context.

-  __ ldr(result,
-         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
-  __ ldr(result,
-         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
+  __ LoadP(result,
+           MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+  __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
   // Load the InternalArray function from the native context.
-  __ ldr(result,
-         MemOperand(result,
-                    Context::SlotOffset(
-                        Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
+  __ LoadP(result,
+           MemOperand(result, Context::SlotOffset(
+                                  Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
 }

 // Load the built-in Array function from the current context.
 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
   // Load the native context.

-  __ ldr(result,
-         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
-  __ ldr(result,
-         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
+  __ LoadP(result,
+           MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+  __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
   // Load the Array function from the native context.
-  __ ldr(result,
-         MemOperand(result,
-                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
+  __ LoadP(
+      result,
+      MemOperand(result, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
 }

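Both loaders above walk the same chain: current context, then global object, then native context, then the builtin's slot. The only PPC change is LoadP, the pointer-width load (which, if the usual definition is assumed, compiles to a 32-bit load on 32-bit targets and a 64-bit load on 64-bit ones). A sketch of the shared shape as one hypothetical helper, not in the patch:

    // Hypothetical helper (illustrative only) showing the shape shared by
    // the two loaders above; 'slot' would be e.g.
    // Context::ARRAY_FUNCTION_INDEX. Reuses the file's __ macro.
    static void GenerateLoadNativeContextSlot(MacroAssembler* masm,
                                              Register result, int slot) {
      // current context -> global object
      __ LoadP(result,
               MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
      // global object -> native context
      __ LoadP(result,
               FieldMemOperand(result, GlobalObject::kNativeContextOffset));
      // native context -> builtin function
      __ LoadP(result, MemOperand(result, Context::SlotOffset(slot)));
    }
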
 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
   // ----------- S t a t e -------------
-  //  -- r0     : number of arguments
+  //  -- r3     : number of arguments
   //  -- lr     : return address
   //  -- sp[...]: constructor arguments
   // -----------------------------------
   Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

   // Get the InternalArray function.
-  GenerateLoadInternalArrayFunction(masm, r1);
+  GenerateLoadInternalArrayFunction(masm, r4);

   if (FLAG_debug_code) {
     // Initial map for the builtin InternalArray functions should be maps.
-    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
-    __ SmiTst(r2);
-    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
-    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
+    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
+    __ TestIfSmi(r5, r0);
+    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
+    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
     __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
   }

   // Run the native code for the InternalArray function called as a normal
   // function.
   // tail call a stub
   InternalArrayConstructorStub stub(masm->isolate());
   __ TailCallStub(&stub);
 }

 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
   // ----------- S t a t e -------------
-  //  -- r0     : number of arguments
+  //  -- r3     : number of arguments
   //  -- lr     : return address
   //  -- sp[...]: constructor arguments
   // -----------------------------------
   Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

   // Get the Array function.
-  GenerateLoadArrayFunction(masm, r1);
+  GenerateLoadArrayFunction(masm, r4);

   if (FLAG_debug_code) {
     // Initial map for the builtin Array functions should be maps.
-    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
-    __ SmiTst(r2);
-    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
-    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
+    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
+    __ TestIfSmi(r5, r0);
+    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
+    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
     __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
   }

   // Run the native code for the Array function called as a normal function.
   // tail call a stub
-  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
   ArrayConstructorStub stub(masm->isolate());
   __ TailCallStub(&stub);
 }

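The debug blocks in the two Array builtins above show how condition handling moves from ARM's single flags register to PPC's condition-register fields: TestIfSmi is (assuming the usual V8 PPC definition) a record-form AND whose verdict lands in cr0, which is why the first Assert names cr0 explicitly while the CompareObjectType-based Assert uses the default field. A sketch of the assumed expansion:

    // Sketch (assumption) of what TestIfSmi expands to on PPC: "andi." is
    // the record-form AND immediate; it writes the masked value into a
    // scratch register and sets cr0. A Smi has tag bit 0 == 0, so cr0 "eq"
    // means Smi and "ne" means non-Smi -- hence Assert(ne, ..., cr0) above.
    static void TestIfSmiSketch(MacroAssembler* masm, Register value) {
      __ andi(r0, value, Operand(kSmiTagMask));  // record form: sets cr0
    }
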
 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   // ----------- S t a t e -------------
-  //  -- r0                     : number of arguments
-  //  -- r1                     : constructor function
+  //  -- r3                     : number of arguments
+  //  -- r4                     : constructor function
   //  -- lr                     : return address
   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
   //  -- sp[argc * 4]           : receiver
   // -----------------------------------
   Counters* counters = masm->isolate()->counters();
-  __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);
+  __ IncrementCounter(counters->string_ctor_calls(), 1, r5, r6);

-  Register function = r1;
+  Register function = r4;
   if (FLAG_debug_code) {
-    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
-    __ cmp(function, Operand(r2));
+    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r5);
+    __ cmp(function, r5);
     __ Assert(eq, kUnexpectedStringFunction);
   }

-  // Load the first arguments in r0 and get rid of the rest.
+  // Load the first arguments in r3 and get rid of the rest.
   Label no_arguments;
-  __ cmp(r0, Operand::Zero());
-  __ b(eq, &no_arguments);
+  __ cmpi(r3, Operand::Zero());
+  __ beq(&no_arguments);
   // First args = sp[(argc - 1) * 4].
-  __ sub(r0, r0, Operand(1));
-  __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
+  __ subi(r3, r3, Operand(1));
+  __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
+  __ add(sp, sp, r3);
+  __ LoadP(r3, MemOperand(sp));
   // sp now points to args[0], drop args[0] + receiver.
   __ Drop(2);

-  Register argument = r2;
+  Register argument = r5;
   Label not_cached, argument_is_string;
-  __ LookupNumberStringCache(r0,        // Input.
+  __ LookupNumberStringCache(r3,        // Input.
                              argument,  // Result.
-                             r3,        // Scratch.
-                             r4,        // Scratch.
-                             r5,        // Scratch.
+                             r6,        // Scratch.
+                             r7,        // Scratch.
+                             r8,        // Scratch.
                              &not_cached);
-  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
+  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r6, r7);
   __ bind(&argument_is_string);

   // ----------- S t a t e -------------
-  //  -- r2     : argument converted to string
-  //  -- r1     : constructor function
+  //  -- r5     : argument converted to string
+  //  -- r4     : constructor function
   //  -- lr     : return address
   // -----------------------------------

   Label gc_required;
   __ Allocate(JSValue::kSize,
-              r0,  // Result.
-              r3,  // Scratch.
-              r4,  // Scratch.
-              &gc_required,
-              TAG_OBJECT);
+              r3,  // Result.
+              r6,  // Scratch.
+              r7,  // Scratch.
+              &gc_required, TAG_OBJECT);

   // Initialising the String Object.
-  Register map = r3;
-  __ LoadGlobalFunctionInitialMap(function, map, r4);
+  Register map = r6;
+  __ LoadGlobalFunctionInitialMap(function, map, r7);
   if (FLAG_debug_code) {
-    __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
-    __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
+    __ lbz(r7, FieldMemOperand(map, Map::kInstanceSizeOffset));
+    __ cmpi(r7, Operand(JSValue::kSize >> kPointerSizeLog2));
     __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
-    __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
-    __ cmp(r4, Operand::Zero());
+    __ lbz(r7, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
+    __ cmpi(r7, Operand::Zero());
     __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
   }
-  __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));
+  __ StoreP(map, FieldMemOperand(r3, HeapObject::kMapOffset), r0);

-  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
-  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
-  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
+  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
+  __ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
+  __ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0);

-  __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset));
+  __ StoreP(argument, FieldMemOperand(r3, JSValue::kValueOffset), r0);

   // Ensure the object is fully initialized.
   STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

   __ Ret();

   // The argument was not found in the number to string cache. Check
   // if it's a string already before calling the conversion builtin.
   Label convert_argument;
   __ bind(&not_cached);
-  __ JumpIfSmi(r0, &convert_argument);
+  __ JumpIfSmi(r3, &convert_argument);

   // Is it a String?
-  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
-  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
+  __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
+  __ lbz(r6, FieldMemOperand(r5, Map::kInstanceTypeOffset));
   STATIC_ASSERT(kNotStringTag != 0);
-  __ tst(r3, Operand(kIsNotStringMask));
-  __ b(ne, &convert_argument);
-  __ mov(argument, r0);
-  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
+  __ andi(r0, r6, Operand(kIsNotStringMask));
+  __ bne(&convert_argument, cr0);
+  __ mr(argument, r3);
+  __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
   __ b(&argument_is_string);

-  // Invoke the conversion builtin and put the result into r2.
+  // Invoke the conversion builtin and put the result into r5.
   __ bind(&convert_argument);
   __ push(function);  // Preserve the function.
-  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
+  __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
   {
     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
-    __ push(r0);
+    __ push(r3);
     __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
   }
   __ pop(function);
-  __ mov(argument, r0);
+  __ mr(argument, r3);
   __ b(&argument_is_string);

-  // Load the empty string into r2, remove the receiver from the
+  // Load the empty string into r5, remove the receiver from the
   // stack, and jump back to the case where the argument is a string.
   __ bind(&no_arguments);
   __ LoadRoot(argument, Heap::kempty_stringRootIndex);
   __ Drop(1);
   __ b(&argument_is_string);

   // At this point the argument is already a string. Call runtime to
   // create a string wrapper.
   __ bind(&gc_required);
-  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
+  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7);
   {
     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
     __ push(argument);
     __ CallRuntime(Runtime::kNewStringWrapper, 1);
   }
   __ Ret();
 }

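The stub above fills the JSValue wrapper with exactly four pointer-sized stores, which is what the STATIC_ASSERT pins to JSValue::kSize. A sketch of the resulting layout; the struct is illustrative only, with field order taken from the offsets used above:

    // Illustrative mirror of the JSValue wrapper the stub allocates and
    // fills (offsets: HeapObject::kMapOffset, the JSObject properties and
    // elements slots, then JSValue::kValueOffset).
    struct JSValueLayoutSketch {
      void* map;         // initial map of the String function
      void* properties;  // empty_fixed_array
      void* elements;    // empty_fixed_array
      void* value;       // the wrapped string
    };
    STATIC_ASSERT(sizeof(JSValueLayoutSketch) == 4 * sizeof(void*));
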
-static void CallRuntimePassFunction(
-    MacroAssembler* masm, Runtime::FunctionId function_id) {
+static void CallRuntimePassFunction(MacroAssembler* masm,
+                                    Runtime::FunctionId function_id) {
   FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
   // Push a copy of the function onto the stack.
-  __ push(r1);
   // Push function as parameter to the runtime call.
-  __ Push(r1);
+  __ Push(r4, r4);

   __ CallRuntime(function_id, 1);
   // Restore receiver.
-  __ pop(r1);
+  __ Pop(r4);
 }

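The PPC side collapses ARM's two pushes into one Push(r4, r4): the multi-register Push overload can fold several pushes into a single stack-pointer update using the store-with-update instruction. A sketch of the assumed expansion (not verified against the patch's macro-assembler):

    // Sketch (assumption) of the two-register Push on PPC: StorePU stores
    // and updates the base register in one instruction, so the pair costs
    // one sp adjustment plus two stores instead of two separate pushes.
    // src1 ends up at the higher address, as if pushed first.
    static void PushTwoSketch(MacroAssembler* masm, Register src1,
                              Register src2) {
      __ StorePU(src2, MemOperand(sp, -2 * kPointerSize));  // sp -= 2 words
      __ StoreP(src1, MemOperand(sp, kPointerSize));        // store above it
    }
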
 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
-  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
-  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
-  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
-  __ Jump(r2);
+  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
+  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
+  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
+  __ JumpToJSEntry(ip);
 }


 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
-  __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
-  __ Jump(r0);
+  __ addi(ip, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
+  __ JumpToJSEntry(ip);
 }

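Both helpers above now route through JumpToJSEntry(ip) rather than ARM's plain Jump: 64-bit ELF conventions on PPC expect an indirectly called function's entry address in r12 (ip here), commonly so the callee can derive its TOC/constant-pool pointer from it. A sketch of the tail-call idiom this presumably expands to:

    // Sketch (assumption) of the JumpToJSEntry expansion: an indirect tail
    // call through the count register, leaving the entry address in ip.
    static void JumpToJSEntrySketch(MacroAssembler* masm, Register target) {
      DCHECK(target.is(ip));  // ABI assumption: entry point expected in r12
      __ mtctr(target);       // CTR <- code entry point
      __ bctr();              // branch to CTR without linking (tail call)
    }
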
 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
   // Checking whether the queued function is ready for install is optional,
   // since we come across interrupts and stack checks elsewhere. However,
   // not checking may delay installing ready functions, and always checking
   // would be quite expensive. A good compromise is to first check against
   // stack limit as a cue for an interrupt signal.
   Label ok;
   __ LoadRoot(ip, Heap::kStackLimitRootIndex);
-  __ cmp(sp, Operand(ip));
-  __ b(hs, &ok);
+  __ cmpl(sp, ip);
+  __ bge(&ok);

   CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
   GenerateTailCallToReturnedCode(masm);

   __ bind(&ok);
   GenerateTailCallToSharedCode(masm);
 }

 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
                                            bool create_memento) {
   // ----------- S t a t e -------------
-  //  -- r0     : number of arguments
-  //  -- r1     : constructor function
-  //  -- r2     : allocation site or undefined
+  //  -- r3     : number of arguments
+  //  -- r4     : constructor function
+  //  -- r5     : allocation site or undefined
   //  -- lr     : return address
   //  -- sp[...]: constructor arguments
   // -----------------------------------

   // Should never create mementos for api functions.
   DCHECK(!is_api_function || !create_memento);

   Isolate* isolate = masm->isolate();

   // Enter a construct frame.
   {
     FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

     if (create_memento) {
-      __ AssertUndefinedOrAllocationSite(r2, r3);
-      __ push(r2);
+      __ AssertUndefinedOrAllocationSite(r5, r6);
+      __ push(r5);
     }

     // Preserve the two incoming parameters on the stack.
-    __ SmiTag(r0);
-    __ push(r0);  // Smi-tagged arguments count.
-    __ push(r1);  // Constructor function.
+    __ SmiTag(r3);
+    __ push(r3);  // Smi-tagged arguments count.
+    __ push(r4);  // Constructor function.

     // Try to allocate the object without transitioning into C code. If any of
     // the preconditions is not met, the code bails out to the runtime call.
     Label rt_call, allocated;
     if (FLAG_inline_new) {
       Label undo_allocation;
       ExternalReference debug_step_in_fp =
           ExternalReference::debug_step_in_fp_address(isolate);
-      __ mov(r2, Operand(debug_step_in_fp));
-      __ ldr(r2, MemOperand(r2));
-      __ tst(r2, r2);
-      __ b(ne, &rt_call);
+      __ mov(r5, Operand(debug_step_in_fp));
+      __ LoadP(r5, MemOperand(r5));
+      __ cmpi(r5, Operand::Zero());
+      __ bne(&rt_call);

       // Load the initial map and verify that it is in fact a map.
-      // r1: constructor function
-      __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
-      __ JumpIfSmi(r2, &rt_call);
-      __ CompareObjectType(r2, r3, r4, MAP_TYPE);
-      __ b(ne, &rt_call);
+      // r4: constructor function
+      __ LoadP(r5,
+               FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
+      __ JumpIfSmi(r5, &rt_call);
+      __ CompareObjectType(r5, r6, r7, MAP_TYPE);
+      __ bne(&rt_call);

       // Check that the constructor is not constructing a JSFunction (see
       // comments in Runtime_NewObject in runtime.cc). In which case the
       // initial map's instance type would be JS_FUNCTION_TYPE.
-      // r1: constructor function
-      // r2: initial map
-      __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
-      __ b(eq, &rt_call);
+      // r4: constructor function
+      // r5: initial map
+      __ CompareInstanceType(r5, r6, JS_FUNCTION_TYPE);
+      __ beq(&rt_call);

       if (!is_api_function) {
         Label allocate;
-        MemOperand bit_field3 = FieldMemOperand(r2, Map::kBitField3Offset);
+        MemOperand bit_field3 = FieldMemOperand(r5, Map::kBitField3Offset);
         // Check if slack tracking is enabled.
-        __ ldr(r4, bit_field3);
-        __ DecodeField<Map::ConstructionCount>(r3, r4);
-        __ cmp(r3, Operand(JSFunction::kNoSlackTracking));
-        __ b(eq, &allocate);
+        __ lwz(r7, bit_field3);
+        __ DecodeField<Map::ConstructionCount>(r11, r7);
+        STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
+        __ cmpi(r11, Operand::Zero());  // JSFunction::kNoSlackTracking
+        __ beq(&allocate);
         // Decrease generous allocation count.
-        __ sub(r4, r4, Operand(1 << Map::ConstructionCount::kShift));
-        __ str(r4, bit_field3);
-        __ cmp(r3, Operand(JSFunction::kFinishSlackTracking));
-        __ b(ne, &allocate);
+        __ Add(r7, r7, -(1 << Map::ConstructionCount::kShift), r0);
+        __ stw(r7, bit_field3);
+        __ cmpi(r11, Operand(JSFunction::kFinishSlackTracking));
+        __ bne(&allocate);

-        __ push(r1);
+        __ push(r4);

-        __ Push(r2, r1);  // r1 = constructor
+        __ Push(r5, r4);  // r4 = constructor
         __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

-        __ pop(r2);
-        __ pop(r1);
+        __ Pop(r4, r5);

         __ bind(&allocate);
       }

       // Now allocate the JSObject on the heap.
-      // r1: constructor function
-      // r2: initial map
-      __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
+      // r4: constructor function
+      // r5: initial map
+      __ lbz(r6, FieldMemOperand(r5, Map::kInstanceSizeOffset));
       if (create_memento) {
-        __ add(r3, r3, Operand(AllocationMemento::kSize / kPointerSize));
+        __ addi(r6, r6, Operand(AllocationMemento::kSize / kPointerSize));
       }

-      __ Allocate(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);
+      __ Allocate(r6, r7, r8, r9, &rt_call, SIZE_IN_WORDS);

       // Allocated the JSObject, now initialize the fields. Map is set to
       // initial map and properties and elements are set to empty fixed array.
-      // r1: constructor function
-      // r2: initial map
-      // r3: object size (not including memento if create_memento)
-      // r4: JSObject (not tagged)
-      __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
-      __ mov(r5, r4);
-      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
-      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
-      DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
-      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
-      DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);
-      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
+      // r4: constructor function
+      // r5: initial map
+      // r6: object size (not including memento if create_memento)
+      // r7: JSObject (not tagged)
+      __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
+      __ mr(r8, r7);
+      __ StoreP(r5, MemOperand(r8, JSObject::kMapOffset));
+      __ StoreP(r9, MemOperand(r8, JSObject::kPropertiesOffset));
+      __ StoreP(r9, MemOperand(r8, JSObject::kElementsOffset));
+      __ addi(r8, r8, Operand(JSObject::kElementsOffset + kPointerSize));
+
+      __ ShiftLeftImm(r9, r6, Operand(kPointerSizeLog2));
+      __ add(r9, r7, r9);  // End of object.

       // Fill all the in-object properties with the appropriate filler.
-      // r1: constructor function
-      // r2: initial map
-      // r3: object size (in words, including memento if create_memento)
-      // r4: JSObject (not tagged)
-      // r5: First in-object property of JSObject (not tagged)
+      // r4: constructor function
+      // r5: initial map
+      // r6: object size (in words, including memento if create_memento)
+      // r7: JSObject (not tagged)
+      // r8: First in-object property of JSObject (not tagged)
+      // r9: End of object
       DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
-      __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
+      __ LoadRoot(r10, Heap::kUndefinedValueRootIndex);

       if (!is_api_function) {
         Label no_inobject_slack_tracking;

         // Check if slack tracking is enabled.
-        __ ldr(ip, FieldMemOperand(r2, Map::kBitField3Offset));
-        __ DecodeField<Map::ConstructionCount>(ip);
-        __ cmp(ip, Operand(JSFunction::kNoSlackTracking));
-        __ b(eq, &no_inobject_slack_tracking);
+        STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
+        __ cmpi(r11, Operand::Zero());  // JSFunction::kNoSlackTracking
+        __ beq(&no_inobject_slack_tracking);

         // Allocate object with a slack.
-        __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
-        __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
-                kBitsPerByte);
-        __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
-        // r0: offset of first field after pre-allocated fields
+        __ lbz(r3, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset));
         if (FLAG_debug_code) {
-          __ add(ip, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
-          __ cmp(r0, ip);
+          __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
+          __ add(r0, r8, r0);
+          // r0: offset of first field after pre-allocated fields
+          __ cmp(r0, r9);
           __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
         }
-        __ InitializeFieldsWithFiller(r5, r0, r6);
+        {
+          Label done;
+          __ cmpi(r3, Operand::Zero());
+          __ beq(&done);
+          __ InitializeNFieldsWithFiller(r8, r3, r10);
+          __ bind(&done);
+        }
         // To allow for truncation.
-        __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
+        __ LoadRoot(r10, Heap::kOnePointerFillerMapRootIndex);
         // Fill the remaining fields with one pointer filler map.

         __ bind(&no_inobject_slack_tracking);
       }

       if (create_memento) {
-        __ sub(ip, r3, Operand(AllocationMemento::kSize / kPointerSize));
-        __ add(r0, r4, Operand(ip, LSL, kPointerSizeLog2));  // End of object.
-        __ InitializeFieldsWithFiller(r5, r0, r6);
+        __ subi(r3, r9, Operand(AllocationMemento::kSize));
+        __ InitializeFieldsWithFiller(r8, r3, r10);

         // Fill in memento fields.
-        // r5: points to the allocated but uninitialized memento.
-        __ LoadRoot(r6, Heap::kAllocationMementoMapRootIndex);
-        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
-        __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
+        // r8: points to the allocated but uninitialized memento.
+        __ LoadRoot(r10, Heap::kAllocationMementoMapRootIndex);
+        __ StoreP(r10, MemOperand(r8, AllocationMemento::kMapOffset));
         // Load the AllocationSite
-        __ ldr(r6, MemOperand(sp, 2 * kPointerSize));
-        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
-        __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
+        __ LoadP(r10, MemOperand(sp, 2 * kPointerSize));
+        __ StoreP(r10,
+                  MemOperand(r8, AllocationMemento::kAllocationSiteOffset));
+        __ addi(r8, r8, Operand(AllocationMemento::kAllocationSiteOffset +
+                                kPointerSize));
       } else {
-        __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
-        __ InitializeFieldsWithFiller(r5, r0, r6);
+        __ InitializeFieldsWithFiller(r8, r9, r10);
       }

       // Add the object tag to make the JSObject real, so that we can continue
       // and jump into the continuation code at any time from now on. Any
       // failures need to undo the allocation, so that the heap is in a
       // consistent state and verifiable.
-      __ add(r4, r4, Operand(kHeapObjectTag));
+      __ addi(r7, r7, Operand(kHeapObjectTag));

       // Check if a non-empty properties array is needed. Continue with
       // allocated object if not; fall through to runtime call if it is.
-      // r1: constructor function
-      // r4: JSObject
-      // r5: start of next object (not tagged)
-      __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
+      // r4: constructor function
+      // r7: JSObject
+      // r8: start of next object (not tagged)
+      __ lbz(r6, FieldMemOperand(r5, Map::kUnusedPropertyFieldsOffset));
       // The field instance sizes contains both pre-allocated property fields
       // and in-object properties.
-      __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
-      __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
-              kBitsPerByte);
-      __ add(r3, r3, Operand(r6));
-      __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte,
-              kBitsPerByte);
-      __ sub(r3, r3, Operand(r6), SetCC);
+      __ lbz(r0, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset));
+      __ add(r6, r6, r0);
+      __ lbz(r0, FieldMemOperand(r5, Map::kInObjectPropertiesOffset));
+      __ sub(r6, r6, r0, LeaveOE, SetRC);

       // Done if no extra properties are to be allocated.
-      __ b(eq, &allocated);
-      __ Assert(pl, kPropertyAllocationCountFailed);
+      __ beq(&allocated, cr0);
+      __ Assert(ge, kPropertyAllocationCountFailed, cr0);

       // Scale the number of elements by pointer size and add the header for
       // FixedArrays to the start of the next object calculation from above.
-      // r1: constructor
-      // r3: number of elements in properties array
-      // r4: JSObject
-      // r5: start of next object
-      __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
+      // r4: constructor
+      // r6: number of elements in properties array
+      // r7: JSObject
+      // r8: start of next object
+      __ addi(r3, r6, Operand(FixedArray::kHeaderSize / kPointerSize));
       __ Allocate(
-          r0,
-          r5,
-          r6,
-          r2,
-          &undo_allocation,
+          r3, r8, r9, r5, &undo_allocation,
           static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

       // Initialize the FixedArray.
-      // r1: constructor
-      // r3: number of elements in properties array
-      // r4: JSObject
-      // r5: FixedArray (not tagged)
-      __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
-      __ mov(r2, r5);
+      // r4: constructor
+      // r6: number of elements in properties array
+      // r7: JSObject
+      // r8: FixedArray (not tagged)
+      __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
+      __ mr(r5, r8);
       DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
-      __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
+      __ StoreP(r9, MemOperand(r5));
       DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
-      __ SmiTag(r0, r3);
-      __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
+      __ SmiTag(r3, r6);
+      __ StoreP(r3, MemOperand(r5, kPointerSize));
+      __ addi(r5, r5, Operand(2 * kPointerSize));

       // Initialize the fields to undefined.
-      // r1: constructor function
-      // r2: First element of FixedArray (not tagged)
-      // r3: number of elements in properties array
-      // r4: JSObject
-      // r5: FixedArray (not tagged)
-      __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
+      // r4: constructor function
+      // r5: First element of FixedArray (not tagged)
+      // r6: number of elements in properties array
+      // r7: JSObject
+      // r8: FixedArray (not tagged)
       DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
-      { Label loop, entry;
-        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
-        __ b(&entry);
-        __ bind(&loop);
-        __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
-        __ bind(&entry);
-        __ cmp(r2, r6);
-        __ b(lt, &loop);
+      {
+        Label done;
+        __ cmpi(r6, Operand::Zero());
+        __ beq(&done);
+        if (!is_api_function || create_memento) {
+          __ LoadRoot(r10, Heap::kUndefinedValueRootIndex);
+        } else if (FLAG_debug_code) {
+          __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
+          __ cmp(r10, r11);
+          __ Assert(eq, kUndefinedValueNotLoaded);
+        }
+        __ InitializeNFieldsWithFiller(r5, r6, r10);
+        __ bind(&done);
       }

       // Store the initialized FixedArray into the properties field of
       // the JSObject
-      // r1: constructor function
-      // r4: JSObject
-      // r5: FixedArray (not tagged)
-      __ add(r5, r5, Operand(kHeapObjectTag));  // Add the heap tag.
-      __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));
+      // r4: constructor function
+      // r7: JSObject
+      // r8: FixedArray (not tagged)
+      __ addi(r8, r8, Operand(kHeapObjectTag));  // Add the heap tag.
+      __ StoreP(r8, FieldMemOperand(r7, JSObject::kPropertiesOffset), r0);

       // Continue with JSObject being successfully allocated
-      // r1: constructor function
-      // r4: JSObject
-      __ jmp(&allocated);
+      // r4: constructor function
+      // r7: JSObject
+      __ b(&allocated);

       // Undo the setting of the new top so that the heap is verifiable. For
       // example, the map's unused properties potentially do not match the
       // allocated objects unused properties.
-      // r4: JSObject (previous new top)
+      // r7: JSObject (previous new top)
       __ bind(&undo_allocation);
-      __ UndoAllocationInNewSpace(r4, r5);
+      __ UndoAllocationInNewSpace(r7, r8);
     }

     // Allocate the new receiver object using the runtime call.
-    // r1: constructor function
+    // r4: constructor function
     __ bind(&rt_call);
     if (create_memento) {
       // Get the cell or allocation site.
-      __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
-      __ push(r2);
+      __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
+      __ push(r5);
     }

-    __ push(r1);  // argument for Runtime_NewObject
+    __ push(r4);  // argument for Runtime_NewObject
     if (create_memento) {
       __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
     } else {
       __ CallRuntime(Runtime::kNewObject, 1);
     }
-    __ mov(r4, r0);
+    __ mr(r7, r3);

     // If we ended up using the runtime, and we want a memento, then the
     // runtime call made it for us, and we shouldn't do create count
     // increment.
     Label count_incremented;
     if (create_memento) {
-      __ jmp(&count_incremented);
+      __ b(&count_incremented);
     }

     // Receiver for constructor call allocated.
-    // r4: JSObject
+    // r7: JSObject
     __ bind(&allocated);

     if (create_memento) {
-      __ ldr(r2, MemOperand(sp, kPointerSize * 2));
-      __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
-      __ cmp(r2, r5);
-      __ b(eq, &count_incremented);
-      // r2 is an AllocationSite. We are creating a memento from it, so we
+      __ LoadP(r5, MemOperand(sp, kPointerSize * 2));
+      __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
+      __ cmp(r5, r8);
+      __ beq(&count_incremented);
+      // r5 is an AllocationSite. We are creating a memento from it, so we
       // need to increment the memento create count.
-      __ ldr(r3, FieldMemOperand(r2,
-                                 AllocationSite::kPretenureCreateCountOffset));
-      __ add(r3, r3, Operand(Smi::FromInt(1)));
-      __ str(r3, FieldMemOperand(r2,
-                                 AllocationSite::kPretenureCreateCountOffset));
+      __ LoadP(
+          r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset));
+      __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
+      __ StoreP(
+          r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset),
+          r0);
       __ bind(&count_incremented);
     }

-    __ push(r4);
-    __ push(r4);
+    __ Push(r7, r7);

     // Reload the number of arguments and the constructor from the stack.
     // sp[0]: receiver
     // sp[1]: receiver
     // sp[2]: constructor function
     // sp[3]: number of arguments (smi-tagged)
-    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
-    __ ldr(r3, MemOperand(sp, 3 * kPointerSize));
+    __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));
+    __ LoadP(r6, MemOperand(sp, 3 * kPointerSize));

     // Set up pointer to last argument.
-    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
+    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

     // Set up number of arguments for function call below
-    __ SmiUntag(r0, r3);
+    __ SmiUntag(r3, r6);

     // Copy arguments and receiver to the expression stack.
-    // r0: number of arguments
-    // r1: constructor function
-    // r2: address of last argument (caller sp)
-    // r3: number of arguments (smi-tagged)
+    // r3: number of arguments
+    // r4: constructor function
+    // r5: address of last argument (caller sp)
+    // r6: number of arguments (smi-tagged)
     // sp[0]: receiver
     // sp[1]: receiver
     // sp[2]: constructor function
     // sp[3]: number of arguments (smi-tagged)
-    Label loop, entry;
-    __ b(&entry);
-    __ bind(&loop);
-    __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
-    __ push(ip);
-    __ bind(&entry);
-    __ sub(r3, r3, Operand(2), SetCC);
-    __ b(ge, &loop);
+    Label loop, no_args;
+    __ cmpi(r3, Operand::Zero());
+    __ beq(&no_args);
+    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
+    __ mtctr(r3);
+    __ bind(&loop);
+    __ subi(ip, ip, Operand(kPointerSize));
+    __ LoadPX(r0, MemOperand(r5, ip));
+    __ push(r0);
+    __ bdnz(&loop);
+    __ bind(&no_args);

     // Call the function.
-    // r0: number of arguments
-    // r1: constructor function
+    // r3: number of arguments
+    // r4: constructor function
     if (is_api_function) {
-      __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
-      Handle<Code> code =
-          masm->isolate()->builtins()->HandleApiCallConstruct();
+      __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
+      Handle<Code> code = masm->isolate()->builtins()->HandleApiCallConstruct();
       __ Call(code, RelocInfo::CODE_TARGET);
     } else {
-      ParameterCount actual(r0);
-      __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
+      ParameterCount actual(r3);
+      __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
     }

     // Store offset of return address for deoptimizer.
     if (!is_api_function) {
       masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
     }

     // Restore context from the frame.
-    // r0: result
+    // r3: result
     // sp[0]: receiver
     // sp[1]: constructor function
     // sp[2]: number of arguments (smi-tagged)
-    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

     // If the result is an object (in the ECMA sense), we should get rid
     // of the receiver and use the result; see ECMA-262 section 13.2.2-7
     // on page 74.
     Label use_receiver, exit;

     // If the result is a smi, it is *not* an object in the ECMA sense.
-    // r0: result
+    // r3: result
     // sp[0]: receiver (newly allocated object)
     // sp[1]: constructor function
     // sp[2]: number of arguments (smi-tagged)
-    __ JumpIfSmi(r0, &use_receiver);
+    __ JumpIfSmi(r3, &use_receiver);

     // If the type of the result (stored in its map) is less than
     // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
-    __ CompareObjectType(r0, r1, r3, FIRST_SPEC_OBJECT_TYPE);
-    __ b(ge, &exit);
+    __ CompareObjectType(r3, r4, r6, FIRST_SPEC_OBJECT_TYPE);
+    __ bge(&exit);

     // Throw away the result of the constructor invocation and use the
     // on-stack receiver as the result.
     __ bind(&use_receiver);
-    __ ldr(r0, MemOperand(sp));
+    __ LoadP(r3, MemOperand(sp));

     // Remove receiver from the stack, remove caller arguments, and
     // return.
     __ bind(&exit);
-    // r0: result
+    // r3: result
     // sp[0]: receiver (newly allocated object)
     // sp[1]: constructor function
     // sp[2]: number of arguments (smi-tagged)
-    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
+    __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));

     // Leave construct frame.
   }

-  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
-  __ add(sp, sp, Operand(kPointerSize));
-  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
-  __ Jump(lr);
+  __ SmiToPtrArrayOffset(r4, r4);
+  __ add(sp, sp, r4);
+  __ addi(sp, sp, Operand(kPointerSize));
+  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
+  __ blr();
 }

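The argument-copy loop in the construct stub above is the clearest control-flow translation in the patch: ARM decrements a Smi-tagged count and branches on the flags, while the PPC version seeds the count register with mtctr and closes the loop with bdnz (decrement CTR, branch while nonzero). The beq(&no_args) guard matters because bdnz decrements before testing, so entering the loop with CTR == 0 would wrap around. The same fragment, repeated with annotations:

    // Annotated copy of the stub's loop (registers as in the stub:
    // r3 = argc, r5 = address of the last argument in the caller frame).
    Label loop, no_args;
    __ cmpi(r3, Operand::Zero());                        // zero arguments?
    __ beq(&no_args);                                    // CTR must not start at 0
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));  // ip = argc * word size
    __ mtctr(r3);                                        // CTR <- argc
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));              // step back one argument
    __ LoadPX(r0, MemOperand(r5, ip));                   // indexed load: arg[i]
    __ push(r0);                                         // copy to expression stack
    __ bdnz(&loop);                                      // --CTR; loop while CTR != 0
    __ bind(&no_args);
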
 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
   Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
 }


 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
   Generate_JSConstructStubHelper(masm, true, false);
 }

 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                              bool is_construct) {
   // Called from Generate_JS_Entry
-  // r0: code entry
-  // r1: function
-  // r2: receiver
-  // r3: argc
-  // r4: argv
-  // r5-r6, r8 (if not FLAG_enable_ool_constant_pool) and cp may be clobbered
+  // r3: code entry
+  // r4: function
+  // r5: receiver
+  // r6: argc
+  // r7: argv
+  // r0, r8-r9 and cp may be clobbered
   ProfileEntryHookStub::MaybeCallEntryHook(masm);

   // Clear the context before we push it when entering the internal frame.
-  __ mov(cp, Operand::Zero());
+  __ li(cp, Operand::Zero());

   // Enter an internal frame.
   {
     FrameScope scope(masm, StackFrame::INTERNAL);

     // Set up the context from the function argument.
-    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
+    __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

     __ InitializeRootRegister();

     // Push the function and the receiver onto the stack.
-    __ push(r1);
-    __ push(r2);
+    __ push(r4);
+    __ push(r5);

     // Copy arguments to the stack in a loop.
-    // r1: function
-    // r3: argc
-    // r4: argv, i.e. points to first arg
+    // r4: function
+    // r6: argc
+    // r7: argv, i.e. points to first arg
     Label loop, entry;
-    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
-    // r2 points past last arg.
+    __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
+    __ add(r5, r7, r0);
+    // r5 points past last arg.
     __ b(&entry);
     __ bind(&loop);
-    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
-    __ ldr(r0, MemOperand(r0));  // dereference handle
-    __ push(r0);  // push parameter
+    __ LoadP(r8, MemOperand(r7));  // read next parameter
+    __ addi(r7, r7, Operand(kPointerSize));
+    __ LoadP(r0, MemOperand(r8));  // dereference handle
+    __ push(r0);  // push parameter
     __ bind(&entry);
-    __ cmp(r4, r2);
-    __ b(ne, &loop);
+    __ cmp(r7, r5);
+    __ bne(&loop);

     // Initialize all JavaScript callee-saved registers, since they will be seen
     // by the garbage collector as part of handlers.
-    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
-    __ mov(r5, Operand(r4));
-    __ mov(r6, Operand(r4));
-    if (!FLAG_enable_ool_constant_pool) {
-      __ mov(r8, Operand(r4));
-    }
-    if (kR9Available == 1) {
-      __ mov(r9, Operand(r4));
-    }
+    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
+    __ mr(r14, r7);
+    __ mr(r15, r7);
+    __ mr(r16, r7);
+    __ mr(r17, r7);

-    // Invoke the code and pass argc as r0.
-    __ mov(r0, Operand(r3));
+    // Invoke the code and pass argc as r3.
+    __ mr(r3, r6);
     if (is_construct) {
       // No type feedback cell is available
-      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
+      __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
       CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
       __ CallStub(&stub);
     } else {
-      ParameterCount actual(r0);
-      __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
+      ParameterCount actual(r3);
+      __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
     }
     // Exit the JS frame and remove the parameters (except function), and
     // return.
-    // Respect ABI stack constraint.
   }
-  __ Jump(lr);
+  __ blr();

-  // r0: result
+  // r3: result
 }

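As the comments above note, argv holds handles rather than raw objects, so each parameter needs two loads: one to read argv[i] and one to dereference the handle. The PPC version also splits ARM's post-indexed load into an explicit load plus addi, since PPC has no post-increment addressing for plain loads. The same loop, repeated with annotations:

    // Annotated copy of the trampoline's loop (r7 = argv cursor, r5 = end
    // of argv; each argv entry is a Handle, i.e. an Object**).
    Label loop, entry;
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r8, MemOperand(r7));            // r8 = argv[i] (a handle)
    __ addi(r7, r7, Operand(kPointerSize));  // advance argv cursor
    __ LoadP(r0, MemOperand(r8));            // dereference the handle
    __ push(r0);                             // push the actual argument
    __ bind(&entry);
    __ cmp(r7, r5);                          // done when cursor hits the end
    __ bne(&loop);
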
 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
   Generate_JSEntryTrampolineHelper(masm, false);
 }


 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
   Generate_JSEntryTrampolineHelper(masm, true);
 }


 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
   CallRuntimePassFunction(masm, Runtime::kCompileLazy);
   GenerateTailCallToReturnedCode(masm);
 }

816 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) { 815 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
817 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 816 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
818 // Push a copy of the function onto the stack. 817 // Push a copy of the function onto the stack.
819 __ push(r1);
820 // Push function as parameter to the runtime call. 818 // Push function as parameter to the runtime call.
821 __ Push(r1); 819 __ Push(r4, r4);
822 // Whether to compile in a background thread. 820 // Whether to compile in a background thread.
823 __ Push(masm->isolate()->factory()->ToBoolean(concurrent)); 821 __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
824 822
825 __ CallRuntime(Runtime::kCompileOptimized, 2); 823 __ CallRuntime(Runtime::kCompileOptimized, 2);
826 // Restore receiver. 824 // Restore receiver.
827 __ pop(r1); 825 __ pop(r4);
828 } 826 }
829 827
830 828
831 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { 829 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
832 CallCompileOptimized(masm, false); 830 CallCompileOptimized(masm, false);
833 GenerateTailCallToReturnedCode(masm); 831 GenerateTailCallToReturnedCode(masm);
834 } 832 }
835 833
836 834
837 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { 835 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
838 CallCompileOptimized(masm, true); 836 CallCompileOptimized(masm, true);
839 GenerateTailCallToReturnedCode(masm); 837 GenerateTailCallToReturnedCode(masm);
840 } 838 }
841 839
842 840
843 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { 841 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
844 // For now, we are relying on the fact that make_code_young doesn't do any 842 // For now, we are relying on the fact that make_code_young doesn't do any
845 // garbage collection which allows us to save/restore the registers without 843 // garbage collection which allows us to save/restore the registers without
846 // worrying about which of them contain pointers. We also don't build an 844 // worrying about which of them contain pointers. We also don't build an
847 // internal frame to make the code faster, since we shouldn't have to do stack 845 // internal frame to make the code faster, since we shouldn't have to do stack
848 // crawls in MakeCodeYoung. This seems a bit fragile. 846 // crawls in MakeCodeYoung. This seems a bit fragile.
849 847
848 // Point r3 at the start of the PlatformCodeAge sequence.
849 __ mr(r3, ip);
850
850 // The following registers must be saved and restored when calling through to 851 // The following registers must be saved and restored when calling through to
851 // the runtime: 852 // the runtime:
852 // r0 - contains return address (beginning of patch sequence) 853 // r3 - contains return address (beginning of patch sequence)
853 // r1 - isolate 854 // r4 - isolate
855 // lr - return address
854 FrameScope scope(masm, StackFrame::MANUAL); 856 FrameScope scope(masm, StackFrame::MANUAL);
855 __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit()); 857 __ mflr(r0);
856 __ PrepareCallCFunction(2, 0, r2); 858 __ MultiPush(r0.bit() | r3.bit() | r4.bit() | fp.bit());
857 __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate()))); 859 __ PrepareCallCFunction(2, 0, r5);
860 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
858 __ CallCFunction( 861 __ CallCFunction(
859 ExternalReference::get_make_code_young_function(masm->isolate()), 2); 862 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
860 __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit()); 863 __ MultiPop(r0.bit() | r3.bit() | r4.bit() | fp.bit());
861 __ mov(pc, r0); 864 __ mtlr(r0);
865 __ mr(ip, r3);
866 __ Jump(ip);
862 } 867 }
863 868
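The helper above saves lr, r3, r4 and fp, calls out to C, then re-enters the patched code through ip. A hedged sketch of the callback reached via get_make_code_young_function; the two-argument shape follows from PrepareCallCFunction(2, 0, r5), but the exact declaration below is an assumption:

// Assumed shape of the aging callback: it rewrites the PlatformCodeAge
// sequence that starts at 'sequence' back to its young form.
extern "C" void MakeCodeYoung(unsigned char* sequence, void* isolate);

// Flow of the builtin, in pseudo-C++:
//   r3 = ip;                       // start of the code-age sequence
//   save {lr, r3, r4, fp};         // MultiPush
//   MakeCodeYoung(r3, isolate);    // patch the sequence
//   restore {lr, r3, r4, fp};      // MultiPop
//   goto *r3;                      // re-enter the now-young code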
864 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \ 869 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
865 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \ 870 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
866 MacroAssembler* masm) { \ 871 MacroAssembler* masm) { \
867 GenerateMakeCodeYoungAgainCommon(masm); \ 872 GenerateMakeCodeYoungAgainCommon(masm); \
868 } \ 873 } \
869 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \ 874 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
870 MacroAssembler* masm) { \ 875 MacroAssembler* masm) { \
871 GenerateMakeCodeYoungAgainCommon(masm); \ 876 GenerateMakeCodeYoungAgainCommon(masm); \
872 } 877 }
873 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR) 878 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
874 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR 879 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
875 880
876 881
877 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { 882 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
878 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact 883 // For now, we are relying on the fact that make_code_young doesn't do any
879 // that make_code_young doesn't do any garbage collection which allows us to 884 // garbage collection which allows us to save/restore the registers without
880 // save/restore the registers without worrying about which of them contain 885 // worrying about which of them contain pointers. We also don't build an
881 // pointers. 886 // internal frame to make the code faster, since we shouldn't have to do stack
887 // crawls in MakeCodeYoung. This seems a bit fragile.
888
889 // Point r3 at the start of the PlatformCodeAge sequence.
890 __ mr(r3, ip);
882 891
883 // The following registers must be saved and restored when calling through to 892 // The following registers must be saved and restored when calling through to
884 // the runtime: 893 // the runtime:
885 // r0 - contains return address (beginning of patch sequence) 894 // r3 - contains return address (beginning of patch sequence)
886 // r1 - isolate 895 // r4 - isolate
896 // lr - return address
887 FrameScope scope(masm, StackFrame::MANUAL); 897 FrameScope scope(masm, StackFrame::MANUAL);
888 __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit()); 898 __ mflr(r0);
889 __ PrepareCallCFunction(2, 0, r2); 899 __ MultiPush(r0.bit() | r3.bit() | r4.bit() | fp.bit());
890 __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate()))); 900 __ PrepareCallCFunction(2, 0, r5);
891 __ CallCFunction(ExternalReference::get_mark_code_as_executed_function( 901 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
892 masm->isolate()), 2); 902 __ CallCFunction(
893 __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit()); 903 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
904 2);
905 __ MultiPop(r0.bit() | r3.bit() | r4.bit() | fp.bit());
906 __ mtlr(r0);
907 __ mr(ip, r3);
894 908
895 // Perform prologue operations usually performed by the young code stub. 909 // Perform prologue operations usually performed by the young code stub.
896 __ PushFixedFrame(r1); 910 __ PushFixedFrame(r4);
897 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); 911 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
898 912
899 // Jump to point after the code-age stub. 913 // Jump to point after the code-age stub.
900 __ add(r0, r0, Operand(kNoCodeAgeSequenceLength)); 914 __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
901 __ mov(pc, r0); 915 __ Jump(r3);
902 } 916 }
903 917
904 918
905 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { 919 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
906 GenerateMakeCodeYoungAgainCommon(masm); 920 GenerateMakeCodeYoungAgainCommon(masm);
907 } 921 }
908 922
909 923
910 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, 924 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
911 SaveFPRegsMode save_doubles) { 925 SaveFPRegsMode save_doubles) {
912 { 926 {
913 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 927 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
914 928
915 // Preserve registers across notification, this is important for compiled 929 // Preserve registers across notification, this is important for compiled
916 // stubs that tail call the runtime on deopts passing their parameters in 930 // stubs that tail call the runtime on deopts passing their parameters in
917 // registers. 931 // registers.
918 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved); 932 __ MultiPush(kJSCallerSaved | kCalleeSaved);
919 // Notify the runtime of the stub failure (no arguments are passed). 933 // Notify the runtime of the stub failure (no arguments are passed).
920 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles); 934 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
921 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved); 935 __ MultiPop(kJSCallerSaved | kCalleeSaved);
922 } 936 }
923 937
924 __ add(sp, sp, Operand(kPointerSize)); // Ignore state 938 __ addi(sp, sp, Operand(kPointerSize)); // Ignore state
925 __ mov(pc, lr); // Jump to miss handler 939 __ blr(); // Jump to miss handler
926 } 940 }
927 941
928 942
929 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { 943 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
930 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); 944 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
931 } 945 }
932 946
933 947
934 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { 948 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
935 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); 949 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
936 } 950 }
937 951
938 952
939 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, 953 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
940 Deoptimizer::BailoutType type) { 954 Deoptimizer::BailoutType type) {
941 { 955 {
942 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 956 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
943 // Pass the deoptimization type to the runtime system. 957 // Pass the deoptimization type to the runtime system.
944 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type)))); 958 __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
945 __ push(r0); 959 __ push(r3);
946 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); 960 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
947 } 961 }
948 962
949 // Get the full codegen state from the stack and untag it -> r6. 963 // Get the full codegen state from the stack and untag it -> r9.
950 __ ldr(r6, MemOperand(sp, 0 * kPointerSize)); 964 __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
951 __ SmiUntag(r6); 965 __ SmiUntag(r9);
952 // Switch on the state. 966 // Switch on the state.
953 Label with_tos_register, unknown_state; 967 Label with_tos_register, unknown_state;
954 __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS)); 968 __ cmpi(r9, Operand(FullCodeGenerator::NO_REGISTERS));
955 __ b(ne, &with_tos_register); 969 __ bne(&with_tos_register);
956 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state. 970 __ addi(sp, sp, Operand(1 * kPointerSize)); // Remove state.
957 __ Ret(); 971 __ Ret();
958 972
959 __ bind(&with_tos_register); 973 __ bind(&with_tos_register);
960 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); 974 __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
961 __ cmp(r6, Operand(FullCodeGenerator::TOS_REG)); 975 __ cmpi(r9, Operand(FullCodeGenerator::TOS_REG));
962 __ b(ne, &unknown_state); 976 __ bne(&unknown_state);
963 __ add(sp, sp, Operand(2 * kPointerSize)); // Remove state. 977 __ addi(sp, sp, Operand(2 * kPointerSize)); // Remove state.
964 __ Ret(); 978 __ Ret();
965 979
966 __ bind(&unknown_state); 980 __ bind(&unknown_state);
967 __ stop("no cases left"); 981 __ stop("no cases left");
968 } 982 }
969 983
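The tail of the helper above dispatches on the full-codegen state word left on the stack. Restated as a self-contained C++ sketch (the enum mirrors the FullCodeGenerator states used above; sp[0] is the state, sp[1] the optional top-of-stack value):

#include <cstdint>
#include <cstdlib>

enum State { NO_REGISTERS, TOS_REG };  // mirrors FullCodeGenerator

// Returns the adjusted stack pointer; reloads the saved top-of-stack
// value into *r3 when the state says one was pushed.
intptr_t* DropDeoptState(State state, intptr_t* sp, intptr_t* r3) {
  switch (state) {
    case NO_REGISTERS: return sp + 1;          // drop the state word only
    case TOS_REG: *r3 = sp[1]; return sp + 2;  // reload TOS, drop both words
    default: std::abort();                     // "no cases left"
  }
}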
970 984
971 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { 985 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
972 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); 986 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
973 } 987 }
974 988
975 989
976 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { 990 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
977 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); 991 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
978 } 992 }
979 993
980 994
981 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { 995 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
982 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); 996 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
983 } 997 }
984 998
985 999
986 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { 1000 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
987 // Look up the function in the JavaScript frame. 1001 // Look up the function in the JavaScript frame.
988 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1002 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
989 { 1003 {
990 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 1004 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
991 // Pass function as argument. 1005 // Pass function as argument.
992 __ push(r0); 1006 __ push(r3);
993 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); 1007 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
994 } 1008 }
995 1009
996 // If the code object is null, just return to the unoptimized code. 1010 // If the code object is null, just return to the unoptimized code.
997 Label skip; 1011 Label skip;
998 __ cmp(r0, Operand(Smi::FromInt(0))); 1012 __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
999 __ b(ne, &skip); 1013 __ bne(&skip);
1000 __ Ret(); 1014 __ Ret();
1001 1015
1002 __ bind(&skip); 1016 __ bind(&skip);
1003 1017
1004 // Load deoptimization data from the code object. 1018 // Load deoptimization data from the code object.
1005 // <deopt_data> = <code>[#deoptimization_data_offset] 1019 // <deopt_data> = <code>[#deoptimization_data_offset]
1006 __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset)); 1020 __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));
1007 1021
1008 { ConstantPoolUnavailableScope constant_pool_unavailable(masm); 1022 #if V8_OOL_CONSTANT_POOL
1009 if (FLAG_enable_ool_constant_pool) { 1023 {
1010 __ ldr(pp, FieldMemOperand(r0, Code::kConstantPoolOffset)); 1024 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
1011 } 1025 __ LoadP(kConstantPoolRegister,
1026 FieldMemOperand(r3, Code::kConstantPoolOffset));
1027 #endif
1012 1028
1013 // Load the OSR entrypoint offset from the deoptimization data. 1029 // Load the OSR entrypoint offset from the deoptimization data.
1014 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] 1030 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1015 __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt( 1031 __ LoadP(r4, FieldMemOperand(
1016 DeoptimizationInputData::kOsrPcOffsetIndex))); 1032 r4, FixedArray::OffsetOfElementAt(
1033 DeoptimizationInputData::kOsrPcOffsetIndex)));
1034 __ SmiUntag(r4);
1017 1035
1018 // Compute the target address = code_obj + header_size + osr_offset 1036 // Compute the target address = code_obj + header_size + osr_offset
1019 // <entry_addr> = <code_obj> + #header_size + <osr_offset> 1037 // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1020 __ add(r0, r0, Operand::SmiUntag(r1)); 1038 __ add(r3, r3, r4);
1021 __ add(lr, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); 1039 __ addi(r0, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
1040 __ mtlr(r0);
1022 1041
1023 // And "return" to the OSR entry point of the function. 1042 // And "return" to the OSR entry point of the function.
1024 __ Ret(); 1043 __ Ret();
1044 #if V8_OOL_CONSTANT_POOL
1025 } 1045 }
1046 #endif
1026 } 1047 }
1027 1048
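The OSR entry computed above is plain pointer arithmetic once the smi offset has been untagged. A sketch with illustrative constants standing in for Code::kHeaderSize and kHeapObjectTag (the real values come from the V8 headers):

#include <cstdint>

const uintptr_t kCodeHeaderSizeSketch = 32;  // assumption, not the real value
const uintptr_t kHeapObjectTagSketch = 1;    // heap pointers are tagged

// <entry_addr> = <code_obj> + #header_size + <osr_offset>
uintptr_t OsrEntry(uintptr_t code_obj, uintptr_t osr_pc_offset) {
  return code_obj + kCodeHeaderSizeSketch - kHeapObjectTagSketch +
         osr_pc_offset;
}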
1028 1049
1029 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { 1050 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1030 // We check the stack limit as indicator that recompilation might be done. 1051 // We check the stack limit as indicator that recompilation might be done.
1031 Label ok; 1052 Label ok;
1032 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 1053 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
1033 __ cmp(sp, Operand(ip)); 1054 __ cmpl(sp, ip);
1034 __ b(hs, &ok); 1055 __ bge(&ok);
1035 { 1056 {
1036 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 1057 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1037 __ CallRuntime(Runtime::kStackGuard, 0); 1058 __ CallRuntime(Runtime::kStackGuard, 0);
1038 } 1059 }
1039 __ Jump(masm->isolate()->builtins()->OnStackReplacement(), 1060 __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1040 RelocInfo::CODE_TARGET); 1061 RelocInfo::CODE_TARGET);
1041 1062
1042 __ bind(&ok); 1063 __ bind(&ok);
1043 __ Ret(); 1064 __ Ret();
1044 } 1065 }
1045 1066
1046 1067
1047 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { 1068 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1048 // 1. Make sure we have at least one argument. 1069 // 1. Make sure we have at least one argument.
1049 // r0: actual number of arguments 1070 // r3: actual number of arguments
1050 { Label done; 1071 {
1051 __ cmp(r0, Operand::Zero()); 1072 Label done;
1052 __ b(ne, &done); 1073 __ cmpi(r3, Operand::Zero());
1053 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 1074 __ bne(&done);
1054 __ push(r2); 1075 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
1055 __ add(r0, r0, Operand(1)); 1076 __ push(r5);
1077 __ addi(r3, r3, Operand(1));
1056 __ bind(&done); 1078 __ bind(&done);
1057 } 1079 }
1058 1080
1059 // 2. Get the function to call (passed as receiver) from the stack, check 1081 // 2. Get the function to call (passed as receiver) from the stack, check
1060 // if it is a function. 1082 // if it is a function.
1061 // r0: actual number of arguments 1083 // r3: actual number of arguments
1062 Label slow, non_function; 1084 Label slow, non_function;
1063 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); 1085 __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2));
1064 __ JumpIfSmi(r1, &non_function); 1086 __ add(r4, sp, r4);
1065 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); 1087 __ LoadP(r4, MemOperand(r4));
1066 __ b(ne, &slow); 1088 __ JumpIfSmi(r4, &non_function);
1089 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
1090 __ bne(&slow);
1067 1091
1068 // 3a. Patch the first argument if necessary when calling a function. 1092 // 3a. Patch the first argument if necessary when calling a function.
1069 // r0: actual number of arguments 1093 // r3: actual number of arguments
1070 // r1: function 1094 // r4: function
1071 Label shift_arguments; 1095 Label shift_arguments;
1072 __ mov(r4, Operand::Zero()); // indicate regular JS_FUNCTION 1096 __ li(r7, Operand::Zero()); // indicate regular JS_FUNCTION
1073 { Label convert_to_object, use_global_proxy, patch_receiver; 1097 {
1098 Label convert_to_object, use_global_proxy, patch_receiver;
1074 // Change context eagerly in case we need the global receiver. 1099 // Change context eagerly in case we need the global receiver.
1075 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); 1100 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
1076 1101
1077 // Do not transform the receiver for strict mode functions. 1102 // Do not transform the receiver for strict mode functions.
1078 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 1103 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1079 __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); 1104 __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
1080 __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + 1105 __ TestBit(r6,
1081 kSmiTagSize))); 1106 #if V8_TARGET_ARCH_PPC64
1082 __ b(ne, &shift_arguments); 1107 SharedFunctionInfo::kStrictModeFunction,
1108 #else
1109 SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
1110 #endif
1111 r0);
1112 __ bne(&shift_arguments, cr0);
1083 1113
1084 // Do not transform the receiver for native (Compilerhints already in r3). 1114 // Do not transform the receiver for native (Compilerhints already in r6).
1085 __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); 1115 __ TestBit(r6,
1086 __ b(ne, &shift_arguments); 1116 #if V8_TARGET_ARCH_PPC64
1117 SharedFunctionInfo::kNative,
1118 #else
1119 SharedFunctionInfo::kNative + kSmiTagSize,
1120 #endif
1121 r0);
1122 __ bne(&shift_arguments, cr0);
1087 1123
1088 // Compute the receiver in sloppy mode. 1124 // Compute the receiver in sloppy mode.
1089 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); 1125 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
1090 __ ldr(r2, MemOperand(r2, -kPointerSize)); 1126 __ add(r5, sp, ip);
1091 // r0: actual number of arguments 1127 __ LoadP(r5, MemOperand(r5, -kPointerSize));
1092 // r1: function 1128 // r3: actual number of arguments
1093 // r2: first argument 1129 // r4: function
1094 __ JumpIfSmi(r2, &convert_to_object); 1130 // r5: first argument
1131 __ JumpIfSmi(r5, &convert_to_object);
1095 1132
1096 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); 1133 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
1097 __ cmp(r2, r3); 1134 __ cmp(r5, r6);
1098 __ b(eq, &use_global_proxy); 1135 __ beq(&use_global_proxy);
1099 __ LoadRoot(r3, Heap::kNullValueRootIndex); 1136 __ LoadRoot(r6, Heap::kNullValueRootIndex);
1100 __ cmp(r2, r3); 1137 __ cmp(r5, r6);
1101 __ b(eq, &use_global_proxy); 1138 __ beq(&use_global_proxy);
1102 1139
1103 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 1140 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1104 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE); 1141 __ CompareObjectType(r5, r6, r6, FIRST_SPEC_OBJECT_TYPE);
1105 __ b(ge, &shift_arguments); 1142 __ bge(&shift_arguments);
1106 1143
1107 __ bind(&convert_to_object); 1144 __ bind(&convert_to_object);
1108 1145
1109 { 1146 {
1110 // Enter an internal frame in order to preserve argument count. 1147 // Enter an internal frame in order to preserve argument count.
1111 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 1148 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1112 __ SmiTag(r0); 1149 __ SmiTag(r3);
1113 __ push(r0); 1150 __ Push(r3, r5);
1151 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1152 __ mr(r5, r3);
1114 1153
1115 __ push(r2); 1154 __ pop(r3);
1116 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1155 __ SmiUntag(r3);
1117 __ mov(r2, r0);
1118
1119 __ pop(r0);
1120 __ SmiUntag(r0);
1121 1156
1122 // Exit the internal frame. 1157 // Exit the internal frame.
1123 } 1158 }
1124 1159
1125 // Restore the function to r1, and the flag to r4. 1160 // Restore the function to r4, and the flag to r7.
1126 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); 1161 __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
1127 __ mov(r4, Operand::Zero()); 1162 __ add(r7, sp, r7);
1128 __ jmp(&patch_receiver); 1163 __ LoadP(r4, MemOperand(r7));
1164 __ li(r7, Operand::Zero());
1165 __ b(&patch_receiver);
1129 1166
1130 __ bind(&use_global_proxy); 1167 __ bind(&use_global_proxy);
1131 __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 1168 __ LoadP(r5, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1132 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset)); 1169 __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));
1133 1170
1134 __ bind(&patch_receiver); 1171 __ bind(&patch_receiver);
1135 __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2)); 1172 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
1136 __ str(r2, MemOperand(r3, -kPointerSize)); 1173 __ add(r6, sp, ip);
1174 __ StoreP(r5, MemOperand(r6, -kPointerSize));
1137 1175
1138 __ jmp(&shift_arguments); 1176 __ b(&shift_arguments);
1139 } 1177 }
1140 1178
1141 // 3b. Check for function proxy. 1179 // 3b. Check for function proxy.
1142 __ bind(&slow); 1180 __ bind(&slow);
1143 __ mov(r4, Operand(1, RelocInfo::NONE32)); // indicate function proxy 1181 __ li(r7, Operand(1, RelocInfo::NONE32)); // indicate function proxy
1144 __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE)); 1182 __ cmpi(r5, Operand(JS_FUNCTION_PROXY_TYPE));
1145 __ b(eq, &shift_arguments); 1183 __ beq(&shift_arguments);
1146 __ bind(&non_function); 1184 __ bind(&non_function);
1147 __ mov(r4, Operand(2, RelocInfo::NONE32)); // indicate non-function 1185 __ li(r7, Operand(2, RelocInfo::NONE32)); // indicate non-function
1148 1186
1149 // 3c. Patch the first argument when calling a non-function. The 1187 // 3c. Patch the first argument when calling a non-function. The
1150 // CALL_NON_FUNCTION builtin expects the non-function callee as 1188 // CALL_NON_FUNCTION builtin expects the non-function callee as
1151 // receiver, so overwrite the first argument which will ultimately 1189 // receiver, so overwrite the first argument which will ultimately
1152 // become the receiver. 1190 // become the receiver.
1153 // r0: actual number of arguments 1191 // r3: actual number of arguments
1154 // r1: function 1192 // r4: function
1155 // r4: call type (0: JS function, 1: function proxy, 2: non-function) 1193 // r7: call type (0: JS function, 1: function proxy, 2: non-function)
1156 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); 1194 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
1157 __ str(r1, MemOperand(r2, -kPointerSize)); 1195 __ add(r5, sp, ip);
1196 __ StoreP(r4, MemOperand(r5, -kPointerSize));
1158 1197
1159 // 4. Shift arguments and return address one slot down on the stack 1198 // 4. Shift arguments and return address one slot down on the stack
1160 // (overwriting the original receiver). Adjust argument count to make 1199 // (overwriting the original receiver). Adjust argument count to make
1161 // the original first argument the new receiver. 1200 // the original first argument the new receiver.
1162 // r0: actual number of arguments 1201 // r3: actual number of arguments
1163 // r1: function 1202 // r4: function
1164 // r4: call type (0: JS function, 1: function proxy, 2: non-function) 1203 // r7: call type (0: JS function, 1: function proxy, 2: non-function)
1165 __ bind(&shift_arguments); 1204 __ bind(&shift_arguments);
1166 { Label loop; 1205 {
1206 Label loop;
1167 // Calculate the copy start address (destination). Copy end address is sp. 1207 // Calculate the copy start address (destination). Copy end address is sp.
1168 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); 1208 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
1209 __ add(r5, sp, ip);
1169 1210
1170 __ bind(&loop); 1211 __ bind(&loop);
1171 __ ldr(ip, MemOperand(r2, -kPointerSize)); 1212 __ LoadP(ip, MemOperand(r5, -kPointerSize));
1172 __ str(ip, MemOperand(r2)); 1213 __ StoreP(ip, MemOperand(r5));
1173 __ sub(r2, r2, Operand(kPointerSize)); 1214 __ subi(r5, r5, Operand(kPointerSize));
1174 __ cmp(r2, sp); 1215 __ cmp(r5, sp);
1175 __ b(ne, &loop); 1216 __ bne(&loop);
1176 // Adjust the actual number of arguments and remove the top element 1217 // Adjust the actual number of arguments and remove the top element
1177 // (which is a copy of the last argument). 1218 // (which is a copy of the last argument).
1178 __ sub(r0, r0, Operand(1)); 1219 __ subi(r3, r3, Operand(1));
1179 __ pop(); 1220 __ pop();
1180 } 1221 }
1181 1222
1182 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, 1223 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1183 // or a function proxy via CALL_FUNCTION_PROXY. 1224 // or a function proxy via CALL_FUNCTION_PROXY.
1184 // r0: actual number of arguments 1225 // r3: actual number of arguments
1185 // r1: function 1226 // r4: function
1186 // r4: call type (0: JS function, 1: function proxy, 2: non-function) 1227 // r7: call type (0: JS function, 1: function proxy, 2: non-function)
1187 { Label function, non_proxy; 1228 {
1188 __ tst(r4, r4); 1229 Label function, non_proxy;
1189 __ b(eq, &function); 1230 __ cmpi(r7, Operand::Zero());
1231 __ beq(&function);
1190 // Expected number of arguments is 0 for CALL_NON_FUNCTION. 1232 // Expected number of arguments is 0 for CALL_NON_FUNCTION.
1191 __ mov(r2, Operand::Zero()); 1233 __ li(r5, Operand::Zero());
1192 __ cmp(r4, Operand(1)); 1234 __ cmpi(r7, Operand(1));
1193 __ b(ne, &non_proxy); 1235 __ bne(&non_proxy);
1194 1236
1195 __ push(r1); // re-add proxy object as additional argument 1237 __ push(r4); // re-add proxy object as additional argument
1196 __ add(r0, r0, Operand(1)); 1238 __ addi(r3, r3, Operand(1));
1197 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY); 1239 __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
1198 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 1240 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1199 RelocInfo::CODE_TARGET); 1241 RelocInfo::CODE_TARGET);
1200 1242
1201 __ bind(&non_proxy); 1243 __ bind(&non_proxy);
1202 __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION); 1244 __ GetBuiltinFunction(r4, Builtins::CALL_NON_FUNCTION);
1203 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 1245 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1204 RelocInfo::CODE_TARGET); 1246 RelocInfo::CODE_TARGET);
1205 __ bind(&function); 1247 __ bind(&function);
1206 } 1248 }
1207 1249
1208 // 5b. Get the code to call from the function and check that the number of 1250 // 5b. Get the code to call from the function and check that the number of
1209 // expected arguments matches what we're providing. If so, jump 1251 // expected arguments matches what we're providing. If so, jump
1210 // (tail-call) to the code in register r3 without checking arguments. 1252 // (tail-call) to the code in register ip without checking arguments.
1211 // r0: actual number of arguments 1253 // r3: actual number of arguments
1212 // r1: function 1254 // r4: function
1213 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 1255 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1214 __ ldr(r2, 1256 __ LoadWordArith(
1215 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset)); 1257 r5, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
1216 __ SmiUntag(r2); 1258 #if !V8_TARGET_ARCH_PPC64
1217 __ cmp(r2, r0); // Check formal and actual parameter counts. 1259 __ SmiUntag(r5);
1260 #endif
1261 __ cmp(r5, r3); // Check formal and actual parameter counts.
1218 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 1262 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1219 RelocInfo::CODE_TARGET, 1263 RelocInfo::CODE_TARGET, ne);
1220 ne); 1264
1221 1265 __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
1222 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
1223 ParameterCount expected(0); 1266 ParameterCount expected(0);
1224 __ InvokeCode(r3, expected, expected, JUMP_FUNCTION, NullCallWrapper()); 1267 __ InvokeCode(ip, expected, expected, JUMP_FUNCTION, NullCallWrapper());
1225 } 1268 }
1226 1269
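Steps 3a-3c above decide whether the first argument needs to be rewritten before it becomes the receiver. The sloppy-mode patch, as a hedged sketch; Object and the predicates are stand-ins declared here for illustration only:

struct Object;
bool IsSmi(Object* o);
bool IsNullOrUndefined(Object* o);
bool IsSpecObject(Object* o);  // the FIRST_SPEC_OBJECT_TYPE check
Object* ToObject(Object* o);   // Builtins::TO_OBJECT
Object* GlobalProxy();         // the context's global proxy

void PatchReceiver(Object** first_arg, bool is_strict, bool is_native) {
  if (is_strict || is_native) return;  // receiver left untouched
  Object* r = *first_arg;
  if (!IsSmi(r)) {
    if (IsNullOrUndefined(r)) { *first_arg = GlobalProxy(); return; }
    if (IsSpecObject(r)) return;  // already a JS object
  }
  *first_arg = ToObject(r);  // box smis and other primitives
}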
1227 1270
1228 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { 1271 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1229 const int kIndexOffset = 1272 const int kIndexOffset =
1230 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); 1273 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1231 const int kLimitOffset = 1274 const int kLimitOffset =
1232 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); 1275 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1233 const int kArgsOffset = 2 * kPointerSize; 1276 const int kArgsOffset = 2 * kPointerSize;
1234 const int kRecvOffset = 3 * kPointerSize; 1277 const int kRecvOffset = 3 * kPointerSize;
1235 const int kFunctionOffset = 4 * kPointerSize; 1278 const int kFunctionOffset = 4 * kPointerSize;
1236 1279
1237 { 1280 {
1238 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); 1281 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
1239 1282
1240 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function 1283 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function
1241 __ push(r0); 1284 __ push(r3);
1242 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array 1285 __ LoadP(r3, MemOperand(fp, kArgsOffset)); // get the args array
1243 __ push(r0); 1286 __ push(r3);
1244 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); 1287 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1245 1288
1246 // Check the stack for overflow. We are not trying to catch 1289 // Check the stack for overflow. We are not trying to catch
1247 // interruptions (e.g. debug break and preemption) here, so the "real stack 1290 // interruptions (e.g. debug break and preemption) here, so the "real stack
1248 // limit" is checked. 1291 // limit" is checked.
1249 Label okay; 1292 Label okay;
1250 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex); 1293 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
1251 // Make r2 the space we have left. The stack might already be overflowed 1294 // Make r5 the space we have left. The stack might already be overflowed
1252 // here which will cause r2 to become negative. 1295 // here which will cause r5 to become negative.
1253 __ sub(r2, sp, r2); 1296 __ sub(r5, sp, r5);
1254 // Check if the arguments will overflow the stack. 1297 // Check if the arguments will overflow the stack.
1255 __ cmp(r2, Operand::PointerOffsetFromSmiKey(r0)); 1298 __ SmiToPtrArrayOffset(r0, r3);
1256 __ b(gt, &okay); // Signed comparison. 1299 __ cmp(r5, r0);
1300 __ bgt(&okay); // Signed comparison.
1257 1301
1258 // Out of stack space. 1302 // Out of stack space.
1259 __ ldr(r1, MemOperand(fp, kFunctionOffset)); 1303 __ LoadP(r4, MemOperand(fp, kFunctionOffset));
1260 __ Push(r1, r0); 1304 __ Push(r4, r3);
1261 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); 1305 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1262 // End of stack check. 1306 // End of stack check.
1263 1307
1264 // Push current limit and index. 1308 // Push current limit and index.
1265 __ bind(&okay); 1309 __ bind(&okay);
1266 __ push(r0); // limit 1310 __ li(r4, Operand::Zero());
1267 __ mov(r1, Operand::Zero()); // initial index 1311 __ Push(r3, r4); // limit and initial index.
1268 __ push(r1);
1269 1312
1270 // Get the receiver. 1313 // Get the receiver.
1271 __ ldr(r0, MemOperand(fp, kRecvOffset)); 1314 __ LoadP(r3, MemOperand(fp, kRecvOffset));
1272 1315
1273 // Check that the function is a JS function (otherwise it must be a proxy). 1316 // Check that the function is a JS function (otherwise it must be a proxy).
1274 Label push_receiver; 1317 Label push_receiver;
1275 __ ldr(r1, MemOperand(fp, kFunctionOffset)); 1318 __ LoadP(r4, MemOperand(fp, kFunctionOffset));
1276 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); 1319 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
1277 __ b(ne, &push_receiver); 1320 __ bne(&push_receiver);
1278 1321
1279 // Change context eagerly to get the right global object if necessary. 1322 // Change context eagerly to get the right global object if necessary.
1280 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); 1323 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
1281 // Load the shared function info while the function is still in r1. 1324 // Load the shared function info while the function is still in r4.
1282 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 1325 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1283 1326
1284 // Compute the receiver. 1327 // Compute the receiver.
1285 // Do not transform the receiver for strict mode functions. 1328 // Do not transform the receiver for strict mode functions.
1286 Label call_to_object, use_global_proxy; 1329 Label call_to_object, use_global_proxy;
1287 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); 1330 __ lwz(r5, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
1288 __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + 1331 __ TestBit(r5,
1289 kSmiTagSize))); 1332 #if V8_TARGET_ARCH_PPC64
1290 __ b(ne, &push_receiver); 1333 SharedFunctionInfo::kStrictModeFunction,
1334 #else
1335 SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
1336 #endif
1337 r0);
1338 __ bne(&push_receiver, cr0);
1291 1339
1292 // Do not transform the receiver for native (Compilerhints already in r2). 1340 // Do not transform the receiver for native (Compilerhints already in r5).
1293 __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); 1341 __ TestBit(r5,
1294 __ b(ne, &push_receiver); 1342 #if V8_TARGET_ARCH_PPC64
1343 SharedFunctionInfo::kNative,
1344 #else
1345 SharedFunctionInfo::kNative + kSmiTagSize,
1346 #endif
1347 r0);
1348 __ bne(&push_receiver, cr0);
1295 1349
1296 // Compute the receiver in sloppy mode. 1350 // Compute the receiver in sloppy mode.
1297 __ JumpIfSmi(r0, &call_to_object); 1351 __ JumpIfSmi(r3, &call_to_object);
1298 __ LoadRoot(r1, Heap::kNullValueRootIndex); 1352 __ LoadRoot(r4, Heap::kNullValueRootIndex);
1299 __ cmp(r0, r1); 1353 __ cmp(r3, r4);
1300 __ b(eq, &use_global_proxy); 1354 __ beq(&use_global_proxy);
1301 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 1355 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
1302 __ cmp(r0, r1); 1356 __ cmp(r3, r4);
1303 __ b(eq, &use_global_proxy); 1357 __ beq(&use_global_proxy);
1304 1358
1305 // Check if the receiver is already a JavaScript object. 1359 // Check if the receiver is already a JavaScript object.
1306 // r0: receiver 1360 // r3: receiver
1307 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 1361 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1308 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 1362 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
1309 __ b(ge, &push_receiver); 1363 __ bge(&push_receiver);
1310 1364
1311 // Convert the receiver to a regular object. 1365 // Convert the receiver to a regular object.
1312 // r0: receiver 1366 // r3: receiver
1313 __ bind(&call_to_object); 1367 __ bind(&call_to_object);
1314 __ push(r0); 1368 __ push(r3);
1315 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1369 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1316 __ b(&push_receiver); 1370 __ b(&push_receiver);
1317 1371
1318 __ bind(&use_global_proxy); 1372 __ bind(&use_global_proxy);
1319 __ ldr(r0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 1373 __ LoadP(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1320 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalProxyOffset)); 1374 __ LoadP(r3, FieldMemOperand(r3, GlobalObject::kGlobalProxyOffset));
1321 1375
1322 // Push the receiver. 1376 // Push the receiver.
1323 // r0: receiver 1377 // r3: receiver
1324 __ bind(&push_receiver); 1378 __ bind(&push_receiver);
1325 __ push(r0); 1379 __ push(r3);
1326 1380
1327 // Copy all arguments from the array to the stack. 1381 // Copy all arguments from the array to the stack.
1328 Label entry, loop; 1382 Label entry, loop;
1329 __ ldr(r0, MemOperand(fp, kIndexOffset)); 1383 __ LoadP(r3, MemOperand(fp, kIndexOffset));
1330 __ b(&entry); 1384 __ b(&entry);
1331 1385
1332 // Load the current argument from the arguments array and push it to the 1386 // Load the current argument from the arguments array and push it to the
1333 // stack. 1387 // stack.
1334 // r0: current argument index 1388 // r3: current argument index
1335 __ bind(&loop); 1389 __ bind(&loop);
1336 __ ldr(r1, MemOperand(fp, kArgsOffset)); 1390 __ LoadP(r4, MemOperand(fp, kArgsOffset));
1337 __ Push(r1, r0); 1391 __ Push(r4, r3);
1338 1392
1339 // Call the runtime to access the property in the arguments array. 1393 // Call the runtime to access the property in the arguments array.
1340 __ CallRuntime(Runtime::kGetProperty, 2); 1394 __ CallRuntime(Runtime::kGetProperty, 2);
1341 __ push(r0); 1395 __ push(r3);
1342 1396
1343 // Use inline caching to access the arguments. 1397 // Use inline caching to access the arguments.
1344 __ ldr(r0, MemOperand(fp, kIndexOffset)); 1398 __ LoadP(r3, MemOperand(fp, kIndexOffset));
1345 __ add(r0, r0, Operand(1 << kSmiTagSize)); 1399 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
1346 __ str(r0, MemOperand(fp, kIndexOffset)); 1400 __ StoreP(r3, MemOperand(fp, kIndexOffset));
1347 1401
1348 // Test if the copy loop has finished copying all the elements from the 1402 // Test if the copy loop has finished copying all the elements from the
1349 // arguments object. 1403 // arguments object.
1350 __ bind(&entry); 1404 __ bind(&entry);
1351 __ ldr(r1, MemOperand(fp, kLimitOffset)); 1405 __ LoadP(r4, MemOperand(fp, kLimitOffset));
1352 __ cmp(r0, r1); 1406 __ cmp(r3, r4);
1353 __ b(ne, &loop); 1407 __ bne(&loop);
1354 1408
1355 // Call the function. 1409 // Call the function.
1356 Label call_proxy; 1410 Label call_proxy;
1357 ParameterCount actual(r0); 1411 ParameterCount actual(r3);
1358 __ SmiUntag(r0); 1412 __ SmiUntag(r3);
1359 __ ldr(r1, MemOperand(fp, kFunctionOffset)); 1413 __ LoadP(r4, MemOperand(fp, kFunctionOffset));
1360 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); 1414 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
1361 __ b(ne, &call_proxy); 1415 __ bne(&call_proxy);
1362 __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper()); 1416 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
1363 1417
1364 frame_scope.GenerateLeaveFrame(); 1418 __ LeaveFrame(StackFrame::INTERNAL, 3 * kPointerSize);
1365 __ add(sp, sp, Operand(3 * kPointerSize)); 1419 __ blr();
1366 __ Jump(lr);
1367 1420
1368 // Call the function proxy. 1421 // Call the function proxy.
1369 __ bind(&call_proxy); 1422 __ bind(&call_proxy);
1370 __ push(r1); // add function proxy as last argument 1423 __ push(r4); // add function proxy as last argument
1371 __ add(r0, r0, Operand(1)); 1424 __ addi(r3, r3, Operand(1));
1372 __ mov(r2, Operand::Zero()); 1425 __ li(r5, Operand::Zero());
1373 __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY); 1426 __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
1374 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 1427 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1375 RelocInfo::CODE_TARGET); 1428 RelocInfo::CODE_TARGET);
1376 1429
1377 // Tear down the internal frame and remove function, receiver and args. 1430 // Tear down the internal frame and remove function, receiver and args.
1378 } 1431 }
1379 __ add(sp, sp, Operand(3 * kPointerSize)); 1432 __ addi(sp, sp, Operand(3 * kPointerSize));
1380 __ Jump(lr); 1433 __ blr();
1381 } 1434 }
1382 1435
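The copy loop in Generate_FunctionApply above issues one Runtime::kGetProperty call per element and pushes each result (the index stays a smi in the real code). In sketch form, with Object, Stack and GetProperty as illustrative stand-ins:

struct Object;
struct Stack { void Push(Object* value); };
Object* GetProperty(Object* args, int index);  // Runtime::kGetProperty

void PushApplyArguments(Object* args, int limit, Stack* stack) {
  for (int index = 0; index < limit; ++index) {
    stack->Push(GetProperty(args, index));  // may run arbitrary JS
  }
}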
1383 1436
1384 static void ArgumentAdaptorStackCheck(MacroAssembler* masm, 1437 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
1385 Label* stack_overflow) { 1438 Label* stack_overflow) {
1386 // ----------- S t a t e ------------- 1439 // ----------- S t a t e -------------
1387 // -- r0 : actual number of arguments 1440 // -- r3 : actual number of arguments
1388 // -- r1 : function (passed through to callee) 1441 // -- r4 : function (passed through to callee)
1389 // -- r2 : expected number of arguments 1442 // -- r5 : expected number of arguments
1390 // ----------------------------------- 1443 // -----------------------------------
1391 // Check the stack for overflow. We are not trying to catch 1444 // Check the stack for overflow. We are not trying to catch
1392 // interruptions (e.g. debug break and preemption) here, so the "real stack 1445 // interruptions (e.g. debug break and preemption) here, so the "real stack
1393 // limit" is checked. 1446 // limit" is checked.
1394 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex); 1447 __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
1395 // Make r5 the space we have left. The stack might already be overflowed 1448 // Make r8 the space we have left. The stack might already be overflowed
1396 // here which will cause r5 to become negative. 1449 // here which will cause r8 to become negative.
1397 __ sub(r5, sp, r5); 1450 __ sub(r8, sp, r8);
1398 // Check if the arguments will overflow the stack. 1451 // Check if the arguments will overflow the stack.
1399 __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2)); 1452 __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
1400 __ b(le, stack_overflow); // Signed comparison. 1453 __ cmp(r8, r0);
1454 __ ble(stack_overflow); // Signed comparison.
1401 } 1455 }
1402 1456
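The check above computes the headroom left below sp and compares it against the space the expected arguments will consume; the comparison is signed so that an sp already below the real limit fails immediately. Equivalent logic as a small, self-contained sketch:

#include <cstdint>

bool AdaptorFrameFits(intptr_t sp, intptr_t real_stack_limit,
                      intptr_t expected_args) {
  // Negative when sp is already past the limit, so the test fails.
  intptr_t headroom = sp - real_stack_limit;
  return headroom > expected_args * static_cast<intptr_t>(sizeof(void*));
}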
1403 1457
1404 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { 1458 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1405 __ SmiTag(r0); 1459 __ SmiTag(r3);
1406 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 1460 __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1407 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | 1461 __ mflr(r0);
1408 (FLAG_enable_ool_constant_pool ? pp.bit() : 0) | 1462 __ push(r0);
1409 fp.bit() | lr.bit()); 1463 #if V8_OOL_CONSTANT_POOL
1410 __ add(fp, sp, 1464 __ Push(fp, kConstantPoolRegister, r7, r4, r3);
1411 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize)); 1465 #else
1466 __ Push(fp, r7, r4, r3);
1467 #endif
1468 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1469 kPointerSize));
1412 } 1470 }
1413 1471
1414 1472
1415 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { 1473 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1416 // ----------- S t a t e ------------- 1474 // ----------- S t a t e -------------
1417 // -- r0 : result being passed through 1475 // -- r3 : result being passed through
1418 // ----------------------------------- 1476 // -----------------------------------
1419 // Get the number of arguments passed (as a smi), tear down the frame and 1477 // Get the number of arguments passed (as a smi), tear down the frame and
1420 // then tear down the parameters. 1478 // then tear down the parameters.
1421 __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + 1479 __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
1422 kPointerSize))); 1480 kPointerSize)));
1423 1481 int stack_adjustment = kPointerSize; // adjust for receiver
1424 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR); 1482 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
1425 __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1)); 1483 __ SmiToPtrArrayOffset(r0, r4);
1426 __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver 1484 __ add(sp, sp, r0);
1427 } 1485 }
1428 1486
1429 1487
1430 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { 1488 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1431 // ----------- S t a t e ------------- 1489 // ----------- S t a t e -------------
1432 // -- r0 : actual number of arguments 1490 // -- r3 : actual number of arguments
1433 // -- r1 : function (passed through to callee) 1491 // -- r4 : function (passed through to callee)
1434 // -- r2 : expected number of arguments 1492 // -- r5 : expected number of arguments
1435 // ----------------------------------- 1493 // -----------------------------------
1436 1494
1437 Label stack_overflow; 1495 Label stack_overflow;
1438 ArgumentAdaptorStackCheck(masm, &stack_overflow); 1496 ArgumentAdaptorStackCheck(masm, &stack_overflow);
1439 Label invoke, dont_adapt_arguments; 1497 Label invoke, dont_adapt_arguments;
1440 1498
1441 Label enough, too_few; 1499 Label enough, too_few;
1442 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); 1500 __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
1443 __ cmp(r0, r2); 1501 __ cmp(r3, r5);
1444 __ b(lt, &too_few); 1502 __ blt(&too_few);
1445 __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); 1503 __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1446 __ b(eq, &dont_adapt_arguments); 1504 __ beq(&dont_adapt_arguments);
1447 1505
1448 { // Enough parameters: actual >= expected 1506 { // Enough parameters: actual >= expected
1449 __ bind(&enough); 1507 __ bind(&enough);
1450 EnterArgumentsAdaptorFrame(masm); 1508 EnterArgumentsAdaptorFrame(masm);
1451 1509
1452 // Calculate copy start address into r0 and copy end address into r2. 1510 // Calculate copy start address into r3 and copy end address into r5.
1453 // r0: actual number of arguments as a smi 1511 // r3: actual number of arguments as a smi
1454 // r1: function 1512 // r4: function
1455 // r2: expected number of arguments 1513 // r5: expected number of arguments
1456 // r3: code entry to call 1514 // ip: code entry to call
1457 __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0)); 1515 __ SmiToPtrArrayOffset(r3, r3);
1516 __ add(r3, r3, fp);
1458 // adjust for return address and receiver 1517 // adjust for return address and receiver
1459 __ add(r0, r0, Operand(2 * kPointerSize)); 1518 __ addi(r3, r3, Operand(2 * kPointerSize));
1460 __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2)); 1519 __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
1520 __ sub(r5, r3, r5);
1461 1521
1462 // Copy the arguments (including the receiver) to the new stack frame. 1522 // Copy the arguments (including the receiver) to the new stack frame.
1463 // r0: copy start address 1523 // r3: copy start address
1464 // r1: function 1524 // r4: function
1465 // r2: copy end address 1525 // r5: copy end address
1466 // r3: code entry to call 1526 // ip: code entry to call
1467 1527
1468 Label copy; 1528 Label copy;
1469 __ bind(&copy); 1529 __ bind(&copy);
1470 __ ldr(ip, MemOperand(r0, 0)); 1530 __ LoadP(r0, MemOperand(r3, 0));
1471 __ push(ip); 1531 __ push(r0);
1472 __ cmp(r0, r2); // Compare before moving to next argument. 1532 __ cmp(r3, r5); // Compare before moving to next argument.
1473 __ sub(r0, r0, Operand(kPointerSize)); 1533 __ subi(r3, r3, Operand(kPointerSize));
1474 __ b(ne, &copy); 1534 __ bne(&copy);
1475 1535
1476 __ b(&invoke); 1536 __ b(&invoke);
1477 } 1537 }
1478 1538
1479 { // Too few parameters: Actual < expected 1539 { // Too few parameters: Actual < expected
1480 __ bind(&too_few); 1540 __ bind(&too_few);
1481 EnterArgumentsAdaptorFrame(masm); 1541 EnterArgumentsAdaptorFrame(masm);
1482 1542
1483 // Calculate copy start address into r0 and copy end address is fp. 1543 // Calculate copy start address into r0 and copy end address is fp.
1484 // r0: actual number of arguments as a smi 1544 // r3: actual number of arguments as a smi
1485 // r1: function 1545 // r4: function
1486 // r2: expected number of arguments 1546 // r5: expected number of arguments
1487 // r3: code entry to call 1547 // ip: code entry to call
1488 __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0)); 1548 __ SmiToPtrArrayOffset(r3, r3);
1549 __ add(r3, r3, fp);
1489 1550
1490 // Copy the arguments (including the receiver) to the new stack frame. 1551 // Copy the arguments (including the receiver) to the new stack frame.
1491 // r0: copy start address 1552 // r3: copy start address
1492 // r1: function 1553 // r4: function
1493 // r2: expected number of arguments 1554 // r5: expected number of arguments
1494 // r3: code entry to call 1555 // ip: code entry to call
1495 Label copy; 1556 Label copy;
1496 __ bind(&copy); 1557 __ bind(&copy);
1497 // Adjust load for return address and receiver. 1558 // Adjust load for return address and receiver.
1498 __ ldr(ip, MemOperand(r0, 2 * kPointerSize)); 1559 __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
1499 __ push(ip); 1560 __ push(r0);
1500 __ cmp(r0, fp); // Compare before moving to next argument. 1561 __ cmp(r3, fp); // Compare before moving to next argument.
1501 __ sub(r0, r0, Operand(kPointerSize)); 1562 __ subi(r3, r3, Operand(kPointerSize));
1502 __ b(ne, &copy); 1563 __ bne(&copy);
1503 1564
1504 // Fill the remaining expected arguments with undefined. 1565 // Fill the remaining expected arguments with undefined.
1505 // r1: function 1566 // r4: function
1506 // r2: expected number of arguments 1567 // r5: expected number of arguments
1507 // r3: code entry to call 1568 // ip: code entry to call
1508 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1569 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1509 __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2)); 1570 __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
1571 __ sub(r5, fp, r5);
1510 // Adjust for frame. 1572 // Adjust for frame.
1511 __ sub(r2, r2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + 1573 __ subi(r5, r5, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1512 2 * kPointerSize)); 1574 2 * kPointerSize));
1513 1575
1514 Label fill; 1576 Label fill;
1515 __ bind(&fill); 1577 __ bind(&fill);
1516 __ push(ip); 1578 __ push(r0);
1517 __ cmp(sp, r2); 1579 __ cmp(sp, r5);
1518 __ b(ne, &fill); 1580 __ bne(&fill);
1519 } 1581 }
1520 1582
1521 // Call the entry point. 1583 // Call the entry point.
1522 __ bind(&invoke); 1584 __ bind(&invoke);
1523 __ Call(r3); 1585 __ CallJSEntry(ip);
1524 1586
1525 // Store offset of return address for deoptimizer. 1587 // Store offset of return address for deoptimizer.
1526 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); 1588 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1527 1589
1528 // Exit frame and return. 1590 // Exit frame and return.
1529 LeaveArgumentsAdaptorFrame(masm); 1591 LeaveArgumentsAdaptorFrame(masm);
1530 __ Jump(lr); 1592 __ blr();
1531 1593
1532 1594
1533 // ------------------------------------------- 1595 // -------------------------------------------
1534 // Don't adapt arguments. 1596 // Don't adapt arguments.
1535 // ------------------------------------------- 1597 // -------------------------------------------
1536 __ bind(&dont_adapt_arguments); 1598 __ bind(&dont_adapt_arguments);
1537 __ Jump(r3); 1599 __ JumpToJSEntry(ip);
1538 1600
1539 __ bind(&stack_overflow); 1601 __ bind(&stack_overflow);
1540 { 1602 {
1541 FrameScope frame(masm, StackFrame::MANUAL); 1603 FrameScope frame(masm, StackFrame::MANUAL);
1542 EnterArgumentsAdaptorFrame(masm); 1604 EnterArgumentsAdaptorFrame(masm);
1543 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); 1605 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1544 __ bkpt(0); 1606 __ bkpt(0);
1545 } 1607 }
1546 } 1608 }
1547 1609
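Taken together, the trampoline's two paths copy min(actual, expected) arguments plus the receiver and pad any shortfall with undefined, so the callee always sees expected + 1 slots. A simplified model over a vector (the real copy walks the old frame top-down; kUndefined is a stand-in value):

#include <algorithm>
#include <cstdint>
#include <vector>

// 'incoming' holds the receiver followed by the actual arguments.
void AdaptArguments(const std::vector<intptr_t>& incoming, int expected,
                    intptr_t kUndefined, std::vector<intptr_t>* frame) {
  int actual = static_cast<int>(incoming.size()) - 1;
  int copied = std::min(actual, expected);
  for (int i = 0; i <= copied; ++i) frame->push_back(incoming[i]);
  for (int i = copied; i < expected; ++i) frame->push_back(kUndefined);
}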
1548 1610
1549 #undef __ 1611 #undef __
1612 }
1613 } // namespace v8::internal
1550 1614
1551 } } // namespace v8::internal 1615 #endif // V8_TARGET_ARCH_PPC
1552
1553 #endif // V8_TARGET_ARCH_ARM