Chromium Code Reviews

Side by Side Diff: src/ppc/builtins-ppc.cc

Issue 422063005: Contribution of PowerPC port. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: re-upload - catch up to 8/19 level | Created 6 years, 3 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 //
3 // Copyright IBM Corp. 2012, 2013. All rights reserved.
4 //
5 // Use of this source code is governed by a BSD-style license that can be
6 // found in the LICENSE file.
7
8 #include "src/v8.h"
9
10 #if V8_TARGET_ARCH_PPC
11
12 #include "src/codegen.h"
13 #include "src/debug.h"
14 #include "src/deoptimizer.h"
15 #include "src/full-codegen.h"
16 #include "src/runtime.h"
17 #include "src/stub-cache.h"
18
19 namespace v8 {
20 namespace internal {
21
22
23 #define __ ACCESS_MASM(masm)
24
25 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
26 BuiltinExtraArguments extra_args) {
27 // ----------- S t a t e -------------
28 // -- r3 : number of arguments excluding receiver
29 // -- r4 : called function (only guaranteed when
30 // extra_args requires it)
31 // -- cp : context
32 // -- sp[0] : last argument
33 // -- ...
34 // -- sp[4 * (argc - 1)] : first argument (argc == r3)
35 // -- sp[4 * argc] : receiver
36 // -----------------------------------
37
38 // Insert extra arguments.
39 int num_extra_args = 0;
40 if (extra_args == NEEDS_CALLED_FUNCTION) {
41 num_extra_args = 1;
42 __ push(r4);
43 } else {
44 DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
45 }
46
47 // JumpToExternalReference expects r3 to contain the number of arguments
48 // including the receiver and the extra arguments.
49 __ addi(r3, r3, Operand(num_extra_args + 1));
50 __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
51 }
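One concrete reading of the state comment above, assuming the 32-bit layout (4-byte pointers; under V8_TARGET_ARCH_PPC64 the stride would be 8): arg[0] sits deepest and the receiver sits below all arguments. A standalone C++ sketch of the same arithmetic, with illustrative values not taken from this patch:

    // Model of the incoming stack layout for Generate_Adaptor:
    // sp[0] is the last argument and the receiver sits below all arguments.
    #include <cstdio>

    int main() {
      const int kPointerSize = 4;  // 8 under V8_TARGET_ARCH_PPC64
      const int argc = 3;          // example argument count (held in r3)
      for (int n = 0; n < argc; n++) {
        // arg[n] (zero based) lives at sp[(argc - n - 1) * kPointerSize].
        std::printf("arg[%d]   -> sp[%d]\n", n, (argc - n - 1) * kPointerSize);
      }
      std::printf("receiver -> sp[%d]\n", argc * kPointerSize);
      return 0;
    }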
52
53
54 // Load the built-in InternalArray function from the current context.
55 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
56 Register result) {
57 // Load the native context.
58
59 __ LoadP(result,
60 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
61 __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
62 // Load the InternalArray function from the native context.
63 __ LoadP(result,
64 MemOperand(result, Context::SlotOffset(
65 Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
66 }
67
68
69 // Load the built-in Array function from the current context.
70 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
71 // Load the native context.
72
73 __ LoadP(result,
74 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
75 __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
76 // Load the Array function from the native context.
77 __ LoadP(
78 result,
79 MemOperand(result, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
80 }
81
82
83 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
84 // ----------- S t a t e -------------
85 // -- r3 : number of arguments
86 // -- lr : return address
87 // -- sp[...]: constructor arguments
88 // -----------------------------------
89 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
90
91 // Get the InternalArray function.
92 GenerateLoadInternalArrayFunction(masm, r4);
93
94 if (FLAG_debug_code) {
95 // Initial map for the builtin InternalArray functions should be maps.
96 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
97 __ TestIfSmi(r5, r0);
98 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
99 __ CompareObjectType(r5, r6, r7, MAP_TYPE);
100 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
101 }
102
103 // Run the native code for the InternalArray function called as a normal
104 // function.
105 // Tail call a stub.
106 InternalArrayConstructorStub stub(masm->isolate());
107 __ TailCallStub(&stub);
108 }
109
110
111 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
112 // ----------- S t a t e -------------
113 // -- r3 : number of arguments
114 // -- lr : return address
115 // -- sp[...]: constructor arguments
116 // -----------------------------------
117 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
118
119 // Get the Array function.
120 GenerateLoadArrayFunction(masm, r4);
121
122 if (FLAG_debug_code) {
123 // Initial map for the builtin Array functions should be maps.
124 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
125 __ TestIfSmi(r5, r0);
126 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
127 __ CompareObjectType(r5, r6, r7, MAP_TYPE);
128 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
129 }
130
131 // Run the native code for the Array function called as a normal function.
132 // Tail call a stub.
133 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
134 ArrayConstructorStub stub(masm->isolate());
135 __ TailCallStub(&stub);
136 }
137
138
139 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
140 // ----------- S t a t e -------------
141 // -- r3 : number of arguments
142 // -- r4 : constructor function
143 // -- lr : return address
144 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
145 // -- sp[argc * 4] : receiver
146 // -----------------------------------
147 Counters* counters = masm->isolate()->counters();
148 __ IncrementCounter(counters->string_ctor_calls(), 1, r5, r6);
149
150 Register function = r4;
151 if (FLAG_debug_code) {
152 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r5);
153 __ cmp(function, r5);
154 __ Assert(eq, kUnexpectedStringFunction);
155 }
156
157 // Load the first argument into r3 and get rid of the rest.
158 Label no_arguments;
159 __ cmpi(r3, Operand::Zero());
160 __ beq(&no_arguments);
161 // First arg = sp[(argc - 1) * 4].
162 __ subi(r3, r3, Operand(1));
163 __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
164 __ add(sp, sp, r3);
165 __ LoadP(r3, MemOperand(sp));
166 // sp now points to args[0]; drop args[0] and the receiver.
167 __ Drop(2);
168
169 Register argument = r5;
170 Label not_cached, argument_is_string;
171 __ LookupNumberStringCache(r3, // Input.
172 argument, // Result.
173 r6, // Scratch.
174 r7, // Scratch.
175 r8, // Scratch.
176 &not_cached);
177 __ IncrementCounter(counters->string_ctor_cached_number(), 1, r6, r7);
178 __ bind(&argument_is_string);
179
180 // ----------- S t a t e -------------
181 // -- r5 : argument converted to string
182 // -- r4 : constructor function
183 // -- lr : return address
184 // -----------------------------------
185
186 Label gc_required;
187 __ Allocate(JSValue::kSize,
188 r3, // Result.
189 r6, // Scratch.
190 r7, // Scratch.
191 &gc_required, TAG_OBJECT);
192
193 // Initialising the String Object.
194 Register map = r6;
195 __ LoadGlobalFunctionInitialMap(function, map, r7);
196 if (FLAG_debug_code) {
197 __ lbz(r7, FieldMemOperand(map, Map::kInstanceSizeOffset));
198 __ cmpi(r7, Operand(JSValue::kSize >> kPointerSizeLog2));
199 __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
200 __ lbz(r7, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
201 __ cmpi(r7, Operand::Zero());
202 __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
203 }
204 __ StoreP(map, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
205
206 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
207 __ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
208 __ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
209
210 __ StoreP(argument, FieldMemOperand(r3, JSValue::kValueOffset), r0);
211
212 // Ensure the object is fully initialized.
213 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
214
215 __ Ret();
216
217 // The argument was not found in the number to string cache. Check
218 // if it's a string already before calling the conversion builtin.
219 Label convert_argument;
220 __ bind(&not_cached);
221 __ JumpIfSmi(r3, &convert_argument);
222
223 // Is it a String?
224 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
225 __ lbz(r6, FieldMemOperand(r5, Map::kInstanceTypeOffset));
226 STATIC_ASSERT(kNotStringTag != 0);
227 __ andi(r0, r6, Operand(kIsNotStringMask));
228 __ bne(&convert_argument, cr0);
229 __ mr(argument, r3);
230 __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
231 __ b(&argument_is_string);
232
233 // Invoke the conversion builtin and put the result into r5.
234 __ bind(&convert_argument);
235 __ push(function); // Preserve the function.
236 __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
237 {
238 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
239 __ push(r3);
240 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
241 }
242 __ pop(function);
243 __ mr(argument, r3);
244 __ b(&argument_is_string);
245
246 // Load the empty string into r5, remove the receiver from the
247 // stack, and jump back to the case where the argument is a string.
248 __ bind(&no_arguments);
249 __ LoadRoot(argument, Heap::kempty_stringRootIndex);
250 __ Drop(1);
251 __ b(&argument_is_string);
252
253 // At this point the argument is already a string. Call runtime to
254 // create a string wrapper.
255 __ bind(&gc_required);
256 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7);
257 {
258 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
259 __ push(argument);
260 __ CallRuntime(Runtime::kNewStringWrapper, 1);
261 }
262 __ Ret();
263 }
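The STATIC_ASSERT above is what makes the four stores a complete initialization: a JSValue is exactly four tagged fields. A minimal struct model of that layout; the field names are illustrative, and the authoritative offsets live in objects.h:

    #include <cstddef>

    // Conceptual layout of a JSValue (a wrapped primitive such as a String
    // object); each field is one tagged pointer.
    struct JSValueModel {
      void* map;         // HeapObject::kMapOffset
      void* properties;  // JSObject::kPropertiesOffset (empty fixed array)
      void* elements;    // JSObject::kElementsOffset  (empty fixed array)
      void* value;       // JSValue::kValueOffset      (the wrapped string)
    };

    static_assert(sizeof(JSValueModel) == 4 * sizeof(void*),
                  "mirrors STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize)");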
264
265
266 static void CallRuntimePassFunction(MacroAssembler* masm,
267 Runtime::FunctionId function_id) {
268 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
269 // Push a copy of the function onto the stack.
270 // Push function as parameter to the runtime call.
271 __ Push(r4, r4);
272
273 __ CallRuntime(function_id, 1);
274 // Restore the function.
275 __ Pop(r4);
276 }
277
278
279 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
280 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
281 __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kCodeOffset));
282 __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
283 __ Jump(r5);
284 }
285
286
287 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
288 __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
289 __ Jump(r3);
290 }
291
292
293 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
294 // Checking whether the queued function is ready for install is optional,
295 // since we come across interrupts and stack checks elsewhere. However,
296 // not checking may delay installing ready functions, and always checking
297 // would be quite expensive. A good compromise is to first check against
299 // the stack limit as a cue for an interrupt signal.
299 Label ok;
300 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
301 __ cmpl(sp, ip);
302 __ bge(&ok);
303
304 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
305 GenerateTailCallToReturnedCode(masm);
306
307 __ bind(&ok);
308 GenerateTailCallToSharedCode(masm);
309 }
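For reference, the compromise described in the comment reduces to one unsigned compare. A hedged C++ model of the decision, with illustrative names:

    #include <stdint.h>

    // Model of the check above: V8 lowers the stack limit to request an
    // interrupt, so "sp below limit" doubles as a cheap cue that it is
    // worth asking the runtime whether optimized code is ready.
    bool ShouldTryInstallOptimizedCode(uintptr_t sp, uintptr_t stack_limit) {
      // cmpl/bge in the builtin skips the runtime call when sp >= limit.
      return sp < stack_limit;
    }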
310
311
312 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
313 bool is_api_function,
314 bool create_memento) {
315 // ----------- S t a t e -------------
316 // -- r3 : number of arguments
317 // -- r4 : constructor function
318 // -- r5 : allocation site or undefined
319 // -- lr : return address
320 // -- sp[...]: constructor arguments
321 // -----------------------------------
322
323 // Should never create mementos for api functions.
324 DCHECK(!is_api_function || !create_memento);
325
326 Isolate* isolate = masm->isolate();
327
328 // Enter a construct frame.
329 {
330 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
331
332 if (create_memento) {
333 __ AssertUndefinedOrAllocationSite(r5, r6);
334 __ push(r5);
335 }
336
337 // Preserve the two incoming parameters on the stack.
338 __ SmiTag(r3);
339 __ push(r3); // Smi-tagged arguments count.
340 __ push(r4); // Constructor function.
341
342 // Try to allocate the object without transitioning into C code. If any of
343 // the preconditions is not met, the code bails out to the runtime call.
344 Label rt_call, allocated;
345 if (FLAG_inline_new) {
346 Label undo_allocation;
347 ExternalReference debug_step_in_fp =
348 ExternalReference::debug_step_in_fp_address(isolate);
349 __ mov(r5, Operand(debug_step_in_fp));
350 __ LoadP(r5, MemOperand(r5));
351 __ cmpi(r5, Operand::Zero());
352 __ bne(&rt_call);
353
354 // Load the initial map and verify that it is in fact a map.
355 // r4: constructor function
356 __ LoadP(r5,
357 FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
358 __ JumpIfSmi(r5, &rt_call);
359 __ CompareObjectType(r5, r6, r7, MAP_TYPE);
360 __ bne(&rt_call);
361
362 // Check that the constructor is not constructing a JSFunction (see
363 // comments in Runtime_NewObject in runtime.cc); in that case the
364 // initial map's instance type would be JS_FUNCTION_TYPE.
365 // r4: constructor function
366 // r5: initial map
367 __ CompareInstanceType(r5, r6, JS_FUNCTION_TYPE);
368 __ beq(&rt_call);
369
370 if (!is_api_function) {
371 Label allocate;
372 MemOperand bit_field3 = FieldMemOperand(r5, Map::kBitField3Offset);
373 // Check if slack tracking is enabled.
374 __ lwz(r7, bit_field3);
375 __ DecodeField<Map::ConstructionCount>(r11, r7);
376 STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
377 __ cmpi(r11, Operand::Zero()); // JSFunction::kNoSlackTracking
378 __ beq(&allocate);
379 // Decrease generous allocation count.
380 __ Add(r7, r7, -(1 << Map::ConstructionCount::kShift), r0);
381 __ stw(r7, bit_field3);
382 __ cmpi(r11, Operand(JSFunction::kFinishSlackTracking));
383 __ bne(&allocate);
384
385 __ push(r4);
386
387 __ Push(r5, r4); // r4 = constructor
388 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
389
390 __ Pop(r4, r5);
391
392 __ bind(&allocate);
393 }
394
395 // Now allocate the JSObject on the heap.
396 // r4: constructor function
397 // r5: initial map
398 __ lbz(r6, FieldMemOperand(r5, Map::kInstanceSizeOffset));
399 if (create_memento) {
400 __ addi(r6, r6, Operand(AllocationMemento::kSize / kPointerSize));
401 }
402
403 __ Allocate(r6, r7, r8, r9, &rt_call, SIZE_IN_WORDS);
404
405 // Allocated the JSObject, now initialize the fields. Map is set to
406 // initial map and properties and elements are set to empty fixed array.
407 // r4: constructor function
408 // r5: initial map
409 // r6: object size (not including memento if create_memento)
410 // r7: JSObject (not tagged)
411 __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
412 __ mr(r8, r7);
413 __ StoreP(r5, MemOperand(r8, JSObject::kMapOffset));
414 __ StoreP(r9, MemOperand(r8, JSObject::kPropertiesOffset));
415 __ StoreP(r9, MemOperand(r8, JSObject::kElementsOffset));
416 __ addi(r8, r8, Operand(JSObject::kElementsOffset + kPointerSize));
417
418 __ ShiftLeftImm(r9, r6, Operand(kPointerSizeLog2));
419 __ add(r9, r7, r9); // End of object.
420
421 // Fill all the in-object properties with the appropriate filler.
422 // r4: constructor function
423 // r5: initial map
424 // r6: object size (in words, including memento if create_memento)
425 // r7: JSObject (not tagged)
426 // r8: First in-object property of JSObject (not tagged)
427 // r9: End of object
428 DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
429 __ LoadRoot(r10, Heap::kUndefinedValueRootIndex);
430
431 if (!is_api_function) {
432 Label no_inobject_slack_tracking;
433
434 // Check if slack tracking is enabled.
435 STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
436 __ cmpi(r11, Operand::Zero()); // JSFunction::kNoSlackTracking
437 __ beq(&no_inobject_slack_tracking);
438
439 // Allocate object with a slack.
440 __ lbz(r3, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset));
441 if (FLAG_debug_code) {
442 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
443 __ add(r0, r8, r0);
444 // r0: offset of first field after pre-allocated fields
445 __ cmp(r0, r9);
446 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
447 }
448 {
449 Label done;
450 __ cmpi(r3, Operand::Zero());
451 __ beq(&done);
452 __ InitializeNFieldsWithFiller(r8, r3, r10);
453 __ bind(&done);
454 }
455 // To allow for truncation.
456 __ LoadRoot(r10, Heap::kOnePointerFillerMapRootIndex);
457 // Fill the remaining fields with one pointer filler map.
458
459 __ bind(&no_inobject_slack_tracking);
460 }
461
462 if (create_memento) {
463 __ subi(r3, r9, Operand(AllocationMemento::kSize));
464 __ InitializeFieldsWithFiller(r8, r3, r10);
465
466 // Fill in memento fields.
467 // r8: points to the allocated but uninitialized memento.
468 __ LoadRoot(r10, Heap::kAllocationMementoMapRootIndex);
469 __ StoreP(r10, MemOperand(r8, AllocationMemento::kMapOffset));
470 // Load the AllocationSite
471 __ LoadP(r10, MemOperand(sp, 2 * kPointerSize));
472 __ StoreP(r10,
473 MemOperand(r8, AllocationMemento::kAllocationSiteOffset));
474 __ addi(r8, r8, Operand(AllocationMemento::kAllocationSiteOffset +
475 kPointerSize));
476 } else {
477 __ InitializeFieldsWithFiller(r8, r9, r10);
478 }
479
480 // Add the object tag to make the JSObject real, so that we can continue
481 // and jump into the continuation code at any time from now on. Any
482 // failures need to undo the allocation, so that the heap is in a
483 // consistent state and verifiable.
484 __ addi(r7, r7, Operand(kHeapObjectTag));
485
486 // Check if a non-empty properties array is needed. Continue with
487 // the allocated object if not; fall through to the runtime call if it is.
488 // r4: constructor function
489 // r7: JSObject
490 // r8: start of next object (not tagged)
491 __ lbz(r6, FieldMemOperand(r5, Map::kUnusedPropertyFieldsOffset));
492 // The field counts in the map include both pre-allocated property fields
493 // and in-object properties.
494 __ lbz(r0, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset));
495 __ add(r6, r6, r0);
496 __ lbz(r0, FieldMemOperand(r5, Map::kInObjectPropertiesOffset));
497 __ sub(r6, r6, r0, LeaveOE, SetRC);
498
499 // Done if no extra properties are to be allocated.
500 __ beq(&allocated, cr0);
501 __ Assert(ge, kPropertyAllocationCountFailed, cr0);
502
503 // Scale the number of elements by pointer size and add the header for
504 // FixedArrays to the start of the next object calculation from above.
505 // r4: constructor
506 // r6: number of elements in properties array
507 // r7: JSObject
508 // r8: start of next object
509 __ addi(r3, r6, Operand(FixedArray::kHeaderSize / kPointerSize));
510 __ Allocate(
511 r3, r8, r9, r5, &undo_allocation,
512 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
513
514 // Initialize the FixedArray.
515 // r4: constructor
516 // r6: number of elements in properties array
517 // r7: JSObject
518 // r8: FixedArray (not tagged)
519 __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
520 __ mr(r5, r8);
521 DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
522 __ StoreP(r9, MemOperand(r5));
523 DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
524 __ SmiTag(r3, r6);
525 __ StorePU(r3, MemOperand(r5, kPointerSize));
526 __ addi(r5, r5, Operand(kPointerSize));
527
528 // Initialize the fields to undefined.
529 // r4: constructor function
530 // r5: First element of FixedArray (not tagged)
531 // r6: number of elements in properties array
532 // r7: JSObject
533 // r8: FixedArray (not tagged)
534 DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
535 {
536 Label done;
537 __ cmpi(r6, Operand::Zero());
538 __ beq(&done);
539 if (!is_api_function || create_memento) {
540 __ LoadRoot(r10, Heap::kUndefinedValueRootIndex);
541 } else if (FLAG_debug_code) {
542 __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
543 __ cmp(r10, r11);
544 __ Assert(eq, kUndefinedValueNotLoaded);
545 }
546 __ InitializeNFieldsWithFiller(r5, r6, r10);
547 __ bind(&done);
548 }
549
550 // Store the initialized FixedArray into the properties field of
551 // the JSObject
552 // r4: constructor function
553 // r7: JSObject
554 // r8: FixedArray (not tagged)
555 __ addi(r8, r8, Operand(kHeapObjectTag)); // Add the heap tag.
556 __ StoreP(r8, FieldMemOperand(r7, JSObject::kPropertiesOffset), r0);
557
558 // Continue with JSObject being successfully allocated
559 // r4: constructor function
560 // r7: JSObject
561 __ b(&allocated);
562
563 // Undo the setting of the new top so that the heap is verifiable. For
564 // example, the map's unused properties potentially do not match the
565 // allocated objects unused properties.
566 // r7: JSObject (previous new top)
567 __ bind(&undo_allocation);
568 __ UndoAllocationInNewSpace(r7, r8);
569 }
570
571 // Allocate the new receiver object using the runtime call.
572 // r4: constructor function
573 __ bind(&rt_call);
574 if (create_memento) {
575 // Get the cell or allocation site.
576 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
577 __ push(r5);
578 }
579
580 __ push(r4); // argument for Runtime_NewObject
581 if (create_memento) {
582 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
583 } else {
584 __ CallRuntime(Runtime::kNewObject, 1);
585 }
586 __ mr(r7, r3);
587
588 // If we ended up using the runtime, and we want a memento, then the
589 // runtime call made it for us, and we shouldn't increment the memento
590 // create count.
591 Label count_incremented;
592 if (create_memento) {
593 __ b(&count_incremented);
594 }
595
596 // Receiver for constructor call allocated.
597 // r7: JSObject
598 __ bind(&allocated);
599
600 if (create_memento) {
601 __ LoadP(r5, MemOperand(sp, kPointerSize * 2));
602 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
603 __ cmp(r5, r8);
604 __ beq(&count_incremented);
605 // r5 is an AllocationSite. We are creating a memento from it, so we
606 // need to increment the memento create count.
607 __ LoadP(
608 r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset));
609 __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
610 __ StoreP(
611 r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset),
612 r0);
613 __ bind(&count_incremented);
614 }
615
616 __ Push(r7, r7);
617
618 // Reload the number of arguments and the constructor from the stack.
619 // sp[0]: receiver
620 // sp[1]: receiver
621 // sp[2]: constructor function
622 // sp[3]: number of arguments (smi-tagged)
623 __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));
624 __ LoadP(r6, MemOperand(sp, 3 * kPointerSize));
625
626 // Set up pointer to last argument.
627 __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));
628
629 // Set up number of arguments for function call below
630 __ SmiUntag(r3, r6);
631
632 // Copy arguments and receiver to the expression stack.
633 // r3: number of arguments
634 // r4: constructor function
635 // r5: address of last argument (caller sp)
636 // r6: number of arguments (smi-tagged)
637 // sp[0]: receiver
638 // sp[1]: receiver
639 // sp[2]: constructor function
640 // sp[3]: number of arguments (smi-tagged)
641 Label loop, no_args;
642 __ cmpi(r3, Operand::Zero());
643 __ beq(&no_args);
644 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
645 __ mtctr(r3);
646 __ bind(&loop);
647 __ subi(ip, ip, Operand(kPointerSize));
648 __ LoadPX(r0, MemOperand(r5, ip));
649 __ push(r0);
650 __ bdnz(&loop);
651 __ bind(&no_args);
652
653 // Call the function.
654 // r3: number of arguments
655 // r4: constructor function
656 if (is_api_function) {
657 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
658 Handle<Code> code = masm->isolate()->builtins()->HandleApiCallConstruct();
659 __ Call(code, RelocInfo::CODE_TARGET);
660 } else {
661 ParameterCount actual(r3);
662 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
663 }
664
665 // Store offset of return address for deoptimizer.
666 if (!is_api_function) {
667 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
668 }
669
670 // Restore context from the frame.
671 // r3: result
672 // sp[0]: receiver
673 // sp[1]: constructor function
674 // sp[2]: number of arguments (smi-tagged)
675 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
676
677 // If the result is an object (in the ECMA sense), we should get rid
678 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
679 // on page 74.
680 Label use_receiver, exit;
681
682 // If the result is a smi, it is *not* an object in the ECMA sense.
683 // r3: result
684 // sp[0]: receiver (newly allocated object)
685 // sp[1]: constructor function
686 // sp[2]: number of arguments (smi-tagged)
687 __ JumpIfSmi(r3, &use_receiver);
688
689 // If the type of the result (stored in its map) is less than
690 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
691 __ CompareObjectType(r3, r4, r6, FIRST_SPEC_OBJECT_TYPE);
692 __ bge(&exit);
693
694 // Throw away the result of the constructor invocation and use the
695 // on-stack receiver as the result.
696 __ bind(&use_receiver);
697 __ LoadP(r3, MemOperand(sp));
698
699 // Remove receiver from the stack, remove caller arguments, and
700 // return.
701 __ bind(&exit);
702 // r3: result
703 // sp[0]: receiver (newly allocated object)
704 // sp[1]: constructor function
705 // sp[2]: number of arguments (smi-tagged)
706 __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));
707
708 // Leave construct frame.
709 }
710
711 __ SmiToPtrArrayOffset(r4, r4);
712 __ add(sp, sp, r4);
713 __ addi(sp, sp, Operand(kPointerSize));
714 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
715 __ blr();
716 }
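One detail of the construct stub worth spelling out: when create_memento is set, the allocation reserves AllocationMemento::kSize extra space and the memento is carved off the end of the allocation, which is why the filler stops short of r9. A rough model under the assumption that a memento is a map pointer plus an allocation-site pointer (kMementoSize here is a hypothetical stand-in):

    #include <cassert>

    // Conceptual split of one allocation into "object" + trailing memento,
    // mirroring the subi(r3, r9, AllocationMemento::kSize) step above.
    struct Allocation {
      char* start;  // r7 before tagging
      char* end;    // r9
    };

    const int kMementoSize = 2 * sizeof(void*);  // hypothetical size

    char* MementoStart(const Allocation& a) {
      assert(a.end - a.start >= kMementoSize);
      return a.end - kMementoSize;  // fields below this get the filler
    }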
717
718
719 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
720 Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
721 }
722
723
724 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
725 Generate_JSConstructStubHelper(masm, true, false);
726 }
727
728
729 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
730 bool is_construct) {
731 // Called from Generate_JS_Entry
732 // r3: code entry
733 // r4: function
734 // r5: receiver
735 // r6: argc
736 // r7: argv
737 // r0, r8-r9, cp may be clobbered
738 ProfileEntryHookStub::MaybeCallEntryHook(masm);
739
740 // Clear the context before we push it when entering the internal frame.
741 __ li(cp, Operand::Zero());
742
743 // Enter an internal frame.
744 {
745 FrameScope scope(masm, StackFrame::INTERNAL);
746
747 // Set up the context from the function argument.
748 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
749
750 __ InitializeRootRegister();
751
752 // Push the function and the receiver onto the stack.
753 __ push(r4);
754 __ push(r5);
755
756 // Copy arguments to the stack in a loop.
757 // r4: function
758 // r6: argc
759 // r7: argv, i.e. points to first arg
760 Label loop, entry;
761 __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
762 __ add(r5, r7, r0);
763 // r5 points past last arg.
764 __ b(&entry);
765 __ bind(&loop);
766 __ LoadP(r8, MemOperand(r7)); // read next parameter
767 __ addi(r7, r7, Operand(kPointerSize));
768 __ LoadP(r0, MemOperand(r8)); // dereference handle
769 __ push(r0); // push parameter
770 __ bind(&entry);
771 __ cmp(r7, r5);
772 __ bne(&loop);
773
774 // Initialize all JavaScript callee-saved registers, since they will be seen
775 // by the garbage collector as part of handlers.
776 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
777 __ mr(r14, r7);
778 __ mr(r15, r7);
779 __ mr(r16, r7);
780 __ mr(r17, r7);
781
782 // Invoke the code and pass argc as r3.
783 __ mr(r3, r6);
784 if (is_construct) {
785 // No type feedback cell is available
786 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
787 CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
788 __ CallStub(&stub);
789 } else {
790 ParameterCount actual(r3);
791 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
792 }
793 // Exit the JS frame, remove the parameters (except the function), and
794 // return.
795 }
796 __ blr();
797
798 // r3: result
799 }
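In the copy loop of this trampoline, argv holds handle locations rather than raw objects, hence the two loads per argument. A small C++ model of the same two-step walk; the Object/Handle typedefs are illustrative:

    #include <vector>

    typedef void* Object;    // a tagged pointer
    typedef Object* Handle;  // a handle is the address of a slot

    // Model of the copy loop above: each argument costs one load to fetch
    // the handle out of argv and one load to dereference it.
    void CopyArguments(Handle* argv, int argc, std::vector<Object>* stack) {
      Handle* end = argv + argc;  // r5: points past the last argument
      for (Handle* p = argv; p != end; ++p) {
        Handle h = *p;            // "read next parameter"
        stack->push_back(*h);     // "dereference handle", then push
      }
    }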
800
801
802 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
803 Generate_JSEntryTrampolineHelper(masm, false);
804 }
805
806
807 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
808 Generate_JSEntryTrampolineHelper(masm, true);
809 }
810
811
812 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
813 CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
814 GenerateTailCallToReturnedCode(masm);
815 }
816
817
818 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
819 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
820 // Push a copy of the function onto the stack.
821 // Push function as parameter to the runtime call.
822 __ Push(r4, r4);
823 // Whether to compile in a background thread.
824 __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
825
826 __ CallRuntime(Runtime::kCompileOptimized, 2);
827 // Restore the function.
828 __ pop(r4);
829 }
830
831
832 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
833 CallCompileOptimized(masm, false);
834 GenerateTailCallToReturnedCode(masm);
835 }
836
837
838 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
839 CallCompileOptimized(masm, true);
840 GenerateTailCallToReturnedCode(masm);
841 }
842
843
844 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
845 // For now, we are relying on the fact that make_code_young doesn't do any
846 // garbage collection which allows us to save/restore the registers without
847 // worrying about which of them contain pointers. We also don't build an
848 // internal frame to make the code faster, since we shouldn't have to do stack
849 // crawls in MakeCodeYoung. This seems a bit fragile.
850
851 __ mflr(r3);
852 // Adjust r3 to point to the start of the PlatformCodeAge sequence
853 __ subi(r3, r3, Operand(kCodeAgingPatchDelta));
854
855 // The following registers must be saved and restored when calling through to
856 // the runtime:
857 // r3 - contains return address (beginning of patch sequence)
858 // r4 - isolate
859 // ip - return address
860 FrameScope scope(masm, StackFrame::MANUAL);
861 __ MultiPush(ip.bit() | r3.bit() | r4.bit() | fp.bit());
862 __ PrepareCallCFunction(2, 0, r5);
863 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
864 __ CallCFunction(
865 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
866 __ MultiPop(ip.bit() | r3.bit() | r4.bit() | fp.bit());
867 __ mtlr(ip);
868 __ Jump(r3);
869 }
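The mflr/subi pair at the top recovers the patch target from the return address. A one-function sketch, with a hypothetical delta value (the real kCodeAgingPatchDelta is platform-defined):

    #include <stdint.h>

    const uintptr_t kCodeAgingPatchDelta = 16;  // illustrative value

    // Model of mflr(r3) followed by subi(r3, r3, kCodeAgingPatchDelta):
    // the link register points just past the call made from inside the
    // code-age sequence, so backing up by the delta yields the first byte
    // of the sequence that the C function will re-patch.
    uintptr_t AgeSequenceStart(uintptr_t return_address) {
      return return_address - kCodeAgingPatchDelta;
    }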
870
871 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
872 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
873 MacroAssembler* masm) { \
874 GenerateMakeCodeYoungAgainCommon(masm); \
875 } \
876 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
877 MacroAssembler* masm) { \
878 GenerateMakeCodeYoungAgainCommon(masm); \
879 }
880 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
881 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
882
883
884 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
885 // For now, we are relying on the fact that make_code_young doesn't do any
886 // garbage collection which allows us to save/restore the registers without
887 // worrying about which of them contain pointers. We also don't build an
888 // internal frame to make the code faster, since we shouldn't have to do stack
889 // crawls in MakeCodeYoung. This seems a bit fragile.
890
891 __ mflr(r3);
892 // Adjust r3 to point to the start of the PlatformCodeAge sequence
893 __ subi(r3, r3, Operand(kCodeAgingPatchDelta));
894
895 // The following registers must be saved and restored when calling through to
896 // the runtime:
897 // r3 - contains return address (beginning of patch sequence)
898 // r4 - isolate
899 // ip - return address
900 FrameScope scope(masm, StackFrame::MANUAL);
901 __ MultiPush(ip.bit() | r3.bit() | r4.bit() | fp.bit());
902 __ PrepareCallCFunction(2, 0, r5);
903 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
904 __ CallCFunction(
905 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
906 2);
907 __ MultiPop(ip.bit() | r3.bit() | r4.bit() | fp.bit());
908 __ mtlr(ip);
909
910 // Perform prologue operations usually performed by the young code stub.
911 __ PushFixedFrame(r4);
912 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
913
914 // Jump to point after the code-age stub.
915 __ addi(r3, r3, Operand(kNoCodeAgeSequenceLength));
916 __ Jump(r3);
917 }
918
919
920 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
921 GenerateMakeCodeYoungAgainCommon(masm);
922 }
923
924
925 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
926 SaveFPRegsMode save_doubles) {
927 {
928 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
929
930 // Preserve registers across notification, this is important for compiled
931 // stubs that tail call the runtime on deopts passing their parameters in
932 // registers.
933 __ MultiPush(kJSCallerSaved | kCalleeSaved);
934 // Pass the function and deoptimization type to the runtime system.
935 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
936 __ MultiPop(kJSCallerSaved | kCalleeSaved);
937 }
938
939 __ addi(sp, sp, Operand(kPointerSize)); // Ignore state
940 __ blr(); // Jump to miss handler
941 }
942
943
944 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
945 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
946 }
947
948
949 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
950 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
951 }
952
953
954 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
955 Deoptimizer::BailoutType type) {
956 {
957 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
958 // Pass the function and deoptimization type to the runtime system.
959 __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
960 __ push(r3);
961 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
962 }
963
964 // Get the full codegen state from the stack and untag it -> r9.
965 __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
966 __ SmiUntag(r9);
967 // Switch on the state.
968 Label with_tos_register, unknown_state;
969 __ cmpi(r9, Operand(FullCodeGenerator::NO_REGISTERS));
970 __ bne(&with_tos_register);
971 __ addi(sp, sp, Operand(1 * kPointerSize)); // Remove state.
972 __ Ret();
973
974 __ bind(&with_tos_register);
975 __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
976 __ cmpi(r9, Operand(FullCodeGenerator::TOS_REG));
977 __ bne(&unknown_state);
978 __ addi(sp, sp, Operand(2 * kPointerSize)); // Remove state.
979 __ Ret();
980
981 __ bind(&unknown_state);
982 __ stop("no cases left");
983 }
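The untagged state in r9 decides how much of the notification frame to drop: NO_REGISTERS leaves nothing live, while TOS_REG means the saved top-of-stack value must be reloaded into r3 first. A compact model of that dispatch, with illustrative enum values standing in for FullCodeGenerator's:

    #include <stdlib.h>

    enum State { NO_REGISTERS, TOS_REG };  // illustrative stand-ins

    // Model of the dispatch after Runtime::kNotifyDeoptimized returns:
    // 'stack' points at the saved state word; the return value is the new
    // stack top.
    void** HandleState(void** stack, State state, void** tos_out) {
      switch (state) {
        case NO_REGISTERS:
          return stack + 1;     // drop the state word only
        case TOS_REG:
          *tos_out = stack[1];  // reload the saved top-of-stack value (r3)
          return stack + 2;     // drop the state word and the saved value
      }
      abort();                  // "no cases left"
    }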
984
985
986 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
987 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
988 }
989
990
991 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
992 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
993 }
994
995
996 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
997 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
998 }
999
1000
1001 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1002 // Lookup the function in the JavaScript frame.
1003 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1004 {
1005 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1006 // Pass function as argument.
1007 __ push(r3);
1008 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1009 }
1010
1011 // If the code object is null, just return to the unoptimized code.
1012 Label skip;
1013 __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
1014 __ bne(&skip);
1015 __ Ret();
1016
1017 __ bind(&skip);
1018
1019 // Load deoptimization data from the code object.
1020 // <deopt_data> = <code>[#deoptimization_data_offset]
1021 __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));
1022
1023 #if V8_OOL_CONSTANT_POOL
1024 {
1025 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
1026 __ LoadP(kConstantPoolRegister,
1027 FieldMemOperand(r3, Code::kConstantPoolOffset));
1028 #endif
1029
1030 // Load the OSR entrypoint offset from the deoptimization data.
1031 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1032 __ LoadP(r4, FieldMemOperand(
1033 r4, FixedArray::OffsetOfElementAt(
1034 DeoptimizationInputData::kOsrPcOffsetIndex)));
1035 __ SmiUntag(r4);
1036
1037 // Compute the target address = code_obj + header_size + osr_offset
1038 // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1039 __ add(r3, r3, r4);
1040 __ addi(r0, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
1041 __ mtlr(r0);
1042
1043 // And "return" to the OSR entry point of the function.
1044 __ Ret();
1045 #if V8_OOL_CONSTANT_POOL
1046 }
1047 #endif
1048 }
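The last few instructions implement the formula from the comments directly. A minimal arithmetic sketch, with illustrative constants (the real ones come from objects.h):

    #include <stdint.h>

    const uintptr_t kHeapObjectTag = 1;                // illustrative
    const uintptr_t kCodeHeaderSize = 5 * sizeof(void*);  // illustrative

    // <entry_addr> = <code_obj> + #header_size + <osr_offset>, where the
    // code object pointer still carries its heap-object tag.
    uintptr_t OsrEntryAddress(uintptr_t tagged_code_obj, uintptr_t osr_offset) {
      return tagged_code_obj + osr_offset + (kCodeHeaderSize - kHeapObjectTag);
    }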
1049
1050
1051 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1052 // We check the stack limit as an indicator that recompilation might be done.
1053 Label ok;
1054 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
1055 __ cmpl(sp, ip);
1056 __ bge(&ok);
1057 {
1058 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1059 __ CallRuntime(Runtime::kStackGuard, 0);
1060 }
1061 __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1062 RelocInfo::CODE_TARGET);
1063
1064 __ bind(&ok);
1065 __ Ret();
1066 }
1067
1068
1069 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1070 // 1. Make sure we have at least one argument.
1071 // r3: actual number of arguments
1072 {
1073 Label done;
1074 __ cmpi(r3, Operand::Zero());
1075 __ bne(&done);
1076 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
1077 __ push(r5);
1078 __ addi(r3, r3, Operand(1));
1079 __ bind(&done);
1080 }
1081
1082 // 2. Get the function to call (passed as receiver) from the stack, check
1083 // if it is a function.
1084 // r3: actual number of arguments
1085 Label slow, non_function;
1086 __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2));
1087 __ add(r4, sp, r4);
1088 __ LoadP(r4, MemOperand(r4));
1089 __ JumpIfSmi(r4, &non_function);
1090 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
1091 __ bne(&slow);
1092
1093 // 3a. Patch the first argument if necessary when calling a function.
1094 // r3: actual number of arguments
1095 // r4: function
1096 Label shift_arguments;
1097 __ li(r7, Operand::Zero()); // indicate regular JS_FUNCTION
1098 {
1099 Label convert_to_object, use_global_proxy, patch_receiver;
1100 // Change context eagerly in case we need the global receiver.
1101 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
1102
1103 // Do not transform the receiver for strict mode functions.
1104 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1105 __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
1106 __ TestBit(r6,
1107 #if V8_TARGET_ARCH_PPC64
1108 SharedFunctionInfo::kStrictModeFunction,
1109 #else
1110 SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
1111 #endif
1112 r0);
1113 __ bne(&shift_arguments, cr0);
1114
1115 // Do not transform the receiver for natives (compiler hints already in r6).
1116 __ TestBit(r6,
1117 #if V8_TARGET_ARCH_PPC64
1118 SharedFunctionInfo::kNative,
1119 #else
1120 SharedFunctionInfo::kNative + kSmiTagSize,
1121 #endif
1122 r0);
1123 __ bne(&shift_arguments, cr0);
1124
1125 // Compute the receiver in sloppy mode.
1126 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
1127 __ add(r5, sp, ip);
1128 __ LoadP(r5, MemOperand(r5, -kPointerSize));
1129 // r3: actual number of arguments
1130 // r4: function
1131 // r5: first argument
1132 __ JumpIfSmi(r5, &convert_to_object);
1133
1134 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
1135 __ cmp(r5, r6);
1136 __ beq(&use_global_proxy);
1137 __ LoadRoot(r6, Heap::kNullValueRootIndex);
1138 __ cmp(r5, r6);
1139 __ beq(&use_global_proxy);
1140
1141 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1142 __ CompareObjectType(r5, r6, r6, FIRST_SPEC_OBJECT_TYPE);
1143 __ bge(&shift_arguments);
1144
1145 __ bind(&convert_to_object);
1146
1147 {
1148 // Enter an internal frame in order to preserve argument count.
1149 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1150 __ SmiTag(r3);
1151 __ Push(r3, r5);
1152 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1153 __ mr(r5, r3);
1154
1155 __ pop(r3);
1156 __ SmiUntag(r3);
1157
1158 // Exit the internal frame.
1159 }
1160
1161 // Restore the function to r4, and the flag to r7.
1162 __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
1163 __ add(r7, sp, r7);
1164 __ LoadP(r4, MemOperand(r7));
1165 __ li(r7, Operand::Zero());
1166 __ b(&patch_receiver);
1167
1168 __ bind(&use_global_proxy);
1169 __ LoadP(r5, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1170 __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));
1171
1172 __ bind(&patch_receiver);
1173 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
1174 __ add(r6, sp, ip);
1175 __ StoreP(r5, MemOperand(r6, -kPointerSize));
1176
1177 __ b(&shift_arguments);
1178 }
1179
1180 // 3b. Check for function proxy.
1181 __ bind(&slow);
1182 __ li(r7, Operand(1, RelocInfo::NONE32)); // indicate function proxy
1183 __ cmpi(r5, Operand(JS_FUNCTION_PROXY_TYPE));
1184 __ beq(&shift_arguments);
1185 __ bind(&non_function);
1186 __ li(r7, Operand(2, RelocInfo::NONE32)); // indicate non-function
1187
1188 // 3c. Patch the first argument when calling a non-function. The
1189 // CALL_NON_FUNCTION builtin expects the non-function callee as
1190 // receiver, so overwrite the first argument which will ultimately
1191 // become the receiver.
1192 // r3: actual number of arguments
1193 // r4: function
1194 // r7: call type (0: JS function, 1: function proxy, 2: non-function)
1195 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
1196 __ add(r5, sp, ip);
1197 __ StoreP(r4, MemOperand(r5, -kPointerSize));
1198
1199 // 4. Shift arguments and return address one slot down on the stack
1200 // (overwriting the original receiver). Adjust argument count to make
1201 // the original first argument the new receiver.
1202 // r3: actual number of arguments
1203 // r4: function
1204 // r7: call type (0: JS function, 1: function proxy, 2: non-function)
1205 __ bind(&shift_arguments);
1206 {
1207 Label loop;
1208 // Calculate the copy start address (destination). Copy end address is sp.
1209 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
1210 __ add(r5, sp, ip);
1211
1212 __ bind(&loop);
1213 __ LoadP(ip, MemOperand(r5, -kPointerSize));
1214 __ StoreP(ip, MemOperand(r5));
1215 __ subi(r5, r5, Operand(kPointerSize));
1216 __ cmp(r5, sp);
1217 __ bne(&loop);
1218 // Adjust the actual number of arguments and remove the top element
1219 // (which is a copy of the last argument).
1220 __ subi(r3, r3, Operand(1));
1221 __ pop();
1222 }
1223
1224 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1225 // or a function proxy via CALL_FUNCTION_PROXY.
1226 // r3: actual number of arguments
1227 // r4: function
1228 // r7: call type (0: JS function, 1: function proxy, 2: non-function)
1229 {
1230 Label function, non_proxy;
1231 __ cmpi(r7, Operand::Zero());
1232 __ beq(&function);
1233 // Expected number of arguments is 0 for CALL_NON_FUNCTION.
1234 __ li(r5, Operand::Zero());
1235 __ cmpi(r7, Operand(1));
1236 __ bne(&non_proxy);
1237
1238 __ push(r4); // re-add proxy object as additional argument
1239 __ addi(r3, r3, Operand(1));
1240 __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
1241 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1242 RelocInfo::CODE_TARGET);
1243
1244 __ bind(&non_proxy);
1245 __ GetBuiltinFunction(r4, Builtins::CALL_NON_FUNCTION);
1246 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1247 RelocInfo::CODE_TARGET);
1248 __ bind(&function);
1249 }
1250
1251 // 5b. Get the code to call from the function and check that the number of
1252 // expected arguments matches what we're providing. If so, jump
1253 // (tail-call) to the code in register r6 without checking arguments.
1254 // r3: actual number of arguments
1255 // r4: function
1256 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1257 __ LoadWordArith(
1258 r5, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
1259 #if !V8_TARGET_ARCH_PPC64
1260 __ SmiUntag(r5);
1261 #endif
1262 __ cmp(r5, r3); // Check formal and actual parameter counts.
1263 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1264 RelocInfo::CODE_TARGET, ne);
1265
1266 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
1267 ParameterCount expected(0);
1268 __ InvokeCode(r6, expected, expected, JUMP_FUNCTION, NullCallWrapper());
1269 }
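Step 4's loop is the subtle part of this builtin: it slides every slot one position so the original first argument ends up in the receiver slot, then pops the stale top element. A vector-based model of the same shift, with index 0 as the stack top:

    #include <vector>

    // Model of the shift_arguments loop: index 0 is the top of stack (the
    // last argument) and index argc is the receiver slot. Each slot is
    // overwritten with its neighbor one slot closer to the top, so the
    // original first argument lands in the receiver slot.
    void ShiftArguments(std::vector<void*>* stack, int* argc) {
      for (int i = *argc; i >= 1; --i) {
        (*stack)[i] = (*stack)[i - 1];  // the LoadP/StoreP pair in the loop
      }
      stack->erase(stack->begin());     // pop(): drop the duplicated top
      --*argc;                          // one argument became the receiver
    }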
1270
1271
1272 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1273 const int kIndexOffset =
1274 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1275 const int kLimitOffset =
1276 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1277 const int kArgsOffset = 2 * kPointerSize;
1278 const int kRecvOffset = 3 * kPointerSize;
1279 const int kFunctionOffset = 4 * kPointerSize;
1280
1281 {
1282 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
1283
1284 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function
1285 __ push(r3);
1286 __ LoadP(r3, MemOperand(fp, kArgsOffset)); // get the args array
1287 __ push(r3);
1288 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1289
1290 // Check the stack for overflow. We are not trying to catch
1291 // interruptions (e.g. debug break and preemption) here, so the "real stack
1292 // limit" is checked.
1293 Label okay;
1294 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
1295 // Make r5 the space we have left. The stack might already be overflowed
1296 // here which will cause r5 to become negative.
1297 __ sub(r5, sp, r5);
1298 // Check if the arguments will overflow the stack.
1299 __ SmiToPtrArrayOffset(r0, r3);
1300 __ cmp(r5, r0);
1301 __ bgt(&okay); // Signed comparison.
1302
1303 // Out of stack space.
1304 __ LoadP(r4, MemOperand(fp, kFunctionOffset));
1305 __ Push(r4, r3);
1306 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1307 // End of stack check.
1308
1309 // Push current limit and index.
1310 __ bind(&okay);
1311 __ li(r4, Operand::Zero());
1312 __ Push(r3, r4); // limit and initial index.
1313
1314 // Get the receiver.
1315 __ LoadP(r3, MemOperand(fp, kRecvOffset));
1316
1317 // Check that the function is a JS function (otherwise it must be a proxy).
1318 Label push_receiver;
1319 __ LoadP(r4, MemOperand(fp, kFunctionOffset));
1320 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
1321 __ bne(&push_receiver);
1322
1323 // Change context eagerly to get the right global object if necessary.
1324 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
1325 // Load the shared function info while the function is still in r4.
1326 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1327
1328 // Compute the receiver.
1329 // Do not transform the receiver for strict mode functions.
1330 Label call_to_object, use_global_proxy;
1331 __ lwz(r5, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
1332 __ TestBit(r5,
1333 #if V8_TARGET_ARCH_PPC64
1334 SharedFunctionInfo::kStrictModeFunction,
1335 #else
1336 SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
1337 #endif
1338 r0);
1339 __ bne(&push_receiver, cr0);
1340
1341 // Do not transform the receiver for natives (compiler hints already in r5).
1342 __ TestBit(r5,
1343 #if V8_TARGET_ARCH_PPC64
1344 SharedFunctionInfo::kNative,
1345 #else
1346 SharedFunctionInfo::kNative + kSmiTagSize,
1347 #endif
1348 r0);
1349 __ bne(&push_receiver, cr0);
1350
1351 // Compute the receiver in sloppy mode.
1352 __ JumpIfSmi(r3, &call_to_object);
1353 __ LoadRoot(r4, Heap::kNullValueRootIndex);
1354 __ cmp(r3, r4);
1355 __ beq(&use_global_proxy);
1356 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
1357 __ cmp(r3, r4);
1358 __ beq(&use_global_proxy);
1359
1360 // Check if the receiver is already a JavaScript object.
1361 // r3: receiver
1362 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1363 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
1364 __ bge(&push_receiver);
1365
1366 // Convert the receiver to a regular object.
1367 // r3: receiver
1368 __ bind(&call_to_object);
1369 __ push(r3);
1370 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1371 __ b(&push_receiver);
1372
1373 __ bind(&use_global_proxy);
1374 __ LoadP(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1375 __ LoadP(r3, FieldMemOperand(r3, GlobalObject::kGlobalProxyOffset));
1376
1377 // Push the receiver.
1378 // r3: receiver
1379 __ bind(&push_receiver);
1380 __ push(r3);
1381
1382 // Copy all arguments from the array to the stack.
1383 Label entry, loop;
1384 __ LoadP(r3, MemOperand(fp, kIndexOffset));
1385 __ b(&entry);
1386
1387 // Load the current argument from the arguments array and push it to the
1388 // stack.
1389 // r3: current argument index
1390 __ bind(&loop);
1391 __ LoadP(r4, MemOperand(fp, kArgsOffset));
1392 __ Push(r4, r3);
1393
1394 // Call the runtime to access the property in the arguments array.
1395 __ CallRuntime(Runtime::kGetProperty, 2);
1396 __ push(r3);
1397
1398 // Advance the smi-tagged index on the stack.
1399 __ LoadP(r3, MemOperand(fp, kIndexOffset));
1400 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
1401 __ StoreP(r3, MemOperand(fp, kIndexOffset));
1402
1403 // Test if the copy loop has finished copying all the elements from the
1404 // arguments object.
1405 __ bind(&entry);
1406 __ LoadP(r4, MemOperand(fp, kLimitOffset));
1407 __ cmp(r3, r4);
1408 __ bne(&loop);
1409
1410 // Call the function.
1411 Label call_proxy;
1412 ParameterCount actual(r3);
1413 __ SmiUntag(r3);
1414 __ LoadP(r4, MemOperand(fp, kFunctionOffset));
1415 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
1416 __ bne(&call_proxy);
1417 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
1418
1419 frame_scope.GenerateLeaveFrame();
1420 __ addi(sp, sp, Operand(3 * kPointerSize));
1421 __ blr();
1422
1423 // Call the function proxy.
1424 __ bind(&call_proxy);
1425 __ push(r4); // add function proxy as last argument
1426 __ addi(r3, r3, Operand(1));
1427 __ li(r5, Operand::Zero());
1428 __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
1429 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1430 RelocInfo::CODE_TARGET);
1431
1432 // Tear down the internal frame and remove function, receiver and args.
1433 }
1434 __ addi(sp, sp, Operand(3 * kPointerSize));
1435 __ blr();
1436 }
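Seen from above, Function.prototype.apply is lowered to a runtime-assisted loop over the arguments array. A high-level model, where GetElement is a stand-in for the Runtime::kGetProperty call made once per iteration:

    #include <vector>

    typedef void* Object;

    // Stand-in for the Runtime::kGetProperty runtime call.
    Object GetElement(const std::vector<Object>& args, int index) {
      return args[index];
    }

    // Model of the copy loop above: the smi-tagged index and limit live in
    // two expression-stack slots of the frame; each pass fetches
    // args[index], pushes it, and bumps the index until it meets the limit.
    void PushApplyArguments(const std::vector<Object>& args, int limit,
                            std::vector<Object>* stack) {
      for (int index = 0; index < limit; ++index) {
        stack->push_back(GetElement(args, index));
      }
    }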
1437
1438
1439 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
1440 Label* stack_overflow) {
1441 // ----------- S t a t e -------------
1442 // -- r3 : actual number of arguments
1443 // -- r4 : function (passed through to callee)
1444 // -- r5 : expected number of arguments
1445 // -----------------------------------
1446 // Check the stack for overflow. We are not trying to catch
1447 // interruptions (e.g. debug break and preemption) here, so the "real stack
1448 // limit" is checked.
1449 __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
1450 // Make r8 the space we have left. The stack might already be overflowed
1451 // here which will cause r8 to become negative.
1452 __ sub(r8, sp, r8);
1453 // Check if the arguments will overflow the stack.
1454 __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
1455 __ cmp(r8, r0);
1456 __ ble(stack_overflow); // Signed comparison.
1457 }
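The check is done in differences so it stays correct even when sp has already dropped below the real limit (r8 then goes negative and the signed compare still routes to stack_overflow). A minimal model:

    #include <stdint.h>

    // Model of ArgumentAdaptorStackCheck: headroom may already be negative
    // if the stack is blown, which the signed comparison handles naturally.
    bool AdaptorWouldOverflow(intptr_t sp, intptr_t real_stack_limit,
                              intptr_t expected_args) {
      intptr_t headroom = sp - real_stack_limit;  // sub(r8, sp, r8)
      intptr_t needed =
          expected_args * static_cast<intptr_t>(sizeof(void*));
      return headroom <= needed;                  // ble(stack_overflow)
    }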
1458
1459
1460 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1461 __ SmiTag(r3);
1462 __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1463 __ mflr(r0);
1464 __ push(r0);
1465 #if V8_OOL_CONSTANT_POOL
1466 __ Push(fp, kConstantPoolRegister, r7, r4, r3);
1467 #else
1468 __ Push(fp, r7, r4, r3);
1469 #endif
1470 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1471 kPointerSize));
1472 }
1473
1474
1475 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1476 // ----------- S t a t e -------------
1477 // -- r3 : result being passed through
1478 // -----------------------------------
1479 // Get the number of arguments passed (as a smi), tear down the frame and
1480 // then tear down the parameters.
1481 __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
1482 kPointerSize)));
1483 // We could use LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR) here;
1484 // however, the sequence below is slightly more efficient.
1485 #if V8_OOL_CONSTANT_POOL
1486 __ addi(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
1487 __ LoadP(kConstantPoolRegister, MemOperand(sp));
1488 __ LoadP(fp, MemOperand(sp, kPointerSize));
1489 __ LoadP(r0, MemOperand(sp, 2 * kPointerSize));
1490 int slots = 3; // adjust for kConstantPoolRegister + fp + lr below
1491 #else
1492 __ mr(sp, fp);
1493 __ LoadP(fp, MemOperand(sp));
1494 __ LoadP(r0, MemOperand(sp, kPointerSize));
1495 int slots = 2; // adjust for fp + lr below
1496 #endif
1497 __ mtlr(r0);
1498 __ SmiToPtrArrayOffset(r0, r4);
1499 __ add(sp, sp, r0);
1500 __ addi(sp, sp,
1501 Operand((1 + slots) * kPointerSize)); // adjust for receiver + others
1502 }
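The hand-rolled teardown above saves a few instructions over LeaveFrame by folding the receiver, the saved slots, and the arguments into a single sp adjustment. A sketch of that final bump, where saved_slots is 2 in the non-OOL case and 3 with the constant pool register:

    #include <stdint.h>

    // Model of the final sp adjustment: after fp and lr are reloaded, sp
    // still covers the saved slots, the arguments, and the receiver, all
    // of which are dropped at once.
    intptr_t FinalSpBump(intptr_t arg_count, intptr_t saved_slots) {
      return (arg_count + 1 + saved_slots) *
             static_cast<intptr_t>(sizeof(void*));
    }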
1503
1504
1505 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1506 // ----------- S t a t e -------------
1507 // -- r3 : actual number of arguments
1508 // -- r4 : function (passed through to callee)
1509 // -- r5 : expected number of arguments
1510 // -----------------------------------
1511
1512 Label stack_overflow;
1513 ArgumentAdaptorStackCheck(masm, &stack_overflow);
1514 Label invoke, dont_adapt_arguments;
1515
1516 Label enough, too_few;
1517 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
1518 __ cmp(r3, r5);
1519 __ blt(&too_few);
1520 __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1521 __ beq(&dont_adapt_arguments);
1522
1523 { // Enough parameters: actual >= expected
1524 __ bind(&enough);
1525 EnterArgumentsAdaptorFrame(masm);
1526
1527 // Calculate copy start address into r3 and copy end address into r5.
1528 // r3: actual number of arguments as a smi
1529 // r4: function
1530 // r5: expected number of arguments
1531 // r6: code entry to call
1532 __ SmiToPtrArrayOffset(r3, r3);
1533 __ add(r3, r3, fp);
1534 // adjust for return address and receiver
1535 __ addi(r3, r3, Operand(2 * kPointerSize));
1536 __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
1537 __ sub(r5, r3, r5);
1538
1539 // Copy the arguments (including the receiver) to the new stack frame.
1540 // r3: copy start address
1541 // r4: function
1542 // r5: copy end address
1543 // r6: code entry to call
1544
1545 Label copy;
1546 __ bind(&copy);
1547 __ LoadP(ip, MemOperand(r3, 0));
1548 __ push(ip);
1549 __ cmp(r3, r5); // Compare before moving to next argument.
1550 __ subi(r3, r3, Operand(kPointerSize));
1551 __ bne(&copy);
1552
1553 __ b(&invoke);
1554 }
1555
1556 { // Too few parameters: Actual < expected
1557 __ bind(&too_few);
1558 EnterArgumentsAdaptorFrame(masm);
1559
1560 // Calculate the copy start address into r3; the copy end address is fp.
1561 // r3: actual number of arguments as a smi
1562 // r4: function
1563 // r5: expected number of arguments
1564 // r6: code entry to call
1565 __ SmiToPtrArrayOffset(r3, r3);
1566 __ add(r3, r3, fp);
1567
1568 // Copy the arguments (including the receiver) to the new stack frame.
1569 // r3: copy start address
1570 // r4: function
1571 // r5: expected number of arguments
1572 // r6: code entry to call
1573 Label copy;
1574 __ bind(&copy);
1575 // Adjust load for return address and receiver.
1576 __ LoadP(ip, MemOperand(r3, 2 * kPointerSize));
1577 __ push(ip);
1578 __ cmp(r3, fp); // Compare before moving to next argument.
1579 __ subi(r3, r3, Operand(kPointerSize));
1580 __ bne(&copy);
1581
1582 // Fill the remaining expected arguments with undefined.
1583 // r4: function
1584 // r5: expected number of arguments
1585 // r6: code entry to call
1586 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1587 __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
1588 __ sub(r5, fp, r5);
1589 // Adjust for frame.
1590 __ subi(r5, r5, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1591 2 * kPointerSize));
1592
1593 Label fill;
1594 __ bind(&fill);
1595 __ push(ip);
1596 __ cmp(sp, r5);
1597 __ bne(&fill);
1598 }
1599
1600 // Call the entry point.
1601 __ bind(&invoke);
1602 __ Call(r6);
1603
1604 // Store offset of return address for deoptimizer.
1605 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1606
1607 // Exit frame and return.
1608 LeaveArgumentsAdaptorFrame(masm);
1609 __ blr();
1610
1611
1612 // -------------------------------------------
1613 // Don't adapt arguments.
1614 // -------------------------------------------
1615 __ bind(&dont_adapt_arguments);
1616 __ Jump(r6);
1617
1618 __ bind(&stack_overflow);
1619 {
1620 FrameScope frame(masm, StackFrame::MANUAL);
1621 EnterArgumentsAdaptorFrame(masm);
1622 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1623 __ bkpt(0);
1624 }
1625 }
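Taken together, the enough/too-few paths guarantee the callee a frame with exactly the expected number of arguments. A high-level model of what the adaptor presents to the callee; types and names are illustrative:

    #include <cstddef>
    #include <vector>

    typedef void* Object;

    // The callee always sees exactly 'expected' arguments (plus receiver):
    // surplus actuals stay outside the adaptor frame, and missing ones are
    // filled with undefined.
    std::vector<Object> AdaptArguments(const std::vector<Object>& actual,
                                       std::size_t expected, Object undefined) {
      std::vector<Object> adapted;
      for (std::size_t i = 0; i < expected; ++i) {
        adapted.push_back(i < actual.size() ? actual[i] : undefined);
      }
      return adapted;
    }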
1626
1627
1628 #undef __
1629 }
1630 } // namespace v8::internal
1631
1632 #endif // V8_TARGET_ARCH_PPC