Chromium Code Reviews

Side by Side Diff: src/x64/builtins-x64.cc

Issue 5862002: Version 3.0.2. (Closed)
Patch Set: Created 10 years ago
1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
(... 12 unchanged lines skipped ...)
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #if defined(V8_TARGET_ARCH_X64)
31
32 #include "codegen-inl.h"
- 33 #include "deoptimizer.h"
- 34 #include "full-codegen.h"
+ 33 #include "macro-assembler.h"
35
36 namespace v8 {
37 namespace internal {
38
- 39
40 #define __ ACCESS_MASM(masm)
41
42
43 void Builtins::Generate_Adaptor(MacroAssembler* masm,
44 CFunctionId id,
45 BuiltinExtraArguments extra_args) {
46 // ----------- S t a t e -------------
47 // -- rax : number of arguments excluding receiver
48 // -- rdi : called function (only guaranteed when
49 // extra_args requires it)
(... 16 unchanged lines skipped ...)
66 ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
67 }
68
69 // JumpToExternalReference expects rax to contain the number of arguments
70 // including the receiver and the extra arguments.
71 __ addq(rax, Immediate(num_extra_args + 1));
72 __ JumpToExternalReference(ExternalReference(id), 1);
73 }
74
75
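The addq above is the one piece of arithmetic in Generate_Adaptor and is easy to misread: rax arrives excluding the receiver, but JumpToExternalReference wants it included, together with any extra arguments pushed here. A minimal C++ restatement (illustrative helper, not V8 API):

    // Count handed to JumpToExternalReference; the +1 is the receiver.
    // num_extra_args is 1 when extra_args made the stub push rdi above.
    inline int TotalArgumentCount(int js_argc, int num_extra_args) {
      return js_argc + num_extra_args + 1;
    }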
74 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
75 __ push(rbp);
76 __ movq(rbp, rsp);
77
78 // Store the arguments adaptor context sentinel.
79 __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
80
81 // Push the function on the stack.
82 __ push(rdi);
83
84 // Preserve the number of arguments on the stack. Must preserve both
85 // rax and rbx because these registers are used when copying the
86 // arguments and the receiver.
87 __ Integer32ToSmi(rcx, rax);
88 __ push(rcx);
89 }
90
91
92 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
93 // Retrieve the number of arguments from the stack. Number is a Smi.
94 __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
95
96 // Leave the frame.
97 __ movq(rsp, rbp);
98 __ pop(rbp);
99
100 // Remove caller arguments from the stack.
101 __ pop(rcx);
102 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
103 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
104 __ push(rcx);
105 }
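For readers new to this frame, the two helpers above agree on the following slot layout. The struct below is a plain C++ sketch listed from higher to lower addresses; the rbp offsets in the comments are what the pushes produce on x64, and the type is illustrative, not V8's ArgumentsAdaptorFrameConstants:

    #include <cstdint>

    // Slots around rbp after EnterArgumentsAdaptorFrame; the length slot is
    // what LeaveArgumentsAdaptorFrame reads back through
    // ArgumentsAdaptorFrameConstants::kLengthOffset.
    struct AdaptorFrameSketch {
      intptr_t caller_pc;  // [rbp + 8]  return address
      intptr_t caller_fp;  // [rbp + 0]  saved rbp
      intptr_t sentinel;   // [rbp - 8]  Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)
      intptr_t function;   // [rbp - 16] rdi
      intptr_t length;     // [rbp - 24] argument count from rax, as a Smi
    };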
106
107
108 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
109 // ----------- S t a t e -------------
110 // -- rax : actual number of arguments
111 // -- rbx : expected number of arguments
112 // -- rdx : code entry to call
113 // -----------------------------------
114
115 Label invoke, dont_adapt_arguments;
116 __ IncrementCounter(&Counters::arguments_adaptors, 1);
117
118 Label enough, too_few;
119 __ cmpq(rax, rbx);
120 __ j(less, &too_few);
121 __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
122 __ j(equal, &dont_adapt_arguments);
123
124 { // Enough parameters: Actual >= expected.
125 __ bind(&enough);
126 EnterArgumentsAdaptorFrame(masm);
127
128 // Copy receiver and all expected arguments.
129 const int offset = StandardFrameConstants::kCallerSPOffset;
130 __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
131 __ movq(rcx, Immediate(-1)); // account for receiver
132
133 Label copy;
134 __ bind(&copy);
135 __ incq(rcx);
136 __ push(Operand(rax, 0));
137 __ subq(rax, Immediate(kPointerSize));
138 __ cmpq(rcx, rbx);
139 __ j(less, &copy);
140 __ jmp(&invoke);
141 }
142
143 { // Too few parameters: Actual < expected.
144 __ bind(&too_few);
145 EnterArgumentsAdaptorFrame(masm);
146
147 // Copy receiver and all actual arguments.
148 const int offset = StandardFrameConstants::kCallerSPOffset;
149 __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
150 __ movq(rcx, Immediate(-1)); // account for receiver
151
152 Label copy;
153 __ bind(&copy);
154 __ incq(rcx);
155 __ push(Operand(rdi, 0));
156 __ subq(rdi, Immediate(kPointerSize));
157 __ cmpq(rcx, rax);
158 __ j(less, &copy);
159
160 // Fill remaining expected arguments with undefined values.
161 Label fill;
162 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
163 __ bind(&fill);
164 __ incq(rcx);
165 __ push(kScratchRegister);
166 __ cmpq(rcx, rbx);
167 __ j(less, &fill);
168
169 // Restore function pointer.
170 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
171 }
172
173 // Call the entry point.
174 __ bind(&invoke);
175 __ call(rdx);
176
177 // Leave frame and return.
178 LeaveArgumentsAdaptorFrame(masm);
179 __ ret(0);
180
181 // -------------------------------------------
182 // Don't adapt arguments.
183 // -------------------------------------------
184 __ bind(&dont_adapt_arguments);
185 __ jmp(rdx);
186 }
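Behind the two copy loops and the fill loop sits one simple policy: the callee always sees exactly the expected number of arguments. A hedged C++ restatement (Value and the function name are placeholders, not V8 types):

    #include <algorithm>
    #include <vector>

    using Value = const void*;  // stand-in for a tagged pointer

    // What the &enough and &too_few paths compute: copy up to `expected`
    // actual arguments, then pad with undefined (the &fill loop).
    std::vector<Value> AdaptArguments(const std::vector<Value>& actual,
                                      size_t expected, Value undefined) {
      std::vector<Value> adapted(
          actual.begin(), actual.begin() + std::min(actual.size(), expected));
      adapted.resize(expected, undefined);
      return adapted;
    }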
187
188
189 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
190 // Stack Layout:
191 // rsp[0]: Return address
192 // rsp[1]: Argument n
193 // rsp[2]: Argument n-1
194 // ...
195 // rsp[n]: Argument 1
196 // rsp[n+1]: Receiver (function to call)
197 //
198 // rax contains the number of arguments, n, not counting the receiver.
199 //
200 // 1. Make sure we have at least one argument.
201 { Label done;
202 __ testq(rax, rax);
203 __ j(not_zero, &done);
204 __ pop(rbx);
205 __ Push(Factory::undefined_value());
206 __ push(rbx);
207 __ incq(rax);
208 __ bind(&done);
209 }
210
211 // 2. Get the function to call (passed as receiver) from the stack, check
212 // if it is a function.
213 Label non_function;
214 // The function to call is at position n+1 on the stack.
215 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
216 __ JumpIfSmi(rdi, &non_function);
217 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
218 __ j(not_equal, &non_function);
219
220 // 3a. Patch the first argument if necessary when calling a function.
221 Label shift_arguments;
222 { Label convert_to_object, use_global_receiver, patch_receiver;
223 // Change context eagerly in case we need the global receiver.
224 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
225
226 __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
227 __ JumpIfSmi(rbx, &convert_to_object);
228
229 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
230 __ j(equal, &use_global_receiver);
231 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
232 __ j(equal, &use_global_receiver);
233
234 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
235 __ j(below, &convert_to_object);
236 __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
237 __ j(below_equal, &shift_arguments);
238
239 __ bind(&convert_to_object);
240 __ EnterInternalFrame(); // In order to preserve argument count.
241 __ Integer32ToSmi(rax, rax);
242 __ push(rax);
243
244 __ push(rbx);
245 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
246 __ movq(rbx, rax);
247
248 __ pop(rax);
249 __ SmiToInteger32(rax, rax);
250 __ LeaveInternalFrame();
251 // Restore the function to rdi.
252 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
253 __ jmp(&patch_receiver);
254
255 // Use the global receiver object from the called function as the
256 // receiver.
257 __ bind(&use_global_receiver);
258 const int kGlobalIndex =
259 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
260 __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
261 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
262 __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
263 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
264
265 __ bind(&patch_receiver);
266 __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);
267
268 __ jmp(&shift_arguments);
269 }
270
271
272 // 3b. Patch the first argument when calling a non-function. The
273 // CALL_NON_FUNCTION builtin expects the non-function callee as
274 // receiver, so overwrite the first argument which will ultimately
275 // become the receiver.
276 __ bind(&non_function);
277 __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi);
278 __ xor_(rdi, rdi);
279
280 // 4. Shift arguments and return address one slot down on the stack
281 // (overwriting the original receiver). Adjust argument count to make
282 // the original first argument the new receiver.
283 __ bind(&shift_arguments);
284 { Label loop;
285 __ movq(rcx, rax);
286 __ bind(&loop);
287 __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
288 __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
289 __ decq(rcx);
290 __ j(not_sign, &loop); // While non-negative (to copy return address).
291 __ pop(rbx); // Discard copy of return address.
292 __ decq(rax); // One fewer argument (first argument is new receiver).
293 }
294
295 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin.
296 { Label function;
297 __ testq(rdi, rdi);
298 __ j(not_zero, &function);
299 __ xor_(rbx, rbx);
300 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
301 __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
302 RelocInfo::CODE_TARGET);
303 __ bind(&function);
304 }
305
306 // 5b. Get the code to call from the function and check that the number of
307 // expected arguments matches what we're providing. If so, jump
308 // (tail-call) to the code in register rdx without checking arguments.
309 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
310 __ movsxlq(rbx,
311 FieldOperand(rdx,
312 SharedFunctionInfo::kFormalParameterCountOffset));
313 __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
314 __ cmpq(rax, rbx);
315 __ j(not_equal,
316 Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
317 RelocInfo::CODE_TARGET);
318
319 ParameterCount expected(0);
320 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION);
321 }
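Steps 3a/3b condense to a small decision table on the receiver. A sketch of the policy (illustrative enum and accessors; the stub itself branches on instance types):

    enum class ReceiverKind { kNullOrUndefined, kJSObject, kPrimitive };

    template <typename Context, typename Value>
    Value ResolveReceiver(ReceiverKind kind, Value receiver, Context& cx) {
      switch (kind) {
        case ReceiverKind::kNullOrUndefined:
          return cx.global_receiver();    // the &use_global_receiver path
        case ReceiverKind::kJSObject:
          return receiver;                // straight to &shift_arguments
        default:
          return cx.to_object(receiver);  // the &convert_to_object path
      }
    }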
322
323
324 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
325 // Stack at entry:
326 // rsp: return address
327 // rsp+8: arguments
328 // rsp+16: receiver ("this")
329 // rsp+24: function
330 __ EnterInternalFrame();
331 // Stack frame:
332 // rbp: Old base pointer
333 // rbp[1]: return address
334 // rbp[2]: function arguments
335 // rbp[3]: receiver
336 // rbp[4]: function
337 static const int kArgumentsOffset = 2 * kPointerSize;
338 static const int kReceiverOffset = 3 * kPointerSize;
339 static const int kFunctionOffset = 4 * kPointerSize;
340 __ push(Operand(rbp, kFunctionOffset));
341 __ push(Operand(rbp, kArgumentsOffset));
342 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
343
344 // Check the stack for overflow. We do not need to catch
345 // interruptions (e.g. debug break and preemption) here, so the "real stack
346 // limit" is checked.
347 Label okay;
348 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
349 __ movq(rcx, rsp);
350 // Make rcx the space we have left. The stack might already be overflowed
351 // here which will cause rcx to become negative.
352 __ subq(rcx, kScratchRegister);
353 // Make rdx the space we need for the array when it is unrolled onto the
354 // stack.
355 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
356 // Check if the arguments will overflow the stack.
357 __ cmpq(rcx, rdx);
358 __ j(greater, &okay); // Signed comparison.
359
360 // Out of stack space.
361 __ push(Operand(rbp, kFunctionOffset));
362 __ push(rax);
363 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
364 __ bind(&okay);
365 // End of stack check.
366
367 // Push current index and limit.
368 const int kLimitOffset =
369 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
370 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
371 __ push(rax); // limit
372 __ push(Immediate(0)); // index
373
374 // Change context eagerly to get the right global object if
375 // necessary.
376 __ movq(rdi, Operand(rbp, kFunctionOffset));
377 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
378
379 // Compute the receiver.
380 Label call_to_object, use_global_receiver, push_receiver;
381 __ movq(rbx, Operand(rbp, kReceiverOffset));
382 __ JumpIfSmi(rbx, &call_to_object);
383 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
384 __ j(equal, &use_global_receiver);
385 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
386 __ j(equal, &use_global_receiver);
387
388 // If given receiver is already a JavaScript object then there's no
389 // reason for converting it.
390 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
391 __ j(below, &call_to_object);
392 __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
393 __ j(below_equal, &push_receiver);
394
395 // Convert the receiver to an object.
396 __ bind(&call_to_object);
397 __ push(rbx);
398 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
399 __ movq(rbx, rax);
400 __ jmp(&push_receiver);
401
402 // Use the current global receiver object as the receiver.
403 __ bind(&use_global_receiver);
404 const int kGlobalOffset =
405 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
406 __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
407 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
408 __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
409 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
410
411 // Push the receiver.
412 __ bind(&push_receiver);
413 __ push(rbx);
414
415 // Copy all arguments from the array to the stack.
416 Label entry, loop;
417 __ movq(rax, Operand(rbp, kIndexOffset));
418 __ jmp(&entry);
419 __ bind(&loop);
420 __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
421
422 // Use inline caching to speed up access to arguments.
423 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
424 __ Call(ic, RelocInfo::CODE_TARGET);
425 // It is important that we do not have a test instruction after the
426 // call. A test instruction after the call is used to indicate that
427 // we have generated an inline version of the keyed load. In this
428 // case, we know that we are not generating a test instruction next.
429
430 // Push the nth argument.
431 __ push(rax);
432
433 // Update the index on the stack and in register rax.
434 __ movq(rax, Operand(rbp, kIndexOffset));
435 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
436 __ movq(Operand(rbp, kIndexOffset), rax);
437
438 __ bind(&entry);
439 __ cmpq(rax, Operand(rbp, kLimitOffset));
440 __ j(not_equal, &loop);
441
442 // Invoke the function.
443 ParameterCount actual(rax);
444 __ SmiToInteger32(rax, rax);
445 __ movq(rdi, Operand(rbp, kFunctionOffset));
446 __ InvokeFunction(rdi, actual, CALL_FUNCTION);
447
448 __ LeaveInternalFrame();
449 __ ret(3 * kPointerSize); // remove function, receiver, and arguments
450 }
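The overflow test in the middle of this builtin is compact enough to misread; the comparison must be signed because rsp may already sit below the real limit. Restated in C++, assuming 8-byte stack slots:

    #include <cstdint>

    bool ArgumentsFitOnStack(intptr_t rsp, intptr_t real_stack_limit,
                             intptr_t argc) {
      const intptr_t kPointerSize = 8;
      intptr_t space_left = rsp - real_stack_limit;  // may be negative
      intptr_t space_needed = argc * kPointerSize;   // array unrolled below
      return space_left > space_needed;              // the j(greater, &okay)
    }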
451
452
453 // Load the built-in Array function from the current context.
454 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
455 // Load the global context.
456 __ movq(result, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
457 __ movq(result, FieldOperand(result, GlobalObject::kGlobalContextOffset));
458 // Load the Array function from the global context.
459 __ movq(result,
460 Operand(result, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
461 }
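The double hop through the global object is easy to mistake for a single load. A sketch of the chain with hypothetical accessors standing in for the raw slot offsets:

    // current context (rsi) -> global object -> global context -> Array slot.
    template <typename Context>
    auto* LoadArrayFunctionSketch(Context* current) {
      auto* global_object = current->global();            // GLOBAL_INDEX slot
      auto* global_context = global_object->global_context();
      return global_context->array_function();  // ARRAY_FUNCTION_INDEX slot
    }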
462
463
464 // Number of empty elements to allocate for an empty array.
465 static const int kPreallocatedArrayElements = 4;
466
467
468 // Allocate an empty JSArray. The allocated array is put into the result
469 // register. If the parameter initial_capacity is larger than zero, an elements
470 // backing store of that size is allocated and filled with the hole value.
471 // Otherwise the elements backing store is set to the empty FixedArray.
472 static void AllocateEmptyJSArray(MacroAssembler* masm,
473 Register array_function,
474 Register result,
475 Register scratch1,
476 Register scratch2,
477 Register scratch3,
478 int initial_capacity,
479 Label* gc_required) {
480 ASSERT(initial_capacity >= 0);
481
482 // Load the initial map from the array function.
483 __ movq(scratch1, FieldOperand(array_function,
484 JSFunction::kPrototypeOrInitialMapOffset));
485
486 // Allocate the JSArray object together with space for a fixed array with the
487 // requested elements.
488 int size = JSArray::kSize;
489 if (initial_capacity > 0) {
490 size += FixedArray::SizeFor(initial_capacity);
491 }
492 __ AllocateInNewSpace(size,
493 result,
494 scratch2,
495 scratch3,
496 gc_required,
497 TAG_OBJECT);
498
499 // Allocated the JSArray. Now initialize the fields except for the elements
500 // array.
501 // result: JSObject
502 // scratch1: initial map
503 // scratch2: start of next object
504 __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
505 __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
506 Factory::empty_fixed_array());
507 // Field JSArray::kElementsOffset is initialized later.
508 __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));
509
510 // If no storage is requested for the elements array just set the empty
511 // fixed array.
512 if (initial_capacity == 0) {
513 __ Move(FieldOperand(result, JSArray::kElementsOffset),
514 Factory::empty_fixed_array());
515 return;
516 }
517
518 // Calculate the location of the elements array and set elements array member
519 // of the JSArray.
520 // result: JSObject
521 // scratch2: start of next object
522 __ lea(scratch1, Operand(result, JSArray::kSize));
523 __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);
524
525 // Initialize the FixedArray and fill it with holes. FixedArray length is
526 // stored as a smi.
527 // result: JSObject
528 // scratch1: elements array
529 // scratch2: start of next object
530 __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
531 Factory::fixed_array_map());
532 __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
533 Smi::FromInt(initial_capacity));
534
535 // Fill the FixedArray with the hole value. Inline the code if short.
536 // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
537 static const int kLoopUnfoldLimit = 4;
538 ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
539 __ Move(scratch3, Factory::the_hole_value());
540 if (initial_capacity <= kLoopUnfoldLimit) {
541 // Use a scratch register here to have only one reloc info when unfolding
542 // the loop.
543 for (int i = 0; i < initial_capacity; i++) {
544 __ movq(FieldOperand(scratch1,
545 FixedArray::kHeaderSize + i * kPointerSize),
546 scratch3);
547 }
548 } else {
549 Label loop, entry;
550 __ jmp(&entry);
551 __ bind(&loop);
552 __ movq(Operand(scratch1, 0), scratch3);
553 __ addq(scratch1, Immediate(kPointerSize));
554 __ bind(&entry);
555 __ cmpq(scratch1, scratch2);
556 __ j(below, &loop);
557 }
558 }
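The size passed to AllocateInNewSpace is the only arithmetic in this helper. A constexpr restatement under assumed constants (8-byte slots, two-word FixedArray header, four-word JSArray; illustrative values, not necessarily V8's exact kSize numbers):

    constexpr int kPtr = 8;
    constexpr int kJSArraySize = 4 * kPtr;       // map, properties, elements, length
    constexpr int kFixedArrayHeader = 2 * kPtr;  // map, length

    constexpr int FixedArraySizeFor(int capacity) {
      return kFixedArrayHeader + capacity * kPtr;
    }

    constexpr int EmptyArrayAllocation(int initial_capacity) {
      return kJSArraySize + (initial_capacity > 0
                                 ? FixedArraySizeFor(initial_capacity)
                                 : 0);
    }

    static_assert(EmptyArrayAllocation(0) == 32, "empty: JSArray header only");
    static_assert(EmptyArrayAllocation(4) == 80, "four preallocated holes");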
559
560
561 // Allocate a JSArray with the number of elements stored in a register. The
562 // register array_function holds the built-in Array function and the register
563 // array_size holds the size of the array as a smi. The allocated array is put
564 // into the result register and beginning and end of the FixedArray elements
565 // storage is put into registers elements_array and elements_array_end (see
566 // below for when that is not the case). If the parameter fill_with_hole is
567 // true, the allocated elements backing store is filled with the hole value;
568 // otherwise it is left uninitialized. When the backing store is filled, the
569 // register elements_array is scratched.
570 static void AllocateJSArray(MacroAssembler* masm,
571 Register array_function, // Array function.
572 Register array_size, // As a smi.
573 Register result,
574 Register elements_array,
575 Register elements_array_end,
576 Register scratch,
577 bool fill_with_hole,
578 Label* gc_required) {
579 Label not_empty, allocated;
580
581 // Load the initial map from the array function.
582 __ movq(elements_array,
583 FieldOperand(array_function,
584 JSFunction::kPrototypeOrInitialMapOffset));
585
586 // Check whether a zero-sized array is requested.
587 __ testq(array_size, array_size);
588 __ j(not_zero, &not_empty);
589
590 // If an empty array is requested, allocate a small elements array anyway. This
591 // keeps the code below free of special casing for the empty array.
592 int size = JSArray::kSize + FixedArray::SizeFor(kPreallocatedArrayElements);
593 __ AllocateInNewSpace(size,
594 result,
595 elements_array_end,
596 scratch,
597 gc_required,
598 TAG_OBJECT);
599 __ jmp(&allocated);
600
601 // Allocate the JSArray object together with space for a FixedArray with the
602 // requested elements.
603 __ bind(&not_empty);
604 SmiIndex index =
605 masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2);
606 __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
607 index.scale,
608 index.reg,
609 result,
610 elements_array_end,
611 scratch,
612 gc_required,
613 TAG_OBJECT);
614
615 // Allocated the JSArray. Now initialize the fields except for the elements
616 // array.
617 // result: JSObject
618 // elements_array: initial map
619 // elements_array_end: start of next object
620 // array_size: size of array (smi)
621 __ bind(&allocated);
622 __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
623 __ Move(elements_array, Factory::empty_fixed_array());
624 __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
625 // Field JSArray::kElementsOffset is initialized later.
626 __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);
627
628 // Calculate the location of the elements array and set elements array member
629 // of the JSArray.
630 // result: JSObject
631 // elements_array_end: start of next object
632 // array_size: size of array (smi)
633 __ lea(elements_array, Operand(result, JSArray::kSize));
634 __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);
635
636 // Initialize the fixed array. FixedArray length is stored as a smi.
637 // result: JSObject
638 // elements_array: elements array
639 // elements_array_end: start of next object
640 // array_size: size of array (smi)
641 __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
642 Factory::fixed_array_map());
643 Label not_empty_2, fill_array;
644 __ SmiTest(array_size);
645 __ j(not_zero, &not_empty_2);
646 // Length of the FixedArray is the number of pre-allocated elements even
647 // though the actual JSArray has length 0.
648 __ Move(FieldOperand(elements_array, FixedArray::kLengthOffset),
649 Smi::FromInt(kPreallocatedArrayElements));
650 __ jmp(&fill_array);
651 __ bind(&not_empty_2);
652 // For non-empty JSArrays the length of the FixedArray and the JSArray is the
653 // same.
654 __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);
655
656 // Fill the allocated FixedArray with the hole value if requested.
657 // result: JSObject
658 // elements_array: elements array
659 // elements_array_end: start of next object
660 __ bind(&fill_array);
661 if (fill_with_hole) {
662 Label loop, entry;
663 __ Move(scratch, Factory::the_hole_value());
664 __ lea(elements_array, Operand(elements_array,
665 FixedArray::kHeaderSize - kHeapObjectTag));
666 __ jmp(&entry);
667 __ bind(&loop);
668 __ movq(Operand(elements_array, 0), scratch);
669 __ addq(elements_array, Immediate(kPointerSize));
670 __ bind(&entry);
671 __ cmpq(elements_array, elements_array_end);
672 __ j(below, &loop);
673 }
674 }
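One subtlety worth flagging: for a zero-sized request the JSArray's length and the backing store's length deliberately diverge. The rule, as a small sketch with illustrative names:

    struct LengthsSketch {
      int js_array_length;     // stored at JSArray::kLengthOffset
      int fixed_array_length;  // stored at FixedArray::kLengthOffset
    };

    LengthsSketch BackingLengths(int array_size, int preallocated) {
      if (array_size == 0) {
        return {0, preallocated};       // the branch before &fill_array
      }
      return {array_size, array_size};  // &not_empty_2: the lengths coincide
    }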
675
676
677 // Create a new array for the built-in Array function. This function allocates
678 // the JSArray object and the FixedArray elements array and initializes these.
679 // If the Array cannot be constructed in native code the runtime is called. This
680 // function assumes the following state:
681 // rdi: constructor (built-in Array function)
682 // rax: argc
683 // rsp[0]: return address
684 // rsp[8]: last argument
685 // This function is used for both construct and normal calls of Array. The only
686 // difference between handling a construct call and a normal call is that for a
687 // construct call the constructor function in rdi needs to be preserved for
688 // entering the generic code. In both cases argc in rax needs to be preserved.
689 // Both registers are preserved by this code, so there is no need to
690 // differentiate between a construct call and a normal call.
691 static void ArrayNativeCode(MacroAssembler* masm,
692 Label *call_generic_code) {
693 Label argc_one_or_more, argc_two_or_more;
694
695 // Check for array construction with zero arguments.
696 __ testq(rax, rax);
697 __ j(not_zero, &argc_one_or_more);
698
699 // Handle construction of an empty array.
700 AllocateEmptyJSArray(masm,
701 rdi,
702 rbx,
703 rcx,
704 rdx,
705 r8,
706 kPreallocatedArrayElements,
707 call_generic_code);
708 __ IncrementCounter(&Counters::array_function_native, 1);
709 __ movq(rax, rbx);
710 __ ret(kPointerSize);
711
712 // Check for one argument. Bail out if argument is not a smi or if it is
713 // negative.
714 __ bind(&argc_one_or_more);
715 __ cmpq(rax, Immediate(1));
716 __ j(not_equal, &argc_two_or_more);
717 __ movq(rdx, Operand(rsp, kPointerSize)); // Get the argument from the stack.
718 __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);
719
720 // Handle construction of an empty array of a certain size. Bail out if size
721 // is too large to actually allocate an elements array.
722 __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
723 __ j(greater_equal, call_generic_code);
724
725 // rax: argc
726 // rdx: array_size (smi)
727 // rdi: constructor
728 // rsp[0]: return address
729 // rsp[8]: argument
730 AllocateJSArray(masm,
731 rdi,
732 rdx,
733 rbx,
734 rcx,
735 r8,
736 r9,
737 true,
738 call_generic_code);
739 __ IncrementCounter(&Counters::array_function_native, 1);
740 __ movq(rax, rbx);
741 __ ret(2 * kPointerSize);
742
743 // Handle construction of an array from a list of arguments.
744 __ bind(&argc_two_or_more);
745 __ movq(rdx, rax);
746 __ Integer32ToSmi(rdx, rdx); // Convert argc to a smi.
747 // rax: argc
748 // rdx: array_size (smi)
749 // rdi: constructor
750 // rsp[0] : return address
751 // rsp[8] : last argument
752 AllocateJSArray(masm,
753 rdi,
754 rdx,
755 rbx,
756 rcx,
757 r8,
758 r9,
759 false,
760 call_generic_code);
761 __ IncrementCounter(&Counters::array_function_native, 1);
762
763 // rax: argc
764 // rbx: JSArray
765 // rcx: elements_array
766 // r8: elements_array_end (untagged)
767 // rsp[0]: return address
768 // rsp[8]: last argument
769
770 // Location of the last argument
771 __ lea(r9, Operand(rsp, kPointerSize));
772
773 // Location of the first array element (parameter fill_with_hole to
774 // AllocateJSArray is false, so the FixedArray is returned in rcx).
775 __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));
776
777 // rax: argc
778 // rbx: JSArray
779 // rdx: location of the first array element
780 // r9: location of the last argument
781 // rsp[0]: return address
782 // rsp[8]: last argument
783 Label loop, entry;
784 __ movq(rcx, rax);
785 __ jmp(&entry);
786 __ bind(&loop);
787 __ movq(kScratchRegister, Operand(r9, rcx, times_pointer_size, 0));
788 __ movq(Operand(rdx, 0), kScratchRegister);
789 __ addq(rdx, Immediate(kPointerSize));
790 __ bind(&entry);
791 __ decq(rcx);
792 __ j(greater_equal, &loop);
793
794 // Remove caller arguments from the stack and return.
795 // rax: argc
796 // rbx: JSArray
797 // rsp[0]: return address
798 // rsp[8]: last argument
799 __ pop(rcx);
800 __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
801 __ push(rcx);
802 __ movq(rax, rbx);
803 __ ret(0);
804 }
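ArrayNativeCode is essentially a three-way dispatch on argc with a generic-code escape hatch. A control-flow summary (illustrative enum, not V8 API) of which allocation path each call shape takes:

    enum class ArrayCall { kEmpty, kWithLength, kWithElements, kGeneric };

    // new Array()  /  new Array(len)  /  new Array(a, b, ...).
    ArrayCall Classify(int argc, bool arg_is_nonneg_smi, bool arg_fits) {
      if (argc == 0) return ArrayCall::kEmpty;  // preallocated hole array
      if (argc == 1) {
        if (!arg_is_nonneg_smi || !arg_fits) return ArrayCall::kGeneric;
        return ArrayCall::kWithLength;          // hole-filled backing store
      }
      return ArrayCall::kWithElements;          // copy argc values
    }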
805
806
807 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
808 // ----------- S t a t e -------------
809 // -- rax : argc
810 // -- rsp[0] : return address
811 // -- rsp[8] : last argument
812 // -----------------------------------
813 Label generic_array_code;
814
815 // Get the Array function.
816 GenerateLoadArrayFunction(masm, rdi);
817
818 if (FLAG_debug_code) {
819 // Initial map for the builtin Array function should be a map.
820 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
821 // With kSmiTag == 0 the following check covers both a NULL and a Smi.
822 ASSERT(kSmiTag == 0);
823 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
824 __ Check(not_smi, "Unexpected initial map for Array function");
825 __ CmpObjectType(rbx, MAP_TYPE, rcx);
826 __ Check(equal, "Unexpected initial map for Array function");
827 }
828
829 // Run the native code for the Array function called as a normal function.
830 ArrayNativeCode(masm, &generic_array_code);
831
832 // Jump to the generic array code in case the specialized code cannot handle
833 // the construction.
834 __ bind(&generic_array_code);
835 Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
836 Handle<Code> array_code(code);
837 __ Jump(array_code, RelocInfo::CODE_TARGET);
838 }
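The debug check above leans on the tag layout: because kSmiTag == 0, a NULL word carries the smi tag, so a single not-smi assertion rejects an uninitialized map and a smi at once. In miniature:

    #include <cstdint>

    constexpr intptr_t kSmiTagMaskSketch = 1;  // assumed 1-bit tag, tag == 0

    inline bool LooksLikeSmi(intptr_t word) {
      return (word & kSmiTagMaskSketch) == 0;  // also true for NULL (0)
    }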
839
840
841 void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
842 // ----------- S t a t e -------------
843 // -- rax : argc
844 // -- rdi : constructor
845 // -- rsp[0] : return address
846 // -- rsp[8] : last argument
847 // -----------------------------------
848 Label generic_constructor;
849
850 if (FLAG_debug_code) {
851 // The array construct code is only set for the builtin Array function, which
852 // always has a map.
853 GenerateLoadArrayFunction(masm, rbx);
854 __ cmpq(rdi, rbx);
855 __ Check(equal, "Unexpected Array function");
856 // Initial map for the builtin Array function should be a map.
857 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
858 // With kSmiTag == 0 the following check covers both a NULL and a Smi.
859 ASSERT(kSmiTag == 0);
860 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
861 __ Check(not_smi, "Unexpected initial map for Array function");
862 __ CmpObjectType(rbx, MAP_TYPE, rcx);
863 __ Check(equal, "Unexpected initial map for Array function");
864 }
865
866 // Run the native code for the Array function called as constructor.
867 ArrayNativeCode(masm, &generic_constructor);
868
869 // Jump to the generic construct code in case the specialized code cannot
870 // handle the construction.
871 __ bind(&generic_constructor);
872 Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
873 Handle<Code> generic_construct_stub(code);
874 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
875 }
876
877
878 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
879 // TODO(849): implement custom construct stub.
880 // Generate a copy of the generic stub for now.
881 Generate_JSConstructStubGeneric(masm);
882 }
883
884
76 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
77 // ----------- S t a t e -------------
78 // -- rax: number of arguments
79 // -- rdi: constructor function
80 // -----------------------------------
81
82 Label non_function_call;
83 // Check that function is not a smi.
84 __ JumpIfSmi(rdi, &non_function_call);
85 // Check that function is a JSFunction.
(... 466 unchanged lines skipped ...)
552 // Restore function and tear down temporary frame.
553 __ pop(rdi);
554 __ LeaveInternalFrame();
555
556 // Do a tail-call of the compiled function.
557 __ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
558 __ jmp(rcx);
559 }
560
561
- 562 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
- 563 Deoptimizer::BailoutType type) {
+ 1371 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
564 __ int3();
565 }
566
- 567 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
- 568 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
- 569 }
- 570
571
572 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
- 573 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
+ 1377 __ int3();
574 }
575
576
577 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
578 __ int3();
579 }
580
581
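The right-hand side drops the Deoptimizer::BailoutType plumbing and leaves bare int3 traps, presumably because deoptimization is not wired up for x64 in this version, so these builtins should never actually run. Roughly, as a hedged sketch:

    // Stand-in for the emitted int3: trap immediately if ever reached.
    extern "C" void NotifyDeoptimizedStub() {
      __builtin_trap();  // GCC/Clang intrinsic; emits a trapping instruction
    }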
582 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
583 // Stack Layout:
584 // rsp[0]: Return address
585 // rsp[1]: Argument n
586 // rsp[2]: Argument n-1
587 // ...
588 // rsp[n]: Argument 1
589 // rsp[n+1]: Receiver (function to call)
590 //
591 // rax contains the number of arguments, n, not counting the receiver.
592 //
593 // 1. Make sure we have at least one argument.
594 { Label done;
595 __ testq(rax, rax);
596 __ j(not_zero, &done);
597 __ pop(rbx);
598 __ Push(Factory::undefined_value());
599 __ push(rbx);
600 __ incq(rax);
601 __ bind(&done);
602 }
603
604 // 2. Get the function to call (passed as receiver) from the stack, check
605 // if it is a function.
606 Label non_function;
607 // The function to call is at position n+1 on the stack.
608 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
609 __ JumpIfSmi(rdi, &non_function);
610 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
611 __ j(not_equal, &non_function);
612
613 // 3a. Patch the first argument if necessary when calling a function.
614 Label shift_arguments;
615 { Label convert_to_object, use_global_receiver, patch_receiver;
616 // Change context eagerly in case we need the global receiver.
617 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
618
619 __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
620 __ JumpIfSmi(rbx, &convert_to_object);
621
622 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
623 __ j(equal, &use_global_receiver);
624 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
625 __ j(equal, &use_global_receiver);
626
627 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
628 __ j(below, &convert_to_object);
629 __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
630 __ j(below_equal, &shift_arguments);
631
632 __ bind(&convert_to_object);
633 __ EnterInternalFrame(); // In order to preserve argument count.
634 __ Integer32ToSmi(rax, rax);
635 __ push(rax);
636
637 __ push(rbx);
638 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
639 __ movq(rbx, rax);
640
641 __ pop(rax);
642 __ SmiToInteger32(rax, rax);
643 __ LeaveInternalFrame();
644 // Restore the function to rdi.
645 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
646 __ jmp(&patch_receiver);
647
648 // Use the global receiver object from the called function as the
649 // receiver.
650 __ bind(&use_global_receiver);
651 const int kGlobalIndex =
652 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
653 __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
654 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
655 __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
656 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
657
658 __ bind(&patch_receiver);
659 __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);
660
661 __ jmp(&shift_arguments);
662 }
663
664
665 // 3b. Patch the first argument when calling a non-function. The
666 // CALL_NON_FUNCTION builtin expects the non-function callee as
667 // receiver, so overwrite the first argument which will ultimately
668 // become the receiver.
669 __ bind(&non_function);
670 __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi);
671 __ xor_(rdi, rdi);
672
673 // 4. Shift arguments and return address one slot down on the stack
674 // (overwriting the original receiver). Adjust argument count to make
675 // the original first argument the new receiver.
676 __ bind(&shift_arguments);
677 { Label loop;
678 __ movq(rcx, rax);
679 __ bind(&loop);
680 __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
681 __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
682 __ decq(rcx);
683 __ j(not_sign, &loop); // While non-negative (to copy return address).
684 __ pop(rbx); // Discard copy of return address.
685 __ decq(rax); // One fewer argument (first argument is new receiver).
686 }
687
688 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin.
689 { Label function;
690 __ testq(rdi, rdi);
691 __ j(not_zero, &function);
692 __ xor_(rbx, rbx);
693 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
694 __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
695 RelocInfo::CODE_TARGET);
696 __ bind(&function);
697 }
698
699 // 5b. Get the code to call from the function and check that the number of
700 // expected arguments matches what we're providing. If so, jump
701 // (tail-call) to the code in register rdx without checking arguments.
702 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
703 __ movsxlq(rbx,
704 FieldOperand(rdx,
705 SharedFunctionInfo::kFormalParameterCountOffset));
706 __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
707 __ cmpq(rax, rbx);
708 __ j(not_equal,
709 Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
710 RelocInfo::CODE_TARGET);
711
712 ParameterCount expected(0);
713 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION);
714 }
715
716
717 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
718 // Stack at entry:
719 // rsp: return address
720 // rsp+8: arguments
721 // rsp+16: receiver ("this")
722 // rsp+24: function
723 __ EnterInternalFrame();
724 // Stack frame:
725 // rbp: Old base pointer
726 // rbp[1]: return address
727 // rbp[2]: function arguments
728 // rbp[3]: receiver
729 // rbp[4]: function
730 static const int kArgumentsOffset = 2 * kPointerSize;
731 static const int kReceiverOffset = 3 * kPointerSize;
732 static const int kFunctionOffset = 4 * kPointerSize;
733 __ push(Operand(rbp, kFunctionOffset));
734 __ push(Operand(rbp, kArgumentsOffset));
735 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
736
737 // Check the stack for overflow. We do not need to catch
738 // interruptions (e.g. debug break and preemption) here, so the "real stack
739 // limit" is checked.
740 Label okay;
741 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
742 __ movq(rcx, rsp);
743 // Make rcx the space we have left. The stack might already be overflowed
744 // here which will cause rcx to become negative.
745 __ subq(rcx, kScratchRegister);
746 // Make rdx the space we need for the array when it is unrolled onto the
747 // stack.
748 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
749 // Check if the arguments will overflow the stack.
750 __ cmpq(rcx, rdx);
751 __ j(greater, &okay); // Signed comparison.
752
753 // Out of stack space.
754 __ push(Operand(rbp, kFunctionOffset));
755 __ push(rax);
756 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
757 __ bind(&okay);
758 // End of stack check.
759
760 // Push current index and limit.
761 const int kLimitOffset =
762 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
763 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
764 __ push(rax); // limit
765 __ push(Immediate(0)); // index
766
767 // Change context eagerly to get the right global object if
768 // necessary.
769 __ movq(rdi, Operand(rbp, kFunctionOffset));
770 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
771
772 // Compute the receiver.
773 Label call_to_object, use_global_receiver, push_receiver;
774 __ movq(rbx, Operand(rbp, kReceiverOffset));
775 __ JumpIfSmi(rbx, &call_to_object);
776 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
777 __ j(equal, &use_global_receiver);
778 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
779 __ j(equal, &use_global_receiver);
780
781 // If given receiver is already a JavaScript object then there's no
782 // reason for converting it.
783 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
784 __ j(below, &call_to_object);
785 __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
786 __ j(below_equal, &push_receiver);
787
788 // Convert the receiver to an object.
789 __ bind(&call_to_object);
790 __ push(rbx);
791 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
792 __ movq(rbx, rax);
793 __ jmp(&push_receiver);
794
795 // Use the current global receiver object as the receiver.
796 __ bind(&use_global_receiver);
797 const int kGlobalOffset =
798 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
799 __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
800 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
801 __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
802 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
803
804 // Push the receiver.
805 __ bind(&push_receiver);
806 __ push(rbx);
807
808 // Copy all arguments from the array to the stack.
809 Label entry, loop;
810 __ movq(rax, Operand(rbp, kIndexOffset));
811 __ jmp(&entry);
812 __ bind(&loop);
813 __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
814
815 // Use inline caching to speed up access to arguments.
816 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
817 __ Call(ic, RelocInfo::CODE_TARGET);
818 // It is important that we do not have a test instruction after the
819 // call. A test instruction after the call is used to indicate that
820 // we have generated an inline version of the keyed load. In this
821 // case, we know that we are not generating a test instruction next.
822
823 // Push the nth argument.
824 __ push(rax);
825
826 // Update the index on the stack and in register rax.
827 __ movq(rax, Operand(rbp, kIndexOffset));
828 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
829 __ movq(Operand(rbp, kIndexOffset), rax);
830
831 __ bind(&entry);
832 __ cmpq(rax, Operand(rbp, kLimitOffset));
833 __ j(not_equal, &loop);
834
835 // Invoke the function.
836 ParameterCount actual(rax);
837 __ SmiToInteger32(rax, rax);
838 __ movq(rdi, Operand(rbp, kFunctionOffset));
839 __ InvokeFunction(rdi, actual, CALL_FUNCTION);
840
841 __ LeaveInternalFrame();
842 __ ret(3 * kPointerSize); // remove function, receiver, and arguments
843 }
844
845
846 // Number of empty elements to allocate for an empty array.
847 static const int kPreallocatedArrayElements = 4;
848
849
850 // Allocate an empty JSArray. The allocated array is put into the result
851 // register. If the parameter initial_capacity is larger than zero, an elements
852 // backing store of that size is allocated and filled with the hole value.
853 // Otherwise the elements backing store is set to the empty FixedArray.
854 static void AllocateEmptyJSArray(MacroAssembler* masm,
855 Register array_function,
856 Register result,
857 Register scratch1,
858 Register scratch2,
859 Register scratch3,
860 int initial_capacity,
861 Label* gc_required) {
862 ASSERT(initial_capacity >= 0);
863
864 // Load the initial map from the array function.
865 __ movq(scratch1, FieldOperand(array_function,
866 JSFunction::kPrototypeOrInitialMapOffset));
867
868 // Allocate the JSArray object together with space for a fixed array with the
869 // requested elements.
870 int size = JSArray::kSize;
871 if (initial_capacity > 0) {
872 size += FixedArray::SizeFor(initial_capacity);
873 }
874 __ AllocateInNewSpace(size,
875 result,
876 scratch2,
877 scratch3,
878 gc_required,
879 TAG_OBJECT);
880
881 // Allocated the JSArray. Now initialize the fields except for the elements
882 // array.
883 // result: JSObject
884 // scratch1: initial map
885 // scratch2: start of next object
886 __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
887 __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
888 Factory::empty_fixed_array());
889 // Field JSArray::kElementsOffset is initialized later.
890 __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));
891
892 // If no storage is requested for the elements array just set the empty
893 // fixed array.
894 if (initial_capacity == 0) {
895 __ Move(FieldOperand(result, JSArray::kElementsOffset),
896 Factory::empty_fixed_array());
897 return;
898 }
899
900 // Calculate the location of the elements array and set elements array member
901 // of the JSArray.
902 // result: JSObject
903 // scratch2: start of next object
904 __ lea(scratch1, Operand(result, JSArray::kSize));
905 __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);
906
907 // Initialize the FixedArray and fill it with holes. FixedArray length is
908 // stored as a smi.
909 // result: JSObject
910 // scratch1: elements array
911 // scratch2: start of next object
912 __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
913 Factory::fixed_array_map());
914 __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
915 Smi::FromInt(initial_capacity));
916
917 // Fill the FixedArray with the hole value. Inline the code if short.
918 // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
919 static const int kLoopUnfoldLimit = 4;
920 ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
921 __ Move(scratch3, Factory::the_hole_value());
922 if (initial_capacity <= kLoopUnfoldLimit) {
923 // Use a scratch register here to have only one reloc info when unfolding
924 // the loop.
925 for (int i = 0; i < initial_capacity; i++) {
926 __ movq(FieldOperand(scratch1,
927 FixedArray::kHeaderSize + i * kPointerSize),
928 scratch3);
929 }
930 } else {
931 Label loop, entry;
932 __ jmp(&entry);
933 __ bind(&loop);
934 __ movq(Operand(scratch1, 0), scratch3);
935 __ addq(scratch1, Immediate(kPointerSize));
936 __ bind(&entry);
937 __ cmpq(scratch1, scratch2);
938 __ j(below, &loop);
939 }
940 }
941
942
943 // Allocate a JSArray with the number of elements stored in a register. The
944 // register array_function holds the built-in Array function and the register
945 // array_size holds the size of the array as a smi. The allocated array is put
946 // into the result register and beginning and end of the FixedArray elements
947 // storage is put into registers elements_array and elements_array_end (see
948 // below for when that is not the case). If the parameter fill_with_hole is
949 // true, the allocated elements backing store is filled with the hole value;
950 // otherwise it is left uninitialized. When the backing store is filled, the
951 // register elements_array is scratched.
952 static void AllocateJSArray(MacroAssembler* masm,
953 Register array_function, // Array function.
954 Register array_size, // As a smi.
955 Register result,
956 Register elements_array,
957 Register elements_array_end,
958 Register scratch,
959 bool fill_with_hole,
960 Label* gc_required) {
961 Label not_empty, allocated;
962
963 // Load the initial map from the array function.
964 __ movq(elements_array,
965 FieldOperand(array_function,
966 JSFunction::kPrototypeOrInitialMapOffset));
967
968 // Check whether a zero-sized array is requested.
969 __ testq(array_size, array_size);
970 __ j(not_zero, &not_empty);
971
972 // If an empty array is requested, allocate a small elements array anyway. This
973 // keeps the code below free of special casing for the empty array.
974 int size = JSArray::kSize + FixedArray::SizeFor(kPreallocatedArrayElements);
975 __ AllocateInNewSpace(size,
976 result,
977 elements_array_end,
978 scratch,
979 gc_required,
980 TAG_OBJECT);
981 __ jmp(&allocated);
982
983 // Allocate the JSArray object together with space for a FixedArray with the
984 // requested elements.
985 __ bind(&not_empty);
986 SmiIndex index =
987 masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2);
988 __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
989 index.scale,
990 index.reg,
991 result,
992 elements_array_end,
993 scratch,
994 gc_required,
995 TAG_OBJECT);
996
997 // Allocated the JSArray. Now initialize the fields except for the elements
998 // array.
999 // result: JSObject
1000 // elements_array: initial map
1001 // elements_array_end: start of next object
1002 // array_size: size of array (smi)
1003 __ bind(&allocated);
1004 __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
1005 __ Move(elements_array, Factory::empty_fixed_array());
1006 __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
1007 // Field JSArray::kElementsOffset is initialized later.
1008 __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);
1009
1010 // Calculate the location of the elements array and set elements array member
1011 // of the JSArray.
1012 // result: JSObject
1013 // elements_array_end: start of next object
1014 // array_size: size of array (smi)
1015 __ lea(elements_array, Operand(result, JSArray::kSize));
1016 __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);
1017
1018 // Initialize the fixed array. FixedArray length is stored as a smi.
1019 // result: JSObject
1020 // elements_array: elements array
1021 // elements_array_end: start of next object
1022 // array_size: size of array (smi)
1023 __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
1024 Factory::fixed_array_map());
1025 Label not_empty_2, fill_array;
1026 __ SmiTest(array_size);
1027 __ j(not_zero, &not_empty_2);
1028 // Length of the FixedArray is the number of pre-allocated elements even
1029 // though the actual JSArray has length 0.
1030 __ Move(FieldOperand(elements_array, FixedArray::kLengthOffset),
1031 Smi::FromInt(kPreallocatedArrayElements));
1032 __ jmp(&fill_array);
1033 __ bind(&not_empty_2);
1034 // For non-empty JSArrays the length of the FixedArray and the JSArray is the
1035 // same.
1036 __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);
1037
1038 // Fill the allocated FixedArray with the hole value if requested.
1039 // result: JSObject
1040 // elements_array: elements array
1041 // elements_array_end: start of next object
1042 __ bind(&fill_array);
1043 if (fill_with_hole) {
1044 Label loop, entry;
1045 __ Move(scratch, Factory::the_hole_value());
1046 __ lea(elements_array, Operand(elements_array,
1047 FixedArray::kHeaderSize - kHeapObjectTag));
1048 __ jmp(&entry);
1049 __ bind(&loop);
1050 __ movq(Operand(elements_array, 0), scratch);
1051 __ addq(elements_array, Immediate(kPointerSize));
1052 __ bind(&entry);
1053 __ cmpq(elements_array, elements_array_end);
1054 __ j(below, &loop);
1055 }
1056 }
1057
1058
1059 // Create a new array for the built-in Array function. This function allocates
1060 // the JSArray object and the FixedArray elements array and initializes these.
1061 // If the Array cannot be constructed in native code the runtime is called. This
1062 // function assumes the following state:
1063 // rdi: constructor (built-in Array function)
1064 // rax: argc
1065 // rsp[0]: return address
1066 // rsp[8]: last argument
1067 // This function is used for both construct and normal calls of Array. The only
1068 // difference between handling a construct call and a normal call is that for a
1069 // construct call the constructor function in rdi needs to be preserved for
1070 // entering the generic code. In both cases argc in rax needs to be preserved.
1071 // Both registers are preserved by this code, so there is no need to
1072 // differentiate between a construct call and a normal call.
1073 static void ArrayNativeCode(MacroAssembler* masm,
1074 Label *call_generic_code) {
1075 Label argc_one_or_more, argc_two_or_more;
1076
1077 // Check for array construction with zero arguments.
1078 __ testq(rax, rax);
1079 __ j(not_zero, &argc_one_or_more);
1080
1081 // Handle construction of an empty array.
1082 AllocateEmptyJSArray(masm,
1083 rdi,
1084 rbx,
1085 rcx,
1086 rdx,
1087 r8,
1088 kPreallocatedArrayElements,
1089 call_generic_code);
1090 __ IncrementCounter(&Counters::array_function_native, 1);
1091 __ movq(rax, rbx);
1092 __ ret(kPointerSize);
1093
1094 // Check for one argument. Bail out if argument is not a smi or if it is
1095 // negative.
1096 __ bind(&argc_one_or_more);
1097 __ cmpq(rax, Immediate(1));
1098 __ j(not_equal, &argc_two_or_more);
1099 __ movq(rdx, Operand(rsp, kPointerSize)); // Get the argument from the stack.
1100 __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);
1101
1102 // Handle construction of an empty array of a certain size. Bail out if size
1103 // is too large to actually allocate an elements array.
1104 __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
1105 __ j(greater_equal, call_generic_code);
1106
1107 // rax: argc
1108 // rdx: array_size (smi)
1109 // rdi: constructor
1110 // rsp[0]: return address
1111 // rsp[8]: argument
1112 AllocateJSArray(masm,
1113 rdi,
1114 rdx,
1115 rbx,
1116 rcx,
1117 r8,
1118 r9,
1119 true,
1120 call_generic_code);
1121 __ IncrementCounter(&Counters::array_function_native, 1);
1122 __ movq(rax, rbx);
1123 __ ret(2 * kPointerSize);
1124
1125 // Handle construction of an array from a list of arguments.
1126 __ bind(&argc_two_or_more);
1127 __ movq(rdx, rax);
1128 __ Integer32ToSmi(rdx, rdx); // Convert argc to a smi.
1129 // rax: argc
1130 // rdx: array_size (smi)
1131 // rdi: constructor
1132 // rsp[0] : return address
1133 // rsp[8] : last argument
1134 AllocateJSArray(masm,
1135 rdi,
1136 rdx,
1137 rbx,
1138 rcx,
1139 r8,
1140 r9,
1141 false,
1142 call_generic_code);
1143 __ IncrementCounter(&Counters::array_function_native, 1);
1144
1145 // rax: argc
1146 // rbx: JSArray
1147 // rcx: elements_array
1148 // r8: elements_array_end (untagged)
1149 // rsp[0]: return address
1150 // rsp[8]: last argument
1151
1152 // Location of the last argument
1153 __ lea(r9, Operand(rsp, kPointerSize));
1154
1155 // Location of the first array element (parameter fill_with_hole to
1156 // AllocateJSArray is false, so the FixedArray is returned in rcx).
1157 __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));
1158
1159 // rax: argc
1160 // rbx: JSArray
1161 // rdx: location of the first array element
1162 // r9: location of the last argument
1163 // rsp[0]: return address
1164 // rsp[8]: last argument
1165 Label loop, entry;
1166 __ movq(rcx, rax);
1167 __ jmp(&entry);
1168 __ bind(&loop);
1169 __ movq(kScratchRegister, Operand(r9, rcx, times_pointer_size, 0));
1170 __ movq(Operand(rdx, 0), kScratchRegister);
1171 __ addq(rdx, Immediate(kPointerSize));
1172 __ bind(&entry);
1173 __ decq(rcx);
1174 __ j(greater_equal, &loop);
1175
1176 // Remove caller arguments from the stack and return.
1177 // rax: argc
1178 // rbx: JSArray
1179 // rsp[0]: return address
1180 // rsp[8]: last argument
1181 __ pop(rcx);
1182 __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
1183 __ push(rcx);
1184 __ movq(rax, rbx);
1185 __ ret(0);
1186 }
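
A note for readers following the element-copy loop above: the arguments were pushed left to right, so the first JS argument sits deepest on the stack, and walking rcx from argc - 1 down to 0 while the destination pointer advances stores the arguments into the FixedArray in source order. A minimal sketch of that loop (not V8 code; untagged machine words stand in for tagged values):

    #include <cstddef>
    #include <cstdint>

    // stack_args points at the last argument (the lowest address), so
    // stack_args[argc - 1] is the first argument in source order.
    void CopyArgumentsToElements(const intptr_t* stack_args, size_t argc,
                                 intptr_t* elements) {
      intptr_t* dst = elements;
      for (size_t i = argc; i-- > 0; ) {  // rcx counts argc - 1 down to 0
        *dst++ = stack_args[i];           // first argument lands in element 0
      }
    }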
1187
1188
1189 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1190 // ----------- S t a t e -------------
1191 // -- rax : argc
1192 // -- rsp[0] : return address
1193 // -- rsp[8] : last argument
1194 // -----------------------------------
1195 Label generic_array_code;
1196
1197 // Get the Array function.
1198 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);
1199
1200 if (FLAG_debug_code) {
1201 // Initial map for the builtin Array function should be a map.
1202 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1203 // The following smi check catches both a smi and a NULL pointer.
1204 ASSERT(kSmiTag == 0);
1205 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1206 __ Check(not_smi, "Unexpected initial map for Array function");
1207 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1208 __ Check(equal, "Unexpected initial map for Array function");
1209 }
1210
1211 // Run the native code for the Array function called as a normal function.
1212 ArrayNativeCode(masm, &generic_array_code);
1213
1214 // Jump to the generic array code in case the specialized code cannot handle
1215 // the construction.
1216 __ bind(&generic_array_code);
1217 Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
1218 Handle<Code> array_code(code);
1219 __ Jump(array_code, RelocInfo::CODE_TARGET);
1220 }
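
On the debug-mode check above: since kSmiTag == 0, a NULL word (all zero bits) has a clear tag bit and therefore classifies as a smi, so the single smi test rejects both a smi placeholder and a missing map. A stand-alone sketch of the tag logic, assuming the usual V8 tagging (smi tag 0, heap-object tag 1 in the low bit):

    #include <cstdint>

    // Hypothetical helper, not V8's API: true only for tagged heap pointers.
    static inline bool LooksLikeHeapObject(intptr_t word) {
      const intptr_t kTagMask = 1;
      return (word & kTagMask) != 0;  // NULL fails this test, just like a smi
    }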
1221
1222
1223 void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
1224 // ----------- S t a t e -------------
1225 // -- rax : argc
1226 // -- rdi : constructor
1227 // -- rsp[0] : return address
1228 // -- rsp[8] : last argument
1229 // -----------------------------------
1230 Label generic_constructor;
1231
1232 if (FLAG_debug_code) {
1233 // The array construct code is only set for the builtin Array function,
1234 // which always has an initial map.
1235 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rbx);
1236 __ cmpq(rdi, rbx);
1237 __ Check(equal, "Unexpected Array function");
1238 // Initial map for the builtin Array function should be a map.
1239 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1240 // The following smi check catches both a smi and a NULL pointer.
1241 ASSERT(kSmiTag == 0);
1242 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1243 __ Check(not_smi, "Unexpected initial map for Array function");
1244 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1245 __ Check(equal, "Unexpected initial map for Array function");
1246 }
1247
1248 // Run the native code for the Array function called as constructor.
1249 ArrayNativeCode(masm, &generic_constructor);
1250
1251 // Jump to the generic construct code in case the specialized code cannot
1252 // handle the construction.
1253 __ bind(&generic_constructor);
1254 Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
1255 Handle<Code> generic_construct_stub(code);
1256 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
1257 }
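
Both builtins funnel into ArrayNativeCode and differ only in their bail-out target: ArrayCodeGeneric for a normal call, JSConstructStubGeneric for a construct call. A hedged control-flow model of the argc-based dispatch, with hypothetical names (the booleans stand in for the JumpUnlessNonNegativeSmi and SmiCompare checks done in assembly):

    enum ArrayFastPath { kEmpty, kSized, kFromArgs, kGeneric };

    ArrayFastPath ClassifyArrayCall(int argc, bool arg_is_nonnegative_smi,
                                    bool size_below_fast_limit) {
      if (argc == 0) return kEmpty;                    // new Array()
      if (argc == 1) {                                 // new Array(n)
        return (arg_is_nonnegative_smi && size_below_fast_limit)
                   ? kSized
                   : kGeneric;                         // call_generic_code
      }
      return kFromArgs;                                // new Array(a, b, ...)
    }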
1258
1259
1260 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1261 // TODO(849): implement custom construct stub.
1262 // Generate a copy of the generic stub for now.
1263 Generate_JSConstructStubGeneric(masm);
1264 }
1265
1266
1267 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1268 __ push(rbp);
1269 __ movq(rbp, rsp);
1270
1271 // Store the arguments adaptor context sentinel.
1272 __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1273
1274 // Push the function on the stack.
1275 __ push(rdi);
1276
1277 // Preserve the number of arguments on the stack. Must preserve both
1278 // rax and rbx because these registers are used when copying the
1279 // arguments and the receiver.
1280 __ Integer32ToSmi(rcx, rax);
1281 __ push(rcx);
1282 }
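
The Integer32ToSmi above relies on the x64 smi representation used at this point in V8's history, where, to the best of my reading, the 32-bit payload lives in the upper half of the word (kSmiShift == 32, kSmiTag == 0). A sketch under that assumption:

    #include <cstdint>

    static inline intptr_t Integer32ToSmi(int32_t value) {
      return static_cast<intptr_t>(value) << 32;  // payload in the high half
    }
    static inline int32_t SmiToInteger32(intptr_t smi) {
      return static_cast<int32_t>(smi >> 32);
    }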
1283
1284
1285 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1286 // Retrieve the number of arguments from the stack. The number is a smi.
1287 __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1288
1289 // Leave the frame.
1290 __ movq(rsp, rbp);
1291 __ pop(rbp);
1292
1293 // Remove caller arguments from the stack.
1294 __ pop(rcx);
1295 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
1296 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
1297 __ push(rcx);
1298 }
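
The pop/lea/push sequence above nets out to dropping the receiver and all arguments while leaving the return address on top for the final ret(0): pop rcx (+8), lea rsp + count*8 + 8, push rcx (-8), for a net adjustment of (count + 1) slots. A small model of the arithmetic, assuming 8-byte stack slots:

    #include <cstdint>

    static inline uint64_t StackPointerAfterCleanup(uint64_t rsp,
                                                    uint64_t arg_count) {
      const uint64_t kPointerSize = 8;
      return rsp + (arg_count + 1) * kPointerSize;  // arguments + receiver
    }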
1299
1300
1301 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1302 // ----------- S t a t e -------------
1303 // -- rax : actual number of arguments
1304 // -- rbx : expected number of arguments
1305 // -- rdx : code entry to call
1306 // -----------------------------------
1307
1308 Label invoke, dont_adapt_arguments;
1309 __ IncrementCounter(&Counters::arguments_adaptors, 1);
1310
1311 Label enough, too_few;
1312 __ cmpq(rax, rbx);
1313 __ j(less, &too_few);
1314 __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1315 __ j(equal, &dont_adapt_arguments);
1316
1317 { // Enough parameters: Actual >= expected.
1318 __ bind(&enough);
1319 EnterArgumentsAdaptorFrame(masm);
1320
1321 // Copy receiver and all expected arguments.
1322 const int offset = StandardFrameConstants::kCallerSPOffset;
1323 __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
1324 __ movq(rcx, Immediate(-1)); // Account for the receiver.
1325
1326 Label copy;
1327 __ bind(&copy);
1328 __ incq(rcx);
1329 __ push(Operand(rax, 0));
1330 __ subq(rax, Immediate(kPointerSize));
1331 __ cmpq(rcx, rbx);
1332 __ j(less, &copy);
1333 __ jmp(&invoke);
1334 }
1335
1336 { // Too few parameters: Actual < expected.
1337 __ bind(&too_few);
1338 EnterArgumentsAdaptorFrame(masm);
1339
1340 // Copy receiver and all actual arguments.
1341 const int offset = StandardFrameConstants::kCallerSPOffset;
1342 __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
1343 __ movq(rcx, Immediate(-1)); // Account for the receiver.
1344
1345 Label copy;
1346 __ bind(&copy);
1347 __ incq(rcx);
1348 __ push(Operand(rdi, 0));
1349 __ subq(rdi, Immediate(kPointerSize));
1350 __ cmpq(rcx, rax);
1351 __ j(less, &copy);
1352
1353 // Fill remaining expected arguments with undefined values.
1354 Label fill;
1355 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
1356 __ bind(&fill);
1357 __ incq(rcx);
1358 __ push(kScratchRegister);
1359 __ cmpq(rcx, rbx);
1360 __ j(less, &fill);
1361
1362 // Restore function pointer.
1363 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1364 }
1365
1366 // Call the entry point.
1367 __ bind(&invoke);
1368 __ call(rdx);
1369
1370 // Leave frame and return.
1371 LeaveArgumentsAdaptorFrame(masm);
1372 __ ret(0);
1373
1374 // -------------------------------------------
1375 // Don't adapt arguments.
1376 // -------------------------------------------
1377 __ bind(&dont_adapt_arguments);
1378 __ jmp(rdx);
1379 }
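
Functionally, the trampoline hands the callee exactly the expected number of arguments: extras are ignored in the callee's view, and missing ones are padded with undefined. An illustrative model under that reading (hypothetical types, not V8's API):

    #include <algorithm>
    #include <cstddef>
    #include <string>
    #include <vector>

    typedef std::string Value;  // stand-in for a tagged JS value (assumption)

    std::vector<Value> AdaptArguments(const std::vector<Value>& actual,
                                      size_t expected, const Value& undefined) {
      std::vector<Value> adapted(
          actual.begin(),
          actual.begin() + std::min(actual.size(), expected));
      adapted.resize(expected, undefined);  // pad the shortfall with undefined
      return adapted;
    }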
1380
1381
1382 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { 1386 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1383 __ int3(); 1387 __ int3();
1384 } 1388 }
1385 1389
1386 1390
1387 #undef __
1388
1389 } } // namespace v8::internal 1391 } } // namespace v8::internal
1390 1392
1391 #endif // V8_TARGET_ARCH_X64 1393 #endif // V8_TARGET_ARCH_X64