Chromium Code Reviews

Side by Side Diff: src/sh4/builtins-sh4.cc

Issue 11275184: First draft of the sh4 port
Base URL: http://github.com/v8/v8.git@master
Patch Set: Use GYP and fix some typos. Created 8 years, 1 month ago.
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2011-2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its 12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived 13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission. 14 // from this software without specific prior written permission.
15 // 15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #if defined(V8_TARGET_ARCH_ARM) 30 #if defined(V8_TARGET_ARCH_SH4)
31 31
32 #include "codegen.h" 32 #include "codegen.h"
33 #include "debug.h" 33 #include "debug.h"
34 #include "deoptimizer.h" 34 #include "deoptimizer.h"
35 #include "full-codegen.h" 35 #include "full-codegen.h"
36 #include "runtime.h" 36 #include "runtime.h"
37 37
38 namespace v8 { 38 namespace v8 {
39 namespace internal { 39 namespace internal {
40 40
41 41
42 #define __ ACCESS_MASM(masm) 42 #define __ ACCESS_MASM(masm)
43 43
44 44
45 // Define register map
46 #include "map-sh4.h"
47
48
45 void Builtins::Generate_Adaptor(MacroAssembler* masm, 49 void Builtins::Generate_Adaptor(MacroAssembler* masm,
46 CFunctionId id, 50 CFunctionId id,
47 BuiltinExtraArguments extra_args) { 51 BuiltinExtraArguments extra_args) {
48 // ----------- S t a t e ------------- 52 // ----------- S t a t e -------------
49 // -- r0 : number of arguments excluding receiver 53 // -- r0 : number of arguments excluding receiver
50 // -- r1 : called function (only guaranteed when 54 // -- r1 : called function (only guaranteed when
51 // extra_args requires it) 55 // extra_args requires it)
52 // -- cp : context 56 // -- cp : context
53 // -- sp[0] : last argument 57 // -- sp[0] : last argument
54 // -- ... 58 // -- ...
(...skipping 123 matching lines...)
178 for (int i = 0; i < initial_capacity; i++) { 182 for (int i = 0; i < initial_capacity; i++) {
179 __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex)); 183 __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
180 } 184 }
181 } else { 185 } else {
182 Label loop, entry; 186 Label loop, entry;
183 __ add(scratch2, scratch1, Operand(initial_capacity * kPointerSize)); 187 __ add(scratch2, scratch1, Operand(initial_capacity * kPointerSize));
184 __ b(&entry); 188 __ b(&entry);
185 __ bind(&loop); 189 __ bind(&loop);
186 __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex)); 190 __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
187 __ bind(&entry); 191 __ bind(&entry);
188 __ cmp(scratch1, scratch2); 192 __ cmpge(scratch1, scratch2);
189 __ b(lt, &loop); 193 __ bf(&loop);
190 } 194 }
191 } 195 }
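Porting note: the loop exit above shows the recurring control-flow rewrite in this port. ARM's cmp sets the full condition flags, which the conditional branch then consumes; SH4 comparisons set only the single T bit, which bt (branch if true) or bf (branch if false) tests. A minimal sketch of the pattern, using only instructions that appear in this patch:

    // ARM: compare sets N/Z/C/V; the branch picks a condition.
    __ cmp(scratch1, scratch2);
    __ b(lt, &loop);              // loop while scratch1 < scratch2

    // SH4: cmpge sets T when scratch1 >= scratch2, so "loop while
    // less-than" becomes "branch while T is false".
    __ cmpge(scratch1, scratch2);
    __ bf(&loop);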
192 196
193 // Allocate a JSArray with the number of elements stored in a register. The 197 // Allocate a JSArray with the number of elements stored in a register. The
194 // register array_function holds the built-in Array function and the register 198 // register array_function holds the built-in Array function and the register
195 // array_size holds the size of the array as a smi. The allocated array is put 199 // array_size holds the size of the array as a smi. The allocated array is put
196 // into the result register and beginning and end of the FixedArray elements 200 // into the result register and beginning and end of the FixedArray elements
197 // storage is put into registers elements_array_storage and elements_array_end 201 // storage is put into registers elements_array_storage and elements_array_end
198 // (see below for when that is not the case). If the parameter fill_with_holes 202 // (see below for when that is not the case). If the parameter fill_with_holes
199 // is true the allocated elements backing store is filled with the hole values 203 // is true the allocated elements backing store is filled with the hole values
(...skipping 16 matching lines...)
216 if (FLAG_debug_code) { // Assert that array size is not zero. 220 if (FLAG_debug_code) { // Assert that array size is not zero.
217 __ tst(array_size, array_size); 221 __ tst(array_size, array_size);
218 __ Assert(ne, "array size is unexpectedly 0"); 222 __ Assert(ne, "array size is unexpectedly 0");
219 } 223 }
220 224
221 // Allocate the JSArray object together with space for a FixedArray with the 225 // Allocate the JSArray object together with space for a FixedArray with the
222 // requested number of elements. 226 // requested number of elements.
223 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); 227 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
224 __ mov(elements_array_end, 228 __ mov(elements_array_end,
225 Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize)); 229 Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
230 __ asr(scratch1, array_size, Operand(kSmiTagSize));
226 __ add(elements_array_end, 231 __ add(elements_array_end,
227 elements_array_end, 232 elements_array_end,
228 Operand(array_size, ASR, kSmiTagSize)); 233 scratch1);
229 __ AllocateInNewSpace( 234 __ AllocateInNewSpace(
230 elements_array_end, 235 elements_array_end,
231 result, 236 result,
232 scratch1, 237 scratch1,
233 scratch2, 238 scratch2,
234 gc_required, 239 gc_required,
235 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)); 240 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
236 241
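Porting note: ARM ALU instructions accept a shifted register as their second operand; SH4's do not, so the port materializes the shift into a scratch register first. Here the arithmetic shift right by kSmiTagSize also untags the smi array size (smis are the value shifted left one bit, tag 0), leaving a word count to add. A sketch of the two forms:

    // ARM: shift folded into the add.
    __ add(elements_array_end, elements_array_end,
           Operand(array_size, ASR, kSmiTagSize));

    // SH4: explicit shift into a scratch, then a plain register add.
    __ asr(scratch1, array_size, Operand(kSmiTagSize));
    __ add(elements_array_end, elements_array_end, scratch1);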
237 // Allocated the JSArray. Now initialize the fields except for the elements 242 // Allocated the JSArray. Now initialize the fields except for the elements
238 // array. 243 // array.
(...skipping 31 matching lines...)
270 STATIC_ASSERT(kSmiTag == 0); 275 STATIC_ASSERT(kSmiTag == 0);
271 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset); 276 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
272 __ str(array_size, 277 __ str(array_size,
273 MemOperand(elements_array_storage, kPointerSize, PostIndex)); 278 MemOperand(elements_array_storage, kPointerSize, PostIndex));
274 279
275 // Calculate elements array and elements array end. 280 // Calculate elements array and elements array end.
276 // result: JSObject 281 // result: JSObject
277 // elements_array_storage: elements array element storage 282 // elements_array_storage: elements array element storage
278 // array_size: smi-tagged size of elements array 283 // array_size: smi-tagged size of elements array
279 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); 284 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
285 __ lsl(elements_array_end, array_size,
286 Operand(kPointerSizeLog2 - kSmiTagSize));
280 __ add(elements_array_end, 287 __ add(elements_array_end,
281 elements_array_storage, 288 elements_array_storage,
282 Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize)); 289 elements_array_end);
283 290
284 // Fill the allocated FixedArray with the hole value if requested. 291 // Fill the allocated FixedArray with the hole value if requested.
285 // result: JSObject 292 // result: JSObject
286 // elements_array_storage: elements array element storage 293 // elements_array_storage: elements array element storage
287 // elements_array_end: start of next object 294 // elements_array_end: start of next object
288 if (fill_with_hole) { 295 if (fill_with_hole) {
289 Label loop, entry; 296 Label loop, entry;
290 __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex); 297 __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
291 __ jmp(&entry); 298 __ jmp_near(&entry);
292 __ bind(&loop); 299 __ bind(&loop);
293 __ str(scratch1, 300 __ str(scratch1,
294 MemOperand(elements_array_storage, kPointerSize, PostIndex)); 301 MemOperand(elements_array_storage, kPointerSize, PostIndex));
295 __ bind(&entry); 302 __ bind(&entry);
296 __ cmp(elements_array_storage, elements_array_end); 303 __ cmpge(elements_array_storage, elements_array_end);
297 __ b(lt, &loop); 304 __ bf(&loop);
298 } 305 }
299 } 306 }
300 307
301 // Create a new array for the built-in Array function. This function allocates 308 // Create a new array for the built-in Array function. This function allocates
302 // the JSArray object and the FixedArray elements array and initializes these. 309 // the JSArray object and the FixedArray elements array and initializes these.
303 // If the Array cannot be constructed in native code the runtime is called. This 310 // If the Array cannot be constructed in native code the runtime is called. This
304 // function assumes the following state: 311 // function assumes the following state:
305 // r0: argc 312 // r0: argc
306 // r1: constructor (built-in Array function) 313 // r1: constructor (built-in Array function)
307 // lr: return address 314 // lr: return address
(...skipping 20 matching lines...)
328 r1, 335 r1,
329 r2, 336 r2,
330 r3, 337 r3,
331 r4, 338 r4,
332 r5, 339 r5,
333 call_generic_code); 340 call_generic_code);
334 __ IncrementCounter(counters->array_function_native(), 1, r3, r4); 341 __ IncrementCounter(counters->array_function_native(), 1, r3, r4);
335 // Set up return value, remove receiver from stack and return. 342 // Set up return value, remove receiver from stack and return.
336 __ mov(r0, r2); 343 __ mov(r0, r2);
337 __ add(sp, sp, Operand(kPointerSize)); 344 __ add(sp, sp, Operand(kPointerSize));
338 __ Jump(lr); 345 __ Ret();
339 346
340 // Check for one argument. Bail out if argument is not smi or if it is 347 // Check for one argument. Bail out if argument is not smi or if it is
341 // negative. 348 // negative.
342 __ bind(&argc_one_or_more); 349 __ bind(&argc_one_or_more);
343 __ cmp(r0, Operand(1)); 350 __ cmp(r0, Operand(1));
344 __ b(ne, &argc_two_or_more); 351 __ b(ne, &argc_two_or_more);
345 STATIC_ASSERT(kSmiTag == 0); 352 STATIC_ASSERT(kSmiTag == 0);
346 __ ldr(r2, MemOperand(sp)); // Get the argument from the stack. 353 __ ldr(r2, MemOperand(sp)); // Get the argument from the stack.
347 __ tst(r2, r2); 354 __ tst(r2, r2);
348 __ b(ne, &not_empty_array); 355 __ b(ne, &not_empty_array);
349 __ Drop(1); // Adjust stack. 356 __ Drop(1); // Adjust stack.
350 __ mov(r0, Operand(0)); // Treat this as a call with argc of zero. 357 __ mov(r0, Operand(0)); // Treat this as a call with argc of zero.
351 __ b(&empty_array); 358 __ b(&empty_array);
352 359
353 __ bind(&not_empty_array); 360 __ bind(&not_empty_array);
354 __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC); 361 __ land(r3, r2, Operand(kIntptrSignBit | kSmiTagMask));
362 __ cmp(r3, Operand(0));
355 __ b(ne, call_generic_code); 363 __ b(ne, call_generic_code);
356 364
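Porting note: ARM's and_ with SetCC computes the mask and updates the flags in one instruction. SH4's land does not touch T, so the port inserts an explicit compare against zero to establish the condition before the branch. Sketch of the equivalence:

    // ARM: compute and test in one step.
    __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
    __ b(ne, call_generic_code);

    // SH4: logical AND, then a separate test to set T.
    __ land(r3, r2, Operand(kIntptrSignBit | kSmiTagMask));
    __ cmp(r3, Operand(0));        // T = (r3 == 0)
    __ b(ne, call_generic_code);   // taken when r3 != 0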
357 // Handle construction of an empty array of a certain size. Bail out if size 365 // Handle construction of an empty array of a certain size. Bail out if size
358 // is too large to actually allocate an elements array. 366 // is too large to actually allocate an elements array.
359 STATIC_ASSERT(kSmiTag == 0); 367 STATIC_ASSERT(kSmiTag == 0);
360 __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize)); 368 __ cmpge(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
361 __ b(ge, call_generic_code); 369 __ bt(call_generic_code);
362 370
363 // r0: argc 371 // r0: argc
364 // r1: constructor 372 // r1: constructor
365 // r2: array_size (smi) 373 // r2: array_size (smi)
366 // sp[0]: argument 374 // sp[0]: argument
367 AllocateJSArray(masm, 375 AllocateJSArray(masm,
368 r1, 376 r1,
369 r2, 377 r2,
370 r3, 378 r3,
371 r4, 379 r4,
372 r5, 380 r5,
373 r6, 381 r6,
374 r7, 382 r7,
375 true, 383 true,
376 call_generic_code); 384 call_generic_code);
377 __ IncrementCounter(counters->array_function_native(), 1, r2, r4); 385 __ IncrementCounter(counters->array_function_native(), 1, r2, r4);
378 // Set up return value, remove receiver and argument from stack and return. 386 // Setup return value, remove receiver and argument from stack and return.
379 __ mov(r0, r3); 387 __ mov(r0, r3);
380 __ add(sp, sp, Operand(2 * kPointerSize)); 388 __ add(sp, sp, Operand(2 * kPointerSize));
381 __ Jump(lr); 389 __ Ret();
382 390
383 // Handle construction of an array from a list of arguments. 391 // Handle construction of an array from a list of arguments.
384 __ bind(&argc_two_or_more); 392 __ bind(&argc_two_or_more);
385 __ mov(r2, Operand(r0, LSL, kSmiTagSize)); // Convert argc to a smi. 393 __ lsl(r2, r0, Operand(kSmiTagSize)); // Convert argc to a smi.
386 394
387 // r0: argc 395 // r0: argc
388 // r1: constructor 396 // r1: constructor
389 // r2: array_size (smi) 397 // r2: array_size (smi)
390 // sp[0]: last argument 398 // sp[0]: last argument
391 AllocateJSArray(masm, 399 AllocateJSArray(masm,
392 r1, 400 r1,
393 r2, 401 r2,
394 r3, 402 r3,
395 r4, 403 r4,
(...skipping 16 matching lines...)
412 Label loop, entry; 420 Label loop, entry;
413 __ mov(r7, sp); 421 __ mov(r7, sp);
414 __ jmp(&entry); 422 __ jmp(&entry);
415 __ bind(&loop); 423 __ bind(&loop);
416 __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex)); 424 __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex));
417 if (FLAG_smi_only_arrays) { 425 if (FLAG_smi_only_arrays) {
418 __ JumpIfNotSmi(r2, &has_non_smi_element); 426 __ JumpIfNotSmi(r2, &has_non_smi_element);
419 } 427 }
420 __ str(r2, MemOperand(r5, -kPointerSize, PreIndex)); 428 __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
421 __ bind(&entry); 429 __ bind(&entry);
422 __ cmp(r4, r5); 430 __ cmpge(r4, r5);
423 __ b(lt, &loop); 431 __ bf(&loop);
424 432
425 __ bind(&finish); 433 __ bind(&finish);
426 __ mov(sp, r7); 434 __ mov(sp, r7);
427 435
428 // Remove caller arguments and receiver from the stack, setup return value and 436 // Remove caller arguments and receiver from the stack, setup return value and
429 // return. 437 // return.
430 // r0: argc 438 // r0: argc
431 // r3: JSArray 439 // r3: JSArray
432 // sp[0]: receiver 440 // sp[0]: receiver
433 __ add(sp, sp, Operand(kPointerSize)); 441 __ add(sp, sp, Operand(kPointerSize));
434 __ mov(r0, r3); 442 __ mov(r0, r3);
435 __ Jump(lr); 443 __ Ret();
436 444
437 __ bind(&has_non_smi_element); 445 __ bind(&has_non_smi_element);
438 // Double values are handled by the runtime. 446 // Double values are handled by the runtime.
439 __ CheckMap( 447 __ CheckMap(
440 r2, r9, Heap::kHeapNumberMapRootIndex, &not_double, DONT_DO_SMI_CHECK); 448 r2, r9, Heap::kHeapNumberMapRootIndex, &not_double, DONT_DO_SMI_CHECK);
441 __ bind(&cant_transition_map); 449 __ bind(&cant_transition_map);
442 __ UndoAllocationInNewSpace(r3, r4); 450 __ UndoAllocationInNewSpace(r3, r4);
443 __ b(call_generic_code); 451 __ b(call_generic_code);
444 452
445 __ bind(&not_double); 453 __ bind(&not_double);
(...skipping 12 matching lines...)
458 r9, 466 r9,
459 kLRHasNotBeenSaved, 467 kLRHasNotBeenSaved,
460 kDontSaveFPRegs, 468 kDontSaveFPRegs,
461 EMIT_REMEMBERED_SET, 469 EMIT_REMEMBERED_SET,
462 OMIT_SMI_CHECK); 470 OMIT_SMI_CHECK);
463 Label loop2; 471 Label loop2;
464 __ sub(r7, r7, Operand(kPointerSize)); 472 __ sub(r7, r7, Operand(kPointerSize));
465 __ bind(&loop2); 473 __ bind(&loop2);
466 __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex)); 474 __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex));
467 __ str(r2, MemOperand(r5, -kPointerSize, PreIndex)); 475 __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
468 __ cmp(r4, r5); 476 __ cmpge(r4, r5);
469 __ b(lt, &loop2); 477 __ bf(&loop2);
470 __ b(&finish); 478 __ b(&finish);
471 } 479 }
472 480
473 481
474 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { 482 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
475 // ----------- S t a t e ------------- 483 // ----------- S t a t e -------------
476 // -- r0 : number of arguments 484 // -- r0 : number of arguments
477 // -- lr : return address 485 // -- lr : return address
478 // -- sp[...]: constructor arguments 486 // -- sp[...]: constructor arguments
479 // ----------------------------------- 487 // -----------------------------------
480 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; 488 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
481 489
482 // Get the InternalArray function. 490 // Get the InternalArray function.
483 GenerateLoadInternalArrayFunction(masm, r1); 491 GenerateLoadInternalArrayFunction(masm, r1);
484 492
485 if (FLAG_debug_code) { 493 if (FLAG_debug_code) {
486 // Initial map for the builtin InternalArray functions should be maps. 494 // Initial map for the builtin InternalArray functions should be maps.
487 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); 495 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
488 __ tst(r2, Operand(kSmiTagMask)); 496 __ tst(r2, Operand(kSmiTagMask));
489 __ Assert(ne, "Unexpected initial map for InternalArray function"); 497 __ Assert(ne, "Unexpected initial map for InternalArray function");
490 __ CompareObjectType(r2, r3, r4, MAP_TYPE); 498 __ CompareObjectType(r2, r3, r4, MAP_TYPE, eq);
491 __ Assert(eq, "Unexpected initial map for InternalArray function"); 499 __ Assert(eq, "Unexpected initial map for InternalArray function");
492 } 500 }
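Porting note: CompareObjectType grows an extra condition argument on SH4 because the helper must leave one specific relation in the T bit, rather than full flags the caller interprets later. The assertion then only needs to know whether T matched. Illustrative shape, assuming the sh4 MacroAssembler signature introduced by this patch:

    // ARM: helper sets flags; caller chooses eq/ne/ge at the use site.
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for InternalArray function");

    // SH4: the relation is chosen up front so it can land in T.
    __ CompareObjectType(r2, r3, r4, MAP_TYPE, eq);
    __ Assert(eq, "Unexpected initial map for InternalArray function");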
493 501
494 // Run the native code for the InternalArray function called as a normal 502 // Run the native code for the InternalArray function called as a normal
495 // function. 503 // function.
496 ArrayNativeCode(masm, &generic_array_code); 504 ArrayNativeCode(masm, &generic_array_code);
497 505
498 // Jump to the generic array code if the specialized code cannot handle the 506 // Jump to the generic array code if the specialized code cannot handle the
499 // construction. 507 // construction.
500 __ bind(&generic_array_code); 508 __ bind(&generic_array_code);
(...skipping 13 matching lines...)
514 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; 522 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
515 523
516 // Get the Array function. 524 // Get the Array function.
517 GenerateLoadArrayFunction(masm, r1); 525 GenerateLoadArrayFunction(masm, r1);
518 526
519 if (FLAG_debug_code) { 527 if (FLAG_debug_code) {
520 // Initial map for the builtin Array functions should be maps. 528 // Initial map for the builtin Array functions should be maps.
521 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); 529 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
522 __ tst(r2, Operand(kSmiTagMask)); 530 __ tst(r2, Operand(kSmiTagMask));
523 __ Assert(ne, "Unexpected initial map for Array function"); 531 __ Assert(ne, "Unexpected initial map for Array function");
524 __ CompareObjectType(r2, r3, r4, MAP_TYPE); 532 __ CompareObjectType(r2, r3, r4, MAP_TYPE, eq);
525 __ Assert(eq, "Unexpected initial map for Array function"); 533 __ Assert(eq, "Unexpected initial map for Array function");
526 } 534 }
527 535
528 // Run the native code for the Array function called as a normal function. 536 // Run the native code for the Array function called as a normal function.
529 ArrayNativeCode(masm, &generic_array_code); 537 ArrayNativeCode(masm, &generic_array_code);
530 538
531 // Jump to the generic array code if the specialized code cannot handle 539 // Jump to the generic array code if the specialized code cannot handle
532 // the construction. 540 // the construction.
533 __ bind(&generic_array_code); 541 __ bind(&generic_array_code);
534 542
(...skipping 12 matching lines...)
547 // ----------------------------------- 555 // -----------------------------------
548 Label generic_constructor; 556 Label generic_constructor;
549 557
550 if (FLAG_debug_code) { 558 if (FLAG_debug_code) {
551 // The array construct code is only set for the builtin and internal 559 // The array construct code is only set for the builtin and internal
552 // Array functions which always have a map. 560 // Array functions which always have a map.
553 // Initial map for the builtin Array function should be a map. 561 // Initial map for the builtin Array function should be a map.
554 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); 562 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
555 __ tst(r2, Operand(kSmiTagMask)); 563 __ tst(r2, Operand(kSmiTagMask));
556 __ Assert(ne, "Unexpected initial map for Array function"); 564 __ Assert(ne, "Unexpected initial map for Array function");
557 __ CompareObjectType(r2, r3, r4, MAP_TYPE); 565 __ CompareObjectType(r2, r3, r4, MAP_TYPE, eq);
558 __ Assert(eq, "Unexpected initial map for Array function"); 566 __ Assert(eq, "Unexpected initial map for Array function");
559 } 567 }
560 568
561 // Run the native code for the Array function called as a constructor. 569 // Run the native code for the Array function called as a constructor.
562 ArrayNativeCode(masm, &generic_constructor); 570 ArrayNativeCode(masm, &generic_constructor);
563 571
564 // Jump to the generic construct code in case the specialized code cannot 572 // Jump to the generic construct code in case the specialized code cannot
565 // handle the construction. 573 // handle the construction.
566 __ bind(&generic_constructor); 574 __ bind(&generic_constructor);
567 Handle<Code> generic_construct_stub = 575 Handle<Code> generic_construct_stub =
568 masm->isolate()->builtins()->JSConstructStubGeneric(); 576 masm->isolate()->builtins()->JSConstructStubGeneric();
569 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); 577 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
570 } 578 }
571 579
572 580
573 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { 581 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
574 // ----------- S t a t e ------------- 582 // ----------- S t a t e -------------
575 // -- r0 : number of arguments 583 // -- r0 : number of arguments
576 // -- r1 : constructor function 584 // -- r1 : constructor function
577 // -- lr : return address 585 // -- lr : return address
578 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) 586 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
579 // -- sp[argc * 4] : receiver 587 // -- sp[argc * 4] : receiver
580 // ----------------------------------- 588 // -----------------------------------
581 Counters* counters = masm->isolate()->counters(); 589 Counters* counters = masm->isolate()->counters();
582 __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3); 590 __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);
583 591
584 Register function = r1; 592 Register function = r1;
585 if (FLAG_debug_code) { 593 if (FLAG_debug_code) {
586 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2); 594 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
587 __ cmp(function, Operand(r2)); 595 __ cmp(function, r2);
588 __ Assert(eq, "Unexpected String function"); 596 __ Assert(eq, "Unexpected String function");
589 } 597 }
590 598
591 // Load the first argument in r0 and get rid of the rest. 599 // Load the first argument in r0 and get rid of the rest.
592 Label no_arguments; 600 Label no_arguments;
593 __ cmp(r0, Operand(0, RelocInfo::NONE)); 601 __ cmp(r0, Operand(0, RelocInfo::NONE));
594 __ b(eq, &no_arguments); 602 __ b(eq, &no_arguments);
595 // First args = sp[(argc - 1) * 4]. 603 // First args = sp[(argc - 1) * 4].
596 __ sub(r0, r0, Operand(1)); 604 __ sub(r0, r0, Operand(1));
597 __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex)); 605 __ lsl(r0, r0, Operand(kPointerSizeLog2));
606 __ ldr(r0, MemOperand(sp, r0));
598 // sp now points to args[0], drop args[0] + receiver. 607 // sp now points to args[0], drop args[0] + receiver.
599 __ Drop(2); 608 __ Drop(2);
600 609
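Porting note: ARM's ldr can scale a register index and write the computed address back (PreIndex). SH4 has no scaled-index addressing, so the port shifts first and then loads with a plain register offset. Note that the writeback is gone: on ARM this ldr also updated sp, whereas the SH4 form leaves sp untouched, so the surrounding stack adjustment has to be read with that in mind. Sketch:

    // ARM: r0 scaled by the shift, address written back into sp.
    __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));

    // SH4: explicit scale, then a simple base+register load; sp is
    // not modified by the load itself.
    __ lsl(r0, r0, Operand(kPointerSizeLog2));
    __ ldr(r0, MemOperand(sp, r0));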
601 Register argument = r2; 610 Register argument = r2;
602 Label not_cached, argument_is_string; 611 Label not_cached, argument_is_string;
603 NumberToStringStub::GenerateLookupNumberStringCache( 612 NumberToStringStub::GenerateLookupNumberStringCache(
604 masm, 613 masm,
605 r0, // Input. 614 r0, // Input.
606 argument, // Result. 615 argument, // Result.
607 r3, // Scratch. 616 r3, // Scratch.
(...skipping 88 matching lines...)
696 __ CallRuntime(Runtime::kNewStringWrapper, 1); 705 __ CallRuntime(Runtime::kNewStringWrapper, 1);
697 } 706 }
698 __ Ret(); 707 __ Ret();
699 } 708 }
700 709
701 710
702 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { 711 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
703 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 712 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
704 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset)); 713 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
705 __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag)); 714 __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
706 __ mov(pc, r2); 715 __ jmp(r2);
707 } 716 }
708 717
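Porting note: on ARM the program counter is a general-purpose register, so a tail call can be written as a mov into pc. SH4's pc is not addressable that way; the indirect jump goes through jmp:

    // ARM: branch by writing the target into pc.
    __ mov(pc, r2);

    // SH4: register-indirect jump.
    __ jmp(r2);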
709 718
710 void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) { 719 void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
711 GenerateTailCallToSharedCode(masm); 720 GenerateTailCallToSharedCode(masm);
712 } 721 }
713 722
714 723
715 void Builtins::Generate_ParallelRecompile(MacroAssembler* masm) { 724 void Builtins::Generate_ParallelRecompile(MacroAssembler* masm) {
716 { 725 {
(...skipping 32 matching lines...)
749 // Should never count constructions for api objects. 758 // Should never count constructions for api objects.
750 ASSERT(!is_api_function || !count_constructions); 759 ASSERT(!is_api_function || !count_constructions);
751 760
752 Isolate* isolate = masm->isolate(); 761 Isolate* isolate = masm->isolate();
753 762
754 // Enter a construct frame. 763 // Enter a construct frame.
755 { 764 {
756 FrameScope scope(masm, StackFrame::CONSTRUCT); 765 FrameScope scope(masm, StackFrame::CONSTRUCT);
757 766
758 // Preserve the two incoming parameters on the stack. 767 // Preserve the two incoming parameters on the stack.
759 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); 768 __ lsl(r0, r0, Operand(kSmiTagSize));
760 __ push(r0); // Smi-tagged arguments count. 769 __ push(r0); // Smi-tagged arguments count.
761 __ push(r1); // Constructor function. 770 __ push(r1); // Constructor function.
762 771
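Porting note: the smi tagging here relies on V8's 32-bit smi layout (kSmiTag == 0, kSmiTagSize == 1): a tagged count is simply argc shifted left one bit, so every smi is an even word. Tagging and untagging are both single shifts, as elsewhere in this file:

    __ lsl(r0, r0, Operand(kSmiTagSize));  // tag: r0 = argc << 1
    // ...
    __ lsr(r0, r3, Operand(kSmiTagSize));  // untag a non-negative smi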
763 // Try to allocate the object without transitioning into C code. If any of 772 // Try to allocate the object without transitioning into C code. If any of
764 // the preconditions is not met, the code bails out to the runtime call. 773 // the preconditions is not met, the code bails out to the runtime call.
765 Label rt_call, allocated; 774 Label rt_call, allocated;
766 if (FLAG_inline_new) { 775 if (FLAG_inline_new) {
767 Label undo_allocation; 776 Label undo_allocation;
768 #ifdef ENABLE_DEBUGGER_SUPPORT 777 #ifdef ENABLE_DEBUGGER_SUPPORT
769 ExternalReference debug_step_in_fp = 778 ExternalReference debug_step_in_fp =
770 ExternalReference::debug_step_in_fp_address(isolate); 779 ExternalReference::debug_step_in_fp_address(isolate);
771 __ mov(r2, Operand(debug_step_in_fp)); 780 __ mov(r2, Operand(debug_step_in_fp));
772 __ ldr(r2, MemOperand(r2)); 781 __ ldr(r2, MemOperand(r2));
773 __ tst(r2, r2); 782 __ tst(r2, r2);
774 __ b(ne, &rt_call); 783 __ b(ne, &rt_call);
775 #endif 784 #endif
776 785
777 // Load the initial map and verify that it is in fact a map. 786 // Load the initial map and verify that it is in fact a map.
778 // r1: constructor function 787 // r1: constructor function
779 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); 788 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
780 __ JumpIfSmi(r2, &rt_call); 789 __ JumpIfSmi(r2, &rt_call);
781 __ CompareObjectType(r2, r3, r4, MAP_TYPE); 790 __ CompareObjectType(r2, r3, r4, MAP_TYPE, eq);
782 __ b(ne, &rt_call); 791 __ b(ne, &rt_call);
783 792
784 // Check that the constructor is not constructing a JSFunction (see 793 // Check that the constructor is not constructing a JSFunction (see comments
785 // comments in Runtime_NewObject in runtime.cc). In which case the 794 // in Runtime_NewObject in runtime.cc). In which case the initial map's
786 // initial map's instance type would be JS_FUNCTION_TYPE. 795 // instance type would be JS_FUNCTION_TYPE.
787 // r1: constructor function 796 // r1: constructor function
788 // r2: initial map 797 // r2: initial map
789 __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE); 798 __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE, eq);
790 __ b(eq, &rt_call); 799 __ b(eq, &rt_call);
791 800
792 if (count_constructions) { 801 if (count_constructions) {
793 Label allocate; 802 Label allocate;
794 // Decrease generous allocation count. 803 // Decrease generous allocation count.
795 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 804 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
796 MemOperand constructor_count = 805 MemOperand constructor_count =
797 FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset); 806 FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
798 __ ldrb(r4, constructor_count); 807 __ ldrb(r4, constructor_count);
799 __ sub(r4, r4, Operand(1), SetCC); 808 __ sub(r4, r4, Operand(1));
809 __ cmpeq(r4, Operand(0));
800 __ strb(r4, constructor_count); 810 __ strb(r4, constructor_count);
801 __ b(ne, &allocate); 811 __ b(ne, &allocate);
802 812
803 __ Push(r1, r2); 813 __ Push(r1, r2);
804 814
805 __ push(r1); // constructor 815 __ push(r1); // constructor
806 // The call will replace the stub, so the countdown is only done once. 816 // The call will replace the stub, so the countdown is only done once.
807 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); 817 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
808 818
809 __ pop(r2); 819 __ pop(r2);
(...skipping 22 matching lines...)
832 __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); 842 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
833 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset); 843 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
834 __ str(r6, MemOperand(r5, kPointerSize, PostIndex)); 844 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
835 845
836 // Fill all the in-object properties with the appropriate filler. 846 // Fill all the in-object properties with the appropriate filler.
837 // r1: constructor function 847 // r1: constructor function
838 // r2: initial map 848 // r2: initial map
839 // r3: object size (in words) 849 // r3: object size (in words)
840 // r4: JSObject (not tagged) 850 // r4: JSObject (not tagged)
841 // r5: First in-object property of JSObject (not tagged) 851 // r5: First in-object property of JSObject (not tagged)
842 __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object. 852 __ lsl(r6, r3, Operand(kPointerSizeLog2));
853 __ add(r6, r4, r6); // End of object.
843 ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize); 854 ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
844 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex); 855 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
845 if (count_constructions) { 856 if (count_constructions) {
846 __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset)); 857 __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
847 __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte, 858 __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
848 kBitsPerByte); 859 kBitsPerByte);
849 __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2)); 860 __ lsl(r0, r0, Operand(kPointerSizeLog2));
861 __ add(r0, r5, r0);
850 // r0: offset of first field after pre-allocated fields 862 // r0: offset of first field after pre-allocated fields
851 if (FLAG_debug_code) { 863 if (FLAG_debug_code) {
852 __ cmp(r0, r6); 864 __ cmpgt(r0, r6);
853 __ Assert(le, "Unexpected number of pre-allocated property fields."); 865 __ Assert(ne, "Unexpected number of pre-allocated property fields.");
854 } 866 }
855 __ InitializeFieldsWithFiller(r5, r0, r7); 867 __ InitializeFieldsWithFiller(r5, r0, r7);
856 // To allow for truncation. 868 // To allow for truncation.
857 __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex); 869 __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
858 } 870 }
859 __ InitializeFieldsWithFiller(r5, r6, r7); 871 __ InitializeFieldsWithFiller(r5, r6, r7);
860 872
861 // Add the object tag to make the JSObject real, so that we can continue 873 // Add the object tag to make the JSObject real, so that we can continue
862 // and jump into the continuation code at any time from now on. Any 874 // and jump into the continuation code at any time from now on. Any
863 // failures need to undo the allocation, so that the heap is in a 875 // failures need to undo the allocation, so that the heap is in a
864 // consistent state and verifiable. 876 // consistent state and verifiable.
865 __ add(r4, r4, Operand(kHeapObjectTag)); 877 __ add(r4, r4, Operand(kHeapObjectTag));
866 878
867 // Check if a non-empty properties array is needed. Continue with 879 // Check if a non-empty properties array is needed. Continue with
868 // allocated object if not; fall through to runtime call if it is. 880 // allocated object if not; fall through to runtime call if it is.
869 // r1: constructor function 881 // r1: constructor function
870 // r4: JSObject 882 // r4: JSObject
871 // r5: start of next object (not tagged) 883 // r5: start of next object (not tagged)
872 __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset)); 884 __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
873 // The field instance sizes contains both pre-allocated property fields 885 // The field instance sizes contains both pre-allocated property fields
874 // and in-object properties. 886 // and in-object properties.
875 __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset)); 887 __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
876 __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte, 888 __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
877 kBitsPerByte); 889 kBitsPerByte);
878 __ add(r3, r3, Operand(r6)); 890 __ add(r3, r3, r6);
879 __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte, 891 __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte,
880 kBitsPerByte); 892 kBitsPerByte);
881 __ sub(r3, r3, Operand(r6), SetCC); 893 __ sub(r3, r3, r6);
894 __ cmpeq(r6, Operand(0));
882 895
883 // Done if no extra properties are to be allocated. 896 // Done if no extra properties are to be allocated.
884 __ b(eq, &allocated); 897 __ b(eq, &allocated);
885 __ Assert(pl, "Property allocation count failed."); 898 __ cmpge(r6, Operand(0));
899 __ Assert(eq, "Property allocation count failed.");
886 900
887 // Scale the number of elements by pointer size and add the header for 901 // Scale the number of elements by pointer size and add the header for
888 // FixedArrays to the start of the next object calculation from above. 902 // FixedArrays to the start of the next object calculation from above.
889 // r1: constructor 903 // r1: constructor
890 // r3: number of elements in properties array 904 // r3: number of elements in properties array
891 // r4: JSObject 905 // r4: JSObject
892 // r5: start of next object 906 // r5: start of next object
893 __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize)); 907 __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
894 __ AllocateInNewSpace( 908 __ AllocateInNewSpace(
895 r0, 909 r0,
896 r5, 910 r5,
897 r6, 911 r6,
898 r2, 912 r2,
899 &undo_allocation, 913 &undo_allocation,
900 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS)); 914 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
901 915
902 // Initialize the FixedArray. 916 // Initialize the FixedArray.
903 // r1: constructor 917 // r1: constructor
904 // r3: number of elements in properties array 918 // r3: number of elements in properties array
905 // r4: JSObject 919 // r4: JSObject
906 // r5: FixedArray (not tagged) 920 // r5: FixedArray (not tagged)
907 __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex); 921 __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
908 __ mov(r2, r5); 922 __ mov(r2, r5);
909 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset); 923 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
910 __ str(r6, MemOperand(r2, kPointerSize, PostIndex)); 924 __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
911 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset); 925 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
912 __ mov(r0, Operand(r3, LSL, kSmiTagSize)); 926 __ lsl(r0, r3, Operand(kSmiTagSize));
913 __ str(r0, MemOperand(r2, kPointerSize, PostIndex)); 927 __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
914 928
915 // Initialize the fields to undefined. 929 // Initialize the fields to undefined.
916 // r1: constructor function 930 // r1: constructor function
917 // r2: First element of FixedArray (not tagged) 931 // r2: First element of FixedArray (not tagged)
918 // r3: number of elements in properties array 932 // r3: number of elements in properties array
919 // r4: JSObject 933 // r4: JSObject
920 // r5: FixedArray (not tagged) 934 // r5: FixedArray (not tagged)
921 __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object. 935 __ lsl(r6, r3, Operand(kPointerSizeLog2));
936 __ add(r6, r2, r6); // End of object.
922 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize); 937 ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
923 { Label loop, entry; 938 { Label loop, entry;
924 if (count_constructions) { 939 if (count_constructions) {
925 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex); 940 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
926 } else if (FLAG_debug_code) { 941 } else if (FLAG_debug_code) {
927 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex); 942 __ LoadRoot(cp, Heap::kUndefinedValueRootIndex);
928 __ cmp(r7, r8); 943 __ cmp(r7, cp);
929 __ Assert(eq, "Undefined value not loaded."); 944 __ Assert(eq, "Undefined value not loaded.");
930 } 945 }
931 __ b(&entry); 946 __ b(&entry);
932 __ bind(&loop); 947 __ bind(&loop);
933 __ str(r7, MemOperand(r2, kPointerSize, PostIndex)); 948 __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
934 __ bind(&entry); 949 __ bind(&entry);
935 __ cmp(r2, r6); 950 __ cmpge(r2, r6);
936 __ b(lt, &loop); 951 __ bf(&loop);
937 } 952 }
938 953
939 // Store the initialized FixedArray into the properties field of 954 // Store the initialized FixedArray into the properties field of
940 // the JSObject 955 // the JSObject
941 // r1: constructor function 956 // r1: constructor function
942 // r4: JSObject 957 // r4: JSObject
943 // r5: FixedArray (not tagged) 958 // r5: FixedArray (not tagged)
944 __ add(r5, r5, Operand(kHeapObjectTag)); // Add the heap tag. 959 __ add(r5, r5, Operand(kHeapObjectTag)); // Add the heap tag.
945 __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset)); 960 __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));
946 961
(...skipping 28 matching lines...)
975 // sp[1]: receiver 990 // sp[1]: receiver
976 // sp[2]: constructor function 991 // sp[2]: constructor function
977 // sp[3]: number of arguments (smi-tagged) 992 // sp[3]: number of arguments (smi-tagged)
978 __ ldr(r1, MemOperand(sp, 2 * kPointerSize)); 993 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
979 __ ldr(r3, MemOperand(sp, 3 * kPointerSize)); 994 __ ldr(r3, MemOperand(sp, 3 * kPointerSize));
980 995
981 // Set up pointer to last argument. 996 // Set up pointer to last argument.
982 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset)); 997 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
983 998
984 // Set up number of arguments for function call below 999 // Set up number of arguments for function call below
985 __ mov(r0, Operand(r3, LSR, kSmiTagSize)); 1000 __ lsr(r0, r3, Operand(kSmiTagSize));
986 1001
987 // Copy arguments and receiver to the expression stack. 1002 // Copy arguments and receiver to the expression stack.
988 // r0: number of arguments 1003 // r0: number of arguments
989 // r1: constructor function 1004 // r1: constructor function
990 // r2: address of last argument (caller sp) 1005 // r2: address of last argument (caller sp)
991 // r3: number of arguments (smi-tagged) 1006 // r3: number of arguments (smi-tagged)
992 // sp[0]: receiver 1007 // sp[0]: receiver
993 // sp[1]: receiver 1008 // sp[1]: receiver
994 // sp[2]: constructor function 1009 // sp[2]: constructor function
995 // sp[3]: number of arguments (smi-tagged) 1010 // sp[3]: number of arguments (smi-tagged)
996 Label loop, entry; 1011 Label loop, entry;
997 __ b(&entry); 1012 __ b(&entry);
998 __ bind(&loop); 1013 __ bind(&loop);
999 __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1)); 1014 __ lsl(ip, r3, Operand(kPointerSizeLog2 - 1));
1015 __ ldr(ip, MemOperand(r2, ip));
1000 __ push(ip); 1016 __ push(ip);
1001 __ bind(&entry); 1017 __ bind(&entry);
1002 __ sub(r3, r3, Operand(2), SetCC); 1018 __ sub(r3, r3, Operand(2));
1003 __ b(ge, &loop); 1019 __ cmpge(r3, Operand(0));
1020 __ bt(&loop);
1004 1021
1005 // Call the function. 1022 // Call the function.
1006 // r0: number of arguments 1023 // r0: number of arguments
1007 // r1: constructor function 1024 // r1: constructor function
1008 if (is_api_function) { 1025 if (is_api_function) {
1009 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); 1026 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1010 Handle<Code> code = 1027 Handle<Code> code =
1011 masm->isolate()->builtins()->HandleApiCallConstruct(); 1028 masm->isolate()->builtins()->HandleApiCallConstruct();
1012 ParameterCount expected(0); 1029 ParameterCount expected(0);
1013 __ InvokeCode(code, expected, expected, 1030 __ InvokeCode(code, expected, expected,
(...skipping 23 matching lines...)
1037 1054
1038 // If the result is a smi, it is *not* an object in the ECMA sense. 1055 // If the result is a smi, it is *not* an object in the ECMA sense.
1039 // r0: result 1056 // r0: result
1040 // sp[0]: receiver (newly allocated object) 1057 // sp[0]: receiver (newly allocated object)
1041 // sp[1]: constructor function 1058 // sp[1]: constructor function
1042 // sp[2]: number of arguments (smi-tagged) 1059 // sp[2]: number of arguments (smi-tagged)
1043 __ JumpIfSmi(r0, &use_receiver); 1060 __ JumpIfSmi(r0, &use_receiver);
1044 1061
1045 // If the type of the result (stored in its map) is less than 1062 // If the type of the result (stored in its map) is less than
1046 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense. 1063 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
1047 __ CompareObjectType(r0, r3, r3, FIRST_SPEC_OBJECT_TYPE); 1064 __ CompareObjectType(r0, r3, r3, FIRST_SPEC_OBJECT_TYPE, ge);
1048 __ b(ge, &exit); 1065 __ bt(&exit);
1049 1066
1050 // Throw away the result of the constructor invocation and use the 1067 // Throw away the result of the constructor invocation and use the
1051 // on-stack receiver as the result. 1068 // on-stack receiver as the result.
1052 __ bind(&use_receiver); 1069 __ bind(&use_receiver);
1053 __ ldr(r0, MemOperand(sp)); 1070 __ ldr(r0, MemOperand(sp));
1054 1071
1055 // Remove receiver from the stack, remove caller arguments, and 1072 // Remove receiver from the stack, remove caller arguments, and
1056 // return. 1073 // return.
1057 __ bind(&exit); 1074 __ bind(&exit);
1058 // r0: result 1075 // r0: result
1059 // sp[0]: receiver (newly allocated object) 1076 // sp[0]: receiver (newly allocated object)
1060 // sp[1]: constructor function 1077 // sp[1]: constructor function
1061 // sp[2]: number of arguments (smi-tagged) 1078 // sp[2]: number of arguments (smi-tagged)
1062 __ ldr(r1, MemOperand(sp, 2 * kPointerSize)); 1079 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
1063 1080
1064 // Leave construct frame. 1081 // Leave construct frame.
1065 } 1082 }
1066 1083
1067 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1)); 1084 __ lsl(ip, r1, Operand(kPointerSizeLog2 - 1));
1085 __ add(sp, sp, ip);
1068 __ add(sp, sp, Operand(kPointerSize)); 1086 __ add(sp, sp, Operand(kPointerSize));
1069 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2); 1087 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
1070 __ Jump(lr); 1088 __ rts();
1071 } 1089 }
1072 1090
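Porting note: the epilogue change is the same theme as the tail calls above. ARM returns by jumping through lr; SH4 keeps its return address in pr and returns with rts (the branch delay slot is assumed to be handled inside the sh4 assembler):

    // ARM: return = jump to the link register.
    __ Jump(lr);

    // SH4: dedicated return instruction via pr.
    __ rts();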
1073 1091
1074 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { 1092 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
1075 Generate_JSConstructStubHelper(masm, false, true); 1093 Generate_JSConstructStubHelper(masm, false, true);
1076 } 1094 }
1077 1095
1078 1096
1079 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { 1097 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
1080 Generate_JSConstructStubHelper(masm, false, false); 1098 Generate_JSConstructStubHelper(masm, false, false);
1081 } 1099 }
1082 1100
1083 1101
1084 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { 1102 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
1085 Generate_JSConstructStubHelper(masm, true, false); 1103 Generate_JSConstructStubHelper(masm, true, false);
1086 } 1104 }
1087 1105
1088 1106
1089 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, 1107 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
1090 bool is_construct) { 1108 bool is_construct) {
1091 // Called from Generate_JS_Entry 1109 // Called from Generate_JS_Entry
1092 // r0: code entry 1110 // r0: code entry
1093 // r1: function 1111 // r1: function
1094 // r2: receiver 1112 // r2: receiver
1095 // r3: argc 1113 // r3: argc
1096 // r4: argv 1114 // r4: argv (JSEntryStub sets it)
1097 // r5-r7, cp may be clobbered 1115 // r5-r7, cp may be clobbered
1098 1116
1099 // Clear the context before we push it when entering the internal frame. 1117 // Clear the context before we push it when entering the internal frame.
1100 __ mov(cp, Operand(0, RelocInfo::NONE)); 1118 __ mov(cp, Operand(0, RelocInfo::NONE));
1101 1119
1102 // Enter an internal frame. 1120 // Enter an internal frame.
1103 { 1121 {
1104 FrameScope scope(masm, StackFrame::INTERNAL); 1122 FrameScope scope(masm, StackFrame::INTERNAL);
1105 1123
1106 // Set up the context from the function argument. 1124 // Set up the context from the function argument.
1107 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); 1125 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1108 1126
1109 __ InitializeRootRegister(); 1127 __ InitializeRootRegister();
1110 1128
1111 // Push the function and the receiver onto the stack. 1129 // Push the function and the receiver onto the stack.
1112 __ push(r1); 1130 __ push(r1);
1113 __ push(r2); 1131 __ push(r2);
1114 1132
1115 // Copy arguments to the stack in a loop. 1133 // Copy arguments to the stack in a loop.
1116 // r1: function 1134 // r1: function
1117 // r3: argc 1135 // r3: argc
1118 // r4: argv, i.e. points to first arg 1136 // r4: argv, i.e. points to first arg
1119 Label loop, entry; 1137 Label loop, entry;
1120 __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2)); 1138 __ lsl(r2, r3, Operand(kPointerSizeLog2));
1139 __ add(r2, r4, r2);
1121 // r2 points past last arg. 1140 // r2 points past last arg.
1122 __ b(&entry); 1141 __ b_near(&entry);
1123 __ bind(&loop); 1142 __ bind(&loop);
1124 __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter 1143 __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter
1125 __ ldr(r0, MemOperand(r0)); // dereference handle 1144 __ ldr(r0, MemOperand(r0)); // dereference handle
1126 __ push(r0); // push parameter 1145 __ push(r0); // push parameter
1127 __ bind(&entry); 1146 __ bind(&entry);
1128 __ cmp(r4, r2); 1147 __ cmp(r4, r2);
1129 __ b(ne, &loop); 1148 __ b(ne, &loop);
1130 1149
1131 // Initialize all JavaScript callee-saved registers, since they will be seen 1150 // Initialize all JavaScript callee-saved registers, since they will be seen
1132 // by the garbage collector as part of handlers. 1151 // by the garbage collector as part of handlers.
1133 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); 1152 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
1134 __ mov(r5, Operand(r4)); 1153 __ mov(sh4_r5, r4);
1135 __ mov(r6, Operand(r4)); 1154 __ mov(sh4_r6, r4);
1136 __ mov(r7, Operand(r4)); 1155 __ mov(sh4_r7, r4);
1137 if (kR9Available == 1) { 1156 __ mov(sh4_r8, r4);
1138 __ mov(r9, Operand(r4)); 1157 __ mov(sh4_r9, r4);
1139 }
1140 1158
1141 // Invoke the code and pass argc as r0. 1159 // Invoke the code and pass argc as r0.
1142 __ mov(r0, Operand(r3)); 1160 __ mov(r0, r3);
1161
1162 // r0: argc
1163 // r1: function
1143 if (is_construct) { 1164 if (is_construct) {
1144 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); 1165 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
1145 __ CallStub(&stub); 1166 __ CallStub(&stub);
1146 } else { 1167 } else {
1147 ParameterCount actual(r0); 1168 ParameterCount actual(r0);
1148 __ InvokeFunction(r1, actual, CALL_FUNCTION, 1169 __ InvokeFunction(r1, actual, CALL_FUNCTION,
1149 NullCallWrapper(), CALL_AS_METHOD); 1170 NullCallWrapper(), CALL_AS_METHOD);
1150 } 1171 }
1151 // Exit the JS frame and remove the parameters (except function), and 1172 // Exit the JS frame and remove the parameters (except function), and
1152 // return. 1173 // return.
1153 // Respect ABI stack constraint. 1174 // Respect ABI stack constraint.
1154 } 1175 }
1155 __ Jump(lr); 1176 __ rts();
1156 1177
1157 // r0: result 1178 // r0: result
1158 } 1179 }
1159 1180
1160 1181
1161 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { 1182 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
1162 Generate_JSEntryTrampolineHelper(masm, false); 1183 Generate_JSEntryTrampolineHelper(masm, false);
1163 } 1184 }
1164 1185
1165 1186
(...skipping 69 matching lines...)
1235 __ push(r0); 1256 __ push(r0);
1236 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); 1257 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
1237 } 1258 }
1238 1259
1239 // Get the full codegen state from the stack and untag it -> r6. 1260 // Get the full codegen state from the stack and untag it -> r6.
1240 __ ldr(r6, MemOperand(sp, 0 * kPointerSize)); 1261 __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
1241 __ SmiUntag(r6); 1262 __ SmiUntag(r6);
1242 // Switch on the state. 1263 // Switch on the state.
1243 Label with_tos_register, unknown_state; 1264 Label with_tos_register, unknown_state;
1244 __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS)); 1265 __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
1245 __ b(ne, &with_tos_register); 1266 __ b(ne, &with_tos_register, Label::kNear);
1246 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state. 1267 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1247 __ Ret(); 1268 __ Ret();
1248 1269
1249 __ bind(&with_tos_register); 1270 __ bind(&with_tos_register);
1250 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); 1271 __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
1251 __ cmp(r6, Operand(FullCodeGenerator::TOS_REG)); 1272 __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
1252 __ b(ne, &unknown_state); 1273 __ b(ne, &unknown_state, Label::kNear);
1253 __ add(sp, sp, Operand(2 * kPointerSize)); // Remove state. 1274 __ add(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1254 __ Ret(); 1275 __ Ret();
1255 1276
1256 __ bind(&unknown_state); 1277 __ bind(&unknown_state);
1257 __ stop("no cases left"); 1278 __ stop("no cases left");
1258 } 1279 }
1259 1280
1260 1281
1261 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { 1282 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1262 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); 1283 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1263 } 1284 }
1264 1285
1265 1286
1266 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { 1287 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1267 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); 1288 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1268 } 1289 }
1269 1290
1270 1291
1271 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { 1292 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
1272 // For now, we are relying on the fact that Runtime::NotifyOSR 1293 // For now, we are relying on the fact that Runtime::NotifyOSR
1273 // doesn't do any garbage collection which allows us to save/restore 1294 // doesn't do any garbage collection which allows us to save/restore
1274 // the registers without worrying about which of them contain 1295 // the registers without worrying about which of them contain
1275 // pointers. This seems a bit fragile. 1296 // pointers. This seems a bit fragile.
1276 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit()); 1297 __ Push(pr, fp);
1298 __ pushm(kJSCallerSaved | kCalleeSaved);
1277 { 1299 {
1278 FrameScope scope(masm, StackFrame::INTERNAL); 1300 FrameScope scope(masm, StackFrame::INTERNAL);
1279 __ CallRuntime(Runtime::kNotifyOSR, 0); 1301 __ CallRuntime(Runtime::kNotifyOSR, 0);
1280 } 1302 }
1281 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit()); 1303 __ popm(kJSCallerSaved | kCalleeSaved);
1304 __ Pop(pr, fp);
1282 __ Ret(); 1305 __ Ret();
1283 } 1306 }
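ARM's stm/ldm move an entire register list in a single instruction; SH4 has no equivalent, so the port pairs an explicit Push(pr, fp)/Pop(pr, fp) with pushm/popm over the register list. A minimal sketch of the pushm half, assuming it mirrors the memory layout of ARM's stm(db_w, ...) (the real sh4 macro-assembler implementation is not shown in this hunk and may differ):

  void MacroAssembler::pushm(RegList regs) {
    // Walk register codes from high to low so the highest-numbered
    // register ends up at the highest address, as ARM's stmdb does.
    for (int i = Register::kNumRegisters - 1; i >= 0; --i) {
      if ((regs & (1 << i)) != 0) {
        push(Register::from_code(i));  // decrement sp, then store
      }
    }
  }

popm would walk the codes in the opposite order, popping into the lowest-numbered register first.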
1284 1307
1285 1308
1286 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { 1309 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1287 CpuFeatures::TryForceFeatureScope scope(VFP3); 1310 __ UNIMPLEMENTED_BREAK();
1288 if (!CPU::SupportsCrankshaft()) {
1289 __ Abort("Unreachable code: Cannot optimize without VFP3 support.");
1290 return;
1291 }
1292
1293 // Lookup the function in the JavaScript frame and push it as an
1294 // argument to the on-stack replacement function.
1295 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1296 {
1297 FrameScope scope(masm, StackFrame::INTERNAL);
1298 __ push(r0);
1299 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1300 }
1301
1302 // If the result was -1 it means that we couldn't optimize the
1303 // function. Just return and continue in the unoptimized version.
1304 Label skip;
1305 __ cmp(r0, Operand(Smi::FromInt(-1)));
1306 __ b(ne, &skip);
1307 __ Ret();
1308
1309 __ bind(&skip);
1310 // Untag the AST id and push it on the stack.
1311 __ SmiUntag(r0);
1312 __ push(r0);
1313
1314 // Generate the code for doing the frame-to-frame translation using
1315 // the deoptimizer infrastructure.
1316 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
1317 generator.Generate();
1318 } 1311 }
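The sh4 side stubs on-stack replacement out entirely: the ARM path that calls Runtime::kCompileForOnStackReplacement and re-enters optimized code through Deoptimizer::EntryGenerator is dropped, presumably because the optimizing compiler is not wired up for sh4 in this first draft. UNIMPLEMENTED_BREAK() is assumed here to expand to a trapping MacroAssembler call along these lines (hypothetical sketch; the actual definition is expected elsewhere in the port, perhaps src/sh4/checks-sh4.h):

  // Used as "__ UNIMPLEMENTED_BREAK();", so it must expand to a member
  // call on the masm; stop() is already used this way in this file.
  #define UNIMPLEMENTED_BREAK() \
    stop("unimplemented sh4 builtin: " __FILE__)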
1319 1312
1320 1313
1321 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { 1314 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1322 // 1. Make sure we have at least one argument. 1315 // 1. Make sure we have at least one argument.
1323 // r0: actual number of arguments 1316 // r0: actual number of arguments
1324 { Label done; 1317 { Label done;
1325 __ cmp(r0, Operand(0)); 1318 __ tst(r0, r0);
1326 __ b(ne, &done); 1319 __ b(ne, &done, Label::kNear);
1327 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 1320 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
1328 __ push(r2); 1321 __ push(r2);
1329 __ add(r0, r0, Operand(1)); 1322 __ add(r0, r0, Operand(1));
1330 __ bind(&done); 1323 __ bind(&done);
1331 } 1324 }
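The zero test above illustrates the SH4 condition model used throughout this patch: there is no flags register, only a single T bit that compare and test instructions set. tst sets T when the AND of its operands is zero, so tst(r, r) is the compare-against-zero idiom:

  __ tst(r0, r0);                 // T = ((r0 & r0) == 0), i.e. T = (r0 == 0)
  __ b(ne, &done, Label::kNear);  // "ne" branches while T is clear (r0 != 0)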
1332 1325
1333 // 2. Get the function to call (passed as receiver) from the stack, check 1326 // 2. Get the function to call (passed as receiver) from the stack, check
1334 // if it is a function. 1327 // if it is a function.
1335 // r0: actual number of arguments 1328 // r0: actual number of arguments
1336 Label slow, non_function; 1329 Label slow, non_function;
1337 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); 1330 __ lsl(r1, r0, Operand(kPointerSizeLog2));
1331 __ ldr(r1, MemOperand(sp, r1));
1338 __ JumpIfSmi(r1, &non_function); 1332 __ JumpIfSmi(r1, &non_function);
1339 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); 1333 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE, eq);
1340 __ b(ne, &slow); 1334 __ b(ne, &slow);
1341 1335
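SH4 also lacks ARM's scaled-index addressing mode, so the one-instruction load on the left,

  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));

is rewritten as a shift into a scratch (here the destination) register followed by a register+register load:

  __ lsl(r1, r0, Operand(kPointerSizeLog2));  // r1 = r0 * kPointerSize
  __ ldr(r1, MemOperand(sp, r1));             // r1 = *(sp + r1)

The same two-instruction pattern recurs for every scaled access in the rest of this file.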
1342 // 3a. Patch the first argument if necessary when calling a function. 1336 // 3a. Patch the first argument if necessary when calling a function.
1343 // r0: actual number of arguments 1337 // r0: actual number of arguments
1344 // r1: function 1338 // r1: function
1345 Label shift_arguments; 1339 Label shift_arguments;
1346 __ mov(r4, Operand(0, RelocInfo::NONE)); // indicate regular JS_FUNCTION 1340 __ mov(r4, Operand(0, RelocInfo::NONE)); // indicate regular JS_FUNCTION
1347 { Label convert_to_object, use_global_receiver, patch_receiver; 1341 { Label convert_to_object, use_global_receiver, patch_receiver;
1348 // Change context eagerly in case we need the global receiver. 1342 // Change context eagerly in case we need the global receiver.
1349 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); 1343 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1350 1344
1351 // Do not transform the receiver for strict mode functions. 1345 // Do not transform the receiver for strict mode functions.
1352 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 1346 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1353 __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); 1347 __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
1354 __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + 1348 __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1355 kSmiTagSize))); 1349 kSmiTagSize)));
1356 __ b(ne, &shift_arguments); 1350 __ b(ne, &shift_arguments);
1357 1351
1358 // Do not transform the receiver for natives (compiler hints already in r3). 1352 // Do not transform the receiver for natives (compiler hints already in r3).
1359 __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); 1353 __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1360 __ b(ne, &shift_arguments); 1354 __ b(ne, &shift_arguments);
1361 1355
1362 // Compute the receiver in non-strict mode. 1356 // Compute the receiver in non-strict mode.
1363 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); 1357 __ lsl(r2, r0, Operand(kPointerSizeLog2));
1358 __ add(r2, sp, r2);
1364 __ ldr(r2, MemOperand(r2, -kPointerSize)); 1359 __ ldr(r2, MemOperand(r2, -kPointerSize));
1365 // r0: actual number of arguments 1360 // r0: actual number of arguments
1366 // r1: function 1361 // r1: function
1367 // r2: first argument 1362 // r2: first argument
1368 __ JumpIfSmi(r2, &convert_to_object); 1363 __ JumpIfSmi(r2, &convert_to_object, Label::kNear);
1369 1364
1370 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); 1365 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1371 __ cmp(r2, r3); 1366 __ cmp(r2, r3);
1372 __ b(eq, &use_global_receiver); 1367 __ b(eq, &use_global_receiver);
1373 __ LoadRoot(r3, Heap::kNullValueRootIndex); 1368 __ LoadRoot(r3, Heap::kNullValueRootIndex);
1374 __ cmp(r2, r3); 1369 __ cmp(r2, r3);
1375 __ b(eq, &use_global_receiver); 1370 __ b(eq, &use_global_receiver);
1376 1371
1377 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 1372 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1378 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE); 1373 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE, ge);
1379 __ b(ge, &shift_arguments); 1374 __ bt(&shift_arguments);
1380 1375
1381 __ bind(&convert_to_object); 1376 __ bind(&convert_to_object);
1382 1377
1383 { 1378 {
1384 // Enter an internal frame in order to preserve argument count. 1379 // Enter an internal frame in order to preserve argument count.
1385 FrameScope scope(masm, StackFrame::INTERNAL); 1380 FrameScope scope(masm, StackFrame::INTERNAL);
1386 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged. 1381 __ lsl(r0, r0, Operand(kSmiTagSize)); // Smi-tagged.
1387 __ push(r0); 1382 __ push(r0);
1388 1383
1389 __ push(r2); 1384 __ push(r2);
1390 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1385 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1391 __ mov(r2, r0); 1386 __ mov(r2, r0);
1392 1387
1393 __ pop(r0); 1388 __ pop(r0);
1394 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); 1389 __ asr(r0, r0, Operand(kSmiTagSize));
1395 1390
1396 // Exit the internal frame. 1391 // Exit the internal frame.
1397 } 1392 }
1398 1393
1399 // Restore the function to r1, and the flag to r4. 1394 // Restore the function to r1.
1400 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); 1395 __ lsl(r1, r0, Operand(kPointerSizeLog2));
1396 __ ldr(r1, MemOperand(sp, r1));
1401 __ mov(r4, Operand(0, RelocInfo::NONE)); 1397 __ mov(r4, Operand(0, RelocInfo::NONE));
1402 __ jmp(&patch_receiver); 1398 __ jmp_near(&patch_receiver);
1403 1399
1404 // Use the global receiver object from the called function as the 1400 // Use the global receiver object from the called function as the
1405 // receiver. 1401 // receiver.
1406 __ bind(&use_global_receiver); 1402 __ bind(&use_global_receiver);
1407 const int kGlobalIndex = 1403 const int kGlobalIndex =
1408 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; 1404 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
1409 __ ldr(r2, FieldMemOperand(cp, kGlobalIndex)); 1405 __ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
1410 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kNativeContextOffset)); 1406 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kNativeContextOffset));
1411 __ ldr(r2, FieldMemOperand(r2, kGlobalIndex)); 1407 __ ldr(r2, FieldMemOperand(r2, kGlobalIndex));
1412 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset)); 1408 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
1413 1409
1414 __ bind(&patch_receiver); 1410 __ bind(&patch_receiver);
1415 __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2)); 1411 __ lsl(r3, r0, Operand(kPointerSizeLog2));
1412 __ add(r3, sp, r3);
1416 __ str(r2, MemOperand(r3, -kPointerSize)); 1413 __ str(r2, MemOperand(r3, -kPointerSize));
1417 1414
1418 __ jmp(&shift_arguments); 1415 __ jmp(&shift_arguments);
1419 } 1416 }
1420 1417
1421 // 3b. Check for function proxy. 1418 // 3b. Check for function proxy.
1422 __ bind(&slow); 1419 __ bind(&slow);
1423 __ mov(r4, Operand(1, RelocInfo::NONE)); // indicate function proxy 1420 __ mov(r4, Operand(1, RelocInfo::NONE)); // indicate function proxy
1424 __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE)); 1421 __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
1425 __ b(eq, &shift_arguments); 1422 __ b(eq, &shift_arguments);
1426 __ bind(&non_function); 1423 __ bind(&non_function);
1427 __ mov(r4, Operand(2, RelocInfo::NONE)); // indicate non-function 1424 __ mov(r4, Operand(2, RelocInfo::NONE)); // indicate non-function
1428 1425
1429 // 3c. Patch the first argument when calling a non-function. The 1426 // 3c. Patch the first argument when calling a non-function. The
1430 // CALL_NON_FUNCTION builtin expects the non-function callee as 1427 // CALL_NON_FUNCTION builtin expects the non-function callee as
1431 // receiver, so overwrite the first argument which will ultimately 1428 // receiver, so overwrite the first argument which will ultimately
1432 // become the receiver. 1429 // become the receiver.
1433 // r0: actual number of arguments 1430 // r0: actual number of arguments
1434 // r1: function 1431 // r1: function
1435 // r4: call type (0: JS function, 1: function proxy, 2: non-function) 1432 // r4: call type (0: JS function, 1: function proxy, 2: non-function)
1436 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); 1433 __ lsl(r2, r0, Operand(kPointerSizeLog2));
1434 __ add(r2, sp, r2);
1437 __ str(r1, MemOperand(r2, -kPointerSize)); 1435 __ str(r1, MemOperand(r2, -kPointerSize));
1438 1436
1439 // 4. Shift arguments and return address one slot down on the stack 1437 // 4. Shift arguments and return address one slot down on the stack
1440 // (overwriting the original receiver). Adjust argument count to make 1438 // (overwriting the original receiver). Adjust argument count to make
1441 // the original first argument the new receiver. 1439 // the original first argument the new receiver.
1442 // r0: actual number of arguments 1440 // r0: actual number of arguments
1443 // r1: function 1441 // r1: function
1444 // r4: call type (0: JS function, 1: function proxy, 2: non-function) 1442 // r4: call type (0: JS function, 1: function proxy, 2: non-function)
1445 __ bind(&shift_arguments); 1443 __ bind(&shift_arguments);
1446 { Label loop; 1444 { Label loop;
1447 // Calculate the copy start address (destination). Copy end address is sp. 1445 // Calculate the copy start address (destination). Copy end address is sp.
1448 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); 1446 __ lsl(r2, r0, Operand(kPointerSizeLog2));
1447 __ add(r2, sp, r2);
1449 1448
1450 __ bind(&loop); 1449 __ bind(&loop);
1451 __ ldr(ip, MemOperand(r2, -kPointerSize)); 1450 __ ldr(ip, MemOperand(r2, -kPointerSize));
1452 __ str(ip, MemOperand(r2)); 1451 __ str(ip, MemOperand(r2));
1453 __ sub(r2, r2, Operand(kPointerSize)); 1452 __ sub(r2, r2, Operand(kPointerSize));
1454 __ cmp(r2, sp); 1453 __ cmp(r2, sp);
1455 __ b(ne, &loop); 1454 __ b(ne, &loop);
1456 // Adjust the actual number of arguments and remove the top element 1455 // Adjust the actual number of arguments and remove the top element
1457 // (which is a copy of the last argument). 1456 // (which is a copy of the last argument).
1458 __ sub(r0, r0, Operand(1)); 1457 __ sub(r0, r0, Operand(1));
1459 __ pop(); 1458 __ pop();
1460 } 1459 }
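A worked example of the shift loop for r0 == 2, with slots listed from higher to lower address and sp at the right:

  before loop : [ callee | arg1 | arg2 ] <- sp
  after loop  : [ arg1   | arg2 | arg2 ] <- sp
  after pop() : [ arg1   | arg2 ]        <- sp, r0 adjusted to 1

Each slot moves one word toward higher addresses, the stale copy left at the stack top is popped, and the value that was in the first-argument slot (the receiver patched in above) now occupies the callee slot.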
1461 1460
1462 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, 1461 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1463 // or a function proxy via CALL_FUNCTION_PROXY. 1462 // or a function proxy via CALL_FUNCTION_PROXY.
1464 // r0: actual number of arguments 1463 // r0: actual number of arguments
1465 // r1: function 1464 // r1: function
1466 // r4: call type (0: JS function, 1: function proxy, 2: non-function) 1465 // r4: call type (0: JS function, 1: function proxy, 2: non-function)
1467 { Label function, non_proxy; 1466 { Label function, non_proxy;
1468 __ tst(r4, r4); 1467 __ tst(r4, r4);
1469 __ b(eq, &function); 1468 __ b(eq, &function, Label::kNear);
1470 // Expected number of arguments is 0 for CALL_NON_FUNCTION. 1469 // Expected number of arguments is 0 for CALL_NON_FUNCTION.
1471 __ mov(r2, Operand(0, RelocInfo::NONE)); 1470 __ mov(r2, Operand(0, RelocInfo::NONE));
1472 __ SetCallKind(r5, CALL_AS_METHOD); 1471 __ SetCallKind(r5, CALL_AS_METHOD);
1473 __ cmp(r4, Operand(1)); 1472 __ cmp(r4, Operand(1));
1474 __ b(ne, &non_proxy); 1473 __ b(ne, &non_proxy);
1475 1474
1476 __ push(r1); // re-add proxy object as additional argument 1475 __ push(r1); // re-add proxy object as additional argument
1477 __ add(r0, r0, Operand(1)); 1476 __ add(r0, r0, Operand(1));
1478 __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY); 1477 __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
1479 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 1478 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1480 RelocInfo::CODE_TARGET); 1479 RelocInfo::CODE_TARGET);
1481 1480
1482 __ bind(&non_proxy); 1481 __ bind(&non_proxy);
1483 __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION); 1482 __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
1484 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 1483 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1485 RelocInfo::CODE_TARGET); 1484 RelocInfo::CODE_TARGET);
1486 __ bind(&function); 1485 __ bind(&function);
1487 } 1486 }
1488 1487
1489 // 5b. Get the code to call from the function and check that the number of 1488 // 5b. Get the code to call from the function and check that the number of
1490 // expected arguments matches what we're providing. If so, jump 1489 // expected arguments matches what we're providing. If so, jump
1491 // (tail-call) to the code in register r3 without checking arguments. 1490 // (tail-call) to the code in register r3 without checking arguments.
1492 // r0: actual number of arguments 1491 // r0: actual number of arguments
1493 // r1: function 1492 // r1: function
1493 Label end;
1494 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 1494 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1495 __ ldr(r2, 1495 __ ldr(r2,
1496 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset)); 1496 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
1497 __ mov(r2, Operand(r2, ASR, kSmiTagSize)); 1497 __ asr(r2, r2, Operand(kSmiTagSize));
1498 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); 1498 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
1499 __ SetCallKind(r5, CALL_AS_METHOD); 1499 __ SetCallKind(r5, CALL_AS_METHOD);
1500 __ cmp(r2, r0); // Check formal and actual parameter counts. 1500 __ cmp(r2, r0); // Check formal and actual parameter counts.
1501 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 1501 __ bt_near(&end);
1502 RelocInfo::CODE_TARGET, 1502 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1503 ne); 1503 RelocInfo::CODE_TARGET);
1504 1504 __ bind(&end);
1505 ParameterCount expected(0); 1505 ParameterCount expected(0);
1506 __ InvokeCode(r3, expected, expected, JUMP_FUNCTION, 1506 __ InvokeCode(r3, expected, expected, JUMP_FUNCTION,
1507 NullCallWrapper(), CALL_AS_METHOD); 1507 NullCallWrapper(), CALL_AS_METHOD);
1508 } 1508 }
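The tail-call into the arguments adaptor above shows how ARM's predicated jumps to code objects are translated: an SH4 branch can only test the T bit and reach a nearby label, so the conditional code-target jump (writing adaptor for masm->isolate()->builtins()->ArgumentsAdaptorTrampoline())

  __ Jump(adaptor, RelocInfo::CODE_TARGET, ne);

becomes a near branch around an unconditional jump:

  __ cmp(r2, r0);    // T = (formal count == actual count)
  __ bt_near(&end);  // counts match: fall through to InvokeCode
  __ jmp(adaptor, RelocInfo::CODE_TARGET);
  __ bind(&end);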
1509 1509
1510 1510
1511 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { 1511 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1512 const int kIndexOffset = -5 * kPointerSize; 1512 const int kIndexOffset = -5 * kPointerSize;
1513 const int kLimitOffset = -4 * kPointerSize; 1513 const int kLimitOffset = -4 * kPointerSize;
1514 const int kArgsOffset = 2 * kPointerSize; 1514 const int kArgsOffset = 2 * kPointerSize;
(...skipping 11 matching lines...)
1526 1526
1527 // Check the stack for overflow. We are not trying to catch 1527 // Check the stack for overflow. We are not trying to catch
1528 // interruptions (e.g. debug break and preemption) here, so the "real stack 1528 // interruptions (e.g. debug break and preemption) here, so the "real stack
1529 // limit" is checked. 1529 // limit" is checked.
1530 Label okay; 1530 Label okay;
1531 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex); 1531 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
1532 // Make r2 the space we have left. The stack might already be overflowed 1532 // Make r2 the space we have left. The stack might already be overflowed
1533 // here which will cause r2 to become negative. 1533 // here which will cause r2 to become negative.
1534 __ sub(r2, sp, r2); 1534 __ sub(r2, sp, r2);
1535 // Check if the arguments will overflow the stack. 1535 // Check if the arguments will overflow the stack.
1536 __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 1536 __ lsl(ip, r0, Operand(kPointerSizeLog2 - kSmiTagSize));
1537 __ b(gt, &okay); // Signed comparison. 1537 __ cmpgt(r2, ip);
1538 __ bt_near(&okay); // Signed comparison.
1538 1539
1539 // Out of stack space. 1540 // Out of stack space.
1540 __ ldr(r1, MemOperand(fp, kFunctionOffset)); 1541 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1541 __ push(r1); 1542 __ push(r1);
1542 __ push(r0); 1543 __ push(r0);
1543 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); 1544 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
1544 // End of stack check. 1545 // End of stack check.
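Signed comparisons follow the same T-bit scheme: where ARM does one cmp and branches on a condition code (gt), SH4 provides dedicated compare instructions such as cmpgt and cmpge that set T directly:

  __ lsl(ip, r0, Operand(kPointerSizeLog2 - kSmiTagSize));  // args size in bytes
  __ cmpgt(r2, ip);   // T = (r2 > ip), signed: space left exceeds args size
  __ bt_near(&okay);  // branch on T set: no overflow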
1545 1546
1546 // Push current limit and index. 1547 // Push current limit and index.
1547 __ bind(&okay); 1548 __ bind(&okay);
1548 __ push(r0); // limit 1549 __ push(r0); // limit
1549 __ mov(r1, Operand(0, RelocInfo::NONE)); // initial index 1550 __ mov(r1, Operand(0, RelocInfo::NONE)); // initial index
1550 __ push(r1); 1551 __ push(r1);
1551 1552
1552 // Get the receiver. 1553 // Get the receiver.
1553 __ ldr(r0, MemOperand(fp, kRecvOffset)); 1554 __ ldr(r0, MemOperand(fp, kRecvOffset));
1554 1555
1555 // Check that the function is a JS function (otherwise it must be a proxy). 1556 // Check that the function is a JS function (otherwise it must be a proxy).
1556 Label push_receiver; 1557 Label push_receiver;
1557 __ ldr(r1, MemOperand(fp, kFunctionOffset)); 1558 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1558 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); 1559 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE, eq);
1559 __ b(ne, &push_receiver); 1560 __ bf(&push_receiver);
1560 1561
1561 // Change context eagerly to get the right global object if necessary. 1562 // Change context eagerly to get the right global object if necessary.
1562 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); 1563 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1563 // Load the shared function info while the function is still in r1. 1564 // Load the shared function info while the function is still in r1.
1564 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); 1565 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1565 1566
1566 // Compute the receiver. 1567 // Compute the receiver.
1567 // Do not transform the receiver for strict mode functions. 1568 // Do not transform the receiver for strict mode functions.
1568 Label call_to_object, use_global_receiver; 1569 Label call_to_object, use_global_receiver;
1569 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); 1570 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
(...skipping 10 matching lines...)
1580 __ LoadRoot(r1, Heap::kNullValueRootIndex); 1581 __ LoadRoot(r1, Heap::kNullValueRootIndex);
1581 __ cmp(r0, r1); 1582 __ cmp(r0, r1);
1582 __ b(eq, &use_global_receiver); 1583 __ b(eq, &use_global_receiver);
1583 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 1584 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
1584 __ cmp(r0, r1); 1585 __ cmp(r0, r1);
1585 __ b(eq, &use_global_receiver); 1586 __ b(eq, &use_global_receiver);
1586 1587
1587 // Check if the receiver is already a JavaScript object. 1588 // Check if the receiver is already a JavaScript object.
1588 // r0: receiver 1589 // r0: receiver
1589 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 1590 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1590 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 1591 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE, ge);
1591 __ b(ge, &push_receiver); 1592 __ bt(&push_receiver);
1592 1593
1593 // Convert the receiver to a regular object. 1594 // Convert the receiver to a regular object.
1594 // r0: receiver 1595 // r0: receiver
1595 __ bind(&call_to_object); 1596 __ bind(&call_to_object);
1596 __ push(r0); 1597 __ push(r0);
1597 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1598 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1598 __ b(&push_receiver); 1599 __ b(&push_receiver);
1599 1600
1600 // Use the current global receiver object as the receiver. 1601 // Use the current global receiver object as the receiver.
1601 __ bind(&use_global_receiver); 1602 __ bind(&use_global_receiver);
1602 const int kGlobalOffset = 1603 const int kGlobalOffset =
1603 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; 1604 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
1604 __ ldr(r0, FieldMemOperand(cp, kGlobalOffset)); 1605 __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
1605 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset)); 1606 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
1606 __ ldr(r0, FieldMemOperand(r0, kGlobalOffset)); 1607 __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
1607 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset)); 1608 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
1608 1609
1609 // Push the receiver. 1610 // Push the receiver.
1610 // r0: receiver 1611 // r0: receiver
1611 __ bind(&push_receiver); 1612 __ bind(&push_receiver);
1612 __ push(r0); 1613 __ push(r0);
1613 1614
1614 // Copy all arguments from the array to the stack. 1615 // Copy all arguments from the array to the stack.
1615 Label entry, loop; 1616 Label entry, loop;
1616 __ ldr(r0, MemOperand(fp, kIndexOffset)); 1617 __ ldr(r0, MemOperand(fp, kIndexOffset));
1617 __ b(&entry); 1618 __ b_near(&entry);
1618 1619
1619 // Load the current argument from the arguments array and push it to the 1620 // Load the current argument from the arguments array and push it to the
1620 // stack. 1621 // stack.
1621 // r0: current argument index 1622 // r0: current argument index
1622 __ bind(&loop); 1623 __ bind(&loop);
1623 __ ldr(r1, MemOperand(fp, kArgsOffset)); 1624 __ ldr(r1, MemOperand(fp, kArgsOffset));
1624 __ push(r1); 1625 __ push(r1);
1625 __ push(r0); 1626 __ push(r0);
1626 1627
1627 // Call the runtime to access the property in the arguments array. 1628 // Call the runtime to access the property in the arguments array.
1628 __ CallRuntime(Runtime::kGetProperty, 2); 1629 __ CallRuntime(Runtime::kGetProperty, 2);
1629 __ push(r0); 1630 __ push(r0);
1630 1631
1631 // Use inline caching to access the arguments. 1632 // Use inline caching to access the arguments.
1632 __ ldr(r0, MemOperand(fp, kIndexOffset)); 1633 __ ldr(r0, MemOperand(fp, kIndexOffset));
1633 __ add(r0, r0, Operand(1 << kSmiTagSize)); 1634 __ add(r0, r0, Operand(1 << kSmiTagSize));
1634 __ str(r0, MemOperand(fp, kIndexOffset)); 1635 __ str(r0, MemOperand(fp, kIndexOffset));
1635 1636
1636 // Test if the copy loop has finished copying all the elements from the 1637 // Test if the copy loop has finished copying all the elements from the
1637 // arguments object. 1638 // arguments object.
1638 __ bind(&entry); 1639 __ bind(&entry);
1639 __ ldr(r1, MemOperand(fp, kLimitOffset)); 1640 __ ldr(r1, MemOperand(fp, kLimitOffset));
1640 __ cmp(r0, r1); 1641 __ cmp(r0, r1);
1641 __ b(ne, &loop); 1642 __ b(ne, &loop);
1642 1643
1643 // Invoke the function. 1644 // Invoke the function.
1644 Label call_proxy; 1645 Label call_proxy;
1645 ParameterCount actual(r0); 1646 ParameterCount actual(r0);
1646 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); 1647 __ asr(r0, r0, Operand(kSmiTagSize));
1647 __ ldr(r1, MemOperand(fp, kFunctionOffset)); 1648 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1648 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); 1649 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE, eq);
1649 __ b(ne, &call_proxy); 1650 __ bf(&call_proxy);
1650 __ InvokeFunction(r1, actual, CALL_FUNCTION, 1651 __ InvokeFunction(r1, actual, CALL_FUNCTION,
1651 NullCallWrapper(), CALL_AS_METHOD); 1652 NullCallWrapper(), CALL_AS_METHOD);
1652 1653
1653 frame_scope.GenerateLeaveFrame(); 1654 frame_scope.GenerateLeaveFrame();
1654 __ add(sp, sp, Operand(3 * kPointerSize)); 1655 __ add(sp, sp, Operand(3 * kPointerSize));
1655 __ Jump(lr); 1656 __ rts();
1656 1657
1657 // Invoke the function proxy. 1658 // Invoke the function proxy.
1658 __ bind(&call_proxy); 1659 __ bind(&call_proxy);
1659 __ push(r1); // add function proxy as last argument 1660 __ push(r1); // add function proxy as last argument
1660 __ add(r0, r0, Operand(1)); 1661 __ add(r0, r0, Operand(1));
1661 __ mov(r2, Operand(0, RelocInfo::NONE)); 1662 __ mov(r2, Operand(0, RelocInfo::NONE));
1662 __ SetCallKind(r5, CALL_AS_METHOD); 1663 __ SetCallKind(r5, CALL_AS_METHOD);
1663 __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY); 1664 __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
1664 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 1665 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1665 RelocInfo::CODE_TARGET); 1666 RelocInfo::CODE_TARGET);
1666 1667
1667 // Tear down the internal frame and remove function, receiver and args. 1668 // Tear down the internal frame and remove function, receiver and args.
1668 } 1669 }
1669 __ add(sp, sp, Operand(3 * kPointerSize)); 1670 __ add(sp, sp, Operand(3 * kPointerSize));
1670 __ Jump(lr); 1671 __ rts();
1671 } 1672 }
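Function returns are translated uniformly as well: ARM returns by branching to the link register, while SH4 uses its return-from-subroutine instruction, which branches through pr:

  __ Jump(lr);  // ARM return
  __ rts();     // SH4 return (branches to the address held in pr)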
1672 1673
1673 1674
1674 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { 1675 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1675 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); 1676 __ lsl(r0, r0, Operand(kSmiTagSize));
1676 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 1677 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1677 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit()); 1678 __ push(pr);
1679 __ Push(fp, r4, r1, r0);
1678 __ add(fp, sp, Operand(3 * kPointerSize)); 1680 __ add(fp, sp, Operand(3 * kPointerSize));
1679 } 1681 }
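For reference, the adaptor frame laid out by the pushes above (offsets reconstructed from this code, relative to the new fp):

  fp + 1 * kPointerSize : pr (saved return address)
  fp + 0                : caller's fp
  fp - 1 * kPointerSize : Smi marker StackFrame::ARGUMENTS_ADAPTOR (r4)
  fp - 2 * kPointerSize : function (r1)
  fp - 3 * kPointerSize : actual argument count as a Smi (r0)  <- sp

This is why LeaveArgumentsAdaptorFrame below reloads the count from MemOperand(fp, -3 * kPointerSize).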
1680 1682
1681 1683
1682 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { 1684 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1683 // ----------- S t a t e ------------- 1685 // ----------- S t a t e -------------
1684 // -- r0 : result being passed through 1686 // -- r0 : result being passed through
1685 // ----------------------------------- 1687 // -----------------------------------
1686 // Get the number of arguments passed (as a smi), tear down the frame and 1688 // Get the number of arguments passed (as a smi), tear down the frame and
1687 // then tear down the parameters. 1689 // then tear down the parameters.
1688 __ ldr(r1, MemOperand(fp, -3 * kPointerSize)); 1690 __ ldr(r1, MemOperand(fp, -3 * kPointerSize));
1689 __ mov(sp, fp); 1691 __ mov(sp, fp);
1690 __ ldm(ia_w, sp, fp.bit() | lr.bit()); 1692 __ Pop(pr, fp);
1691 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize)); 1693 __ lsl(ip, r1, Operand(kPointerSizeLog2 - kSmiTagSize));
1694 __ add(sp, sp, ip);
1692 __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver 1695 __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver
1693 } 1696 }
1694 1697
1695 1698
1696 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { 1699 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1697 // ----------- S t a t e ------------- 1700 // ----------- S t a t e -------------
1698 // -- r0 : actual number of arguments 1701 // -- r0 : actual number of arguments
1699 // -- r1 : function (passed through to callee) 1702 // -- r1 : function (passed through to callee)
1700 // -- r2 : expected number of arguments 1703 // -- r2 : expected number of arguments
1701 // -- r3 : code entry to call 1704 // -- r3 : code entry to call
1702 // -- r5 : call kind information 1705 // -- r5 : call kind information
1703 // ----------------------------------- 1706 // -----------------------------------
1704 1707
1705 Label invoke, dont_adapt_arguments; 1708 Label invoke, dont_adapt_arguments;
1706 1709
1707 Label enough, too_few; 1710 Label enough, too_few;
1708 __ cmp(r0, r2); 1711 __ cmpge(r0, r2);
1709 __ b(lt, &too_few); 1712 __ bf(&too_few);
1710 __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); 1713 __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1711 __ b(eq, &dont_adapt_arguments); 1714 __ b(eq, &dont_adapt_arguments);
1712 1715
1713 { // Enough parameters: actual >= expected 1716 { // Enough parameters: actual >= expected
1714 __ bind(&enough); 1717 __ bind(&enough);
1715 EnterArgumentsAdaptorFrame(masm); 1718 EnterArgumentsAdaptorFrame(masm);
1716 1719
1717 // Calculate copy start address into r0 and copy end address into r2. 1720 // Calculate copy start address into r0 and copy end address into r2.
1718 // r0: actual number of arguments as a smi 1721 // r0: actual number of arguments as a smi
1719 // r1: function 1722 // r1: function
1720 // r2: expected number of arguments 1723 // r2: expected number of arguments
1721 // r3: code entry to call 1724 // r3: code entry to call
1722 __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 1725 __ lsl(r0, r0, Operand(kPointerSizeLog2 - kSmiTagSize));
1726 __ add(r0, fp, r0);
1723 // adjust for return address and receiver 1727 // adjust for return address and receiver
1724 __ add(r0, r0, Operand(2 * kPointerSize)); 1728 __ add(r0, r0, Operand(2 * kPointerSize));
1725 __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2)); 1729 __ lsl(r2, r2, Operand(kPointerSizeLog2));
1730 __ sub(r2, r0, r2);
1726 1731
1727 // Copy the arguments (including the receiver) to the new stack frame. 1732 // Copy the arguments (including the receiver) to the new stack frame.
1728 // r0: copy start address 1733 // r0: copy start address
1729 // r1: function 1734 // r1: function
1730 // r2: copy end address 1735 // r2: copy end address
1731 // r3: code entry to call 1736 // r3: code entry to call
1732 1737
1733 Label copy; 1738 Label copy;
1734 __ bind(&copy); 1739 __ bind(&copy);
1735 __ ldr(ip, MemOperand(r0, 0)); 1740 __ ldr(ip, MemOperand(r0, 0));
1736 __ push(ip); 1741 __ push(ip);
1737 __ cmp(r0, r2); // Compare before moving to next argument. 1742 __ cmp(r0, r2); // Compare before moving to next argument.
1738 __ sub(r0, r0, Operand(kPointerSize)); 1743 __ sub(r0, r0, Operand(kPointerSize));
1739 __ b(ne, &copy); 1744 __ b(ne, &copy);
1740 1745
1741 __ b(&invoke); 1746 __ b(&invoke);
1742 } 1747 }
1743 1748
1744 { // Too few parameters: Actual < expected 1749 { // Too few parameters: Actual < expected
1745 __ bind(&too_few); 1750 __ bind(&too_few);
1746 EnterArgumentsAdaptorFrame(masm); 1751 EnterArgumentsAdaptorFrame(masm);
1747 1752
1748 // Calculate copy start address into r0 and copy end address is fp. 1753 // Calculate copy start address into r0 and copy end address is fp.
1749 // r0: actual number of arguments as a smi 1754 // r0: actual number of arguments as a smi
1750 // r1: function 1755 // r1: function
1751 // r2: expected number of arguments 1756 // r2: expected number of arguments
1752 // r3: code entry to call 1757 // r3: code entry to call
1753 __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 1758 __ lsl(r0, r0, Operand(kPointerSizeLog2 - kSmiTagSize));
1759 __ add(r0, fp, r0);
1754 1760
1755 // Copy the arguments (including the receiver) to the new stack frame. 1761 // Copy the arguments (including the receiver) to the new stack frame.
1756 // r0: copy start address 1762 // r0: copy start address
1757 // r1: function 1763 // r1: function
1758 // r2: expected number of arguments 1764 // r2: expected number of arguments
1759 // r3: code entry to call 1765 // r3: code entry to call
1760 Label copy; 1766 Label copy;
1761 __ bind(&copy); 1767 __ bind(&copy);
1762 // Adjust load for return address and receiver. 1768 // Adjust load for return address and receiver.
1763 __ ldr(ip, MemOperand(r0, 2 * kPointerSize)); 1769 __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
1764 __ push(ip); 1770 __ push(ip);
1765 __ cmp(r0, fp); // Compare before moving to next argument. 1771 __ cmp(r0, fp); // Compare before moving to next argument.
1766 __ sub(r0, r0, Operand(kPointerSize)); 1772 __ sub(r0, r0, Operand(kPointerSize));
1767 __ b(ne, &copy); 1773 __ b(ne, &copy);
1768 1774
1769 // Fill the remaining expected arguments with undefined. 1775 // Fill the remaining expected arguments with undefined.
1770 // r1: function 1776 // r1: function
1771 // r2: expected number of arguments 1777 // r2: expected number of arguments
1772 // r3: code entry to call 1778 // r3: code entry to call
1773 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1779 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1774 __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2)); 1780 __ lsl(r2, r2, Operand(kPointerSizeLog2));
1781 __ sub(r2, fp, r2);
1775 __ sub(r2, r2, Operand(4 * kPointerSize)); // Adjust for frame. 1782 __ sub(r2, r2, Operand(4 * kPointerSize)); // Adjust for frame.
1776 1783
1777 Label fill; 1784 Label fill;
1778 __ bind(&fill); 1785 __ bind(&fill);
1779 __ push(ip); 1786 __ push(ip);
1780 __ cmp(sp, r2); 1787 __ cmp(sp, r2);
1781 __ b(ne, &fill); 1788 __ b(ne, &fill);
1782 } 1789 }
1783 1790
1784 // Call the entry point. 1791 // Call the entry point.
1785 __ bind(&invoke); 1792 __ bind(&invoke);
1786 __ Call(r3); 1793 __ jsr(r3);
1787 1794
1788 // Store offset of return address for deoptimizer. 1795 // Store offset of return address for deoptimizer.
1789 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); 1796 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1790 1797
1791 // Exit frame and return. 1798 // Exit frame and return.
1792 LeaveArgumentsAdaptorFrame(masm); 1799 LeaveArgumentsAdaptorFrame(masm);
1793 __ Jump(lr); 1800 __ rts();
1794 1801
1795 1802
1796 // ------------------------------------------- 1803 // -------------------------------------------
1797 // Don't adapt arguments. 1804 // Don't adapt arguments.
1798 // ------------------------------------------- 1805 // -------------------------------------------
1799 __ bind(&dont_adapt_arguments); 1806 __ bind(&dont_adapt_arguments);
1800 __ Jump(r3); 1807 __ jmp(r3);
1801 } 1808 }
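Two final instruction-selection notes on this builtin: the deoptimizer looks up the adaptor's return site via the pc_offset() recorded right after the call, so the port swaps ARM's Call(r3) for SH4's jsr(r3) at exactly the same point:

  __ Call(r3);  // ARM: branch and link, return address in lr
  __ jsr(r3);   // SH4: jump to subroutine, return address in pr

The closing jmp(r3) is the plain tail call for the dont_adapt_arguments path, mirroring ARM's Jump(r3).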
1802 1809
1803 1810
1804 #undef __ 1811 #undef __
1805 1812
1806 } } // namespace v8::internal 1813 } } // namespace v8::internal
1807 1814
1808 #endif // V8_TARGET_ARCH_ARM 1815 #endif // V8_TARGET_ARCH_SH4