| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 240 matching lines...) |
| 251 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | 251 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { |
| 252 Generate_JSEntryTrampolineHelper(masm, false); | 252 Generate_JSEntryTrampolineHelper(masm, false); |
| 253 } | 253 } |
| 254 | 254 |
| 255 | 255 |
| 256 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 256 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
| 257 Generate_JSEntryTrampolineHelper(masm, true); | 257 Generate_JSEntryTrampolineHelper(masm, true); |
| 258 } | 258 } |
| 259 | 259 |
| 260 | 260 |
| 261 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { |
| 262 // 1. Make sure we have at least one argument. |
| 263 // r0: actual number of arguments |
| 264 { Label done; |
| 265 __ tst(r0, Operand(r0)); |
| 266 __ b(ne, &done); |
| 267 __ mov(r2, Operand(Factory::undefined_value())); |
| 268 __ push(r2); |
| 269 __ add(r0, r0, Operand(1)); |
| 270 __ bind(&done); |
| 271 } |
| 272 |
| 273 // 2. Get the function to call from the stack. |
| 274 // r0: actual number of arguments |
| 275 { Label done, non_function, function; |
| 276 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); |
| 277 __ tst(r1, Operand(kSmiTagMask)); |
| 278 __ b(eq, &non_function); |
| 279 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 280 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); |
| 281 __ cmp(r2, Operand(JS_FUNCTION_TYPE)); |
| 282 __ b(eq, &function); |
| 283 |
| 284 // Non-function called: Clear the function to force exception. |
| 285 __ bind(&non_function); |
| 286 __ mov(r1, Operand(0)); |
| 287 __ b(&done); |
| 288 |
| 289 // Change the context eagerly because it will be used below to get the |
| 290 // right global object. |
| 291 __ bind(&function); |
| 292 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); |
| 293 |
| 294 __ bind(&done); |
| 295 } |
| 296 |
| 297 // 3. Make sure first argument is an object; convert if necessary. |
| 298 // r0: actual number of arguments |
| 299 // r1: function |
| 300 { Label call_to_object, use_global_receiver, patch_receiver, done; |
| 301 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); |
| 302 __ ldr(r2, MemOperand(r2, -kPointerSize)); |
| 303 |
| 304 // r0: actual number of arguments |
| 305 // r1: function |
| 306 // r2: first argument |
| 307 __ tst(r2, Operand(kSmiTagMask)); |
| 308 __ b(eq, &call_to_object); |
| 309 |
| 310 __ mov(r3, Operand(Factory::null_value())); |
| 311 __ cmp(r2, r3); |
| 312 __ b(eq, &use_global_receiver); |
| 313 __ mov(r3, Operand(Factory::undefined_value())); |
| 314 __ cmp(r2, r3); |
| 315 __ b(eq, &use_global_receiver); |
| 316 |
| 317 __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 318 __ ldrb(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset)); |
| 319 __ cmp(r3, Operand(FIRST_JS_OBJECT_TYPE)); |
| 320 __ b(lt, &call_to_object); |
| 321 __ cmp(r3, Operand(LAST_JS_OBJECT_TYPE)); |
| 322 __ b(le, &done); |
| 323 |
| 324 __ bind(&call_to_object); |
| 325 __ EnterInternalFrame(); |
| 326 |
| 327 // Store number of arguments and function across the call into the runtime. |
| 328 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); |
| 329 __ push(r0); |
| 330 __ push(r1); |
| 331 |
| 332 __ push(r2); |
| 333 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS); |
| 334 __ mov(r2, r0); |
| 335 |
| 336 // Restore number of arguments and function. |
| 337 __ pop(r1); |
| 338 __ pop(r0); |
| 339 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); |
| 340 |
| 341 __ ExitInternalFrame(); |
| 342 __ b(&patch_receiver); |
| 343 |
| 344 // Use the global object from the called function as the receiver. |
| 345 __ bind(&use_global_receiver); |
| 346 const int kGlobalIndex = |
| 347 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
| 348 __ ldr(r2, FieldMemOperand(cp, kGlobalIndex)); |
| 349 |
| 350 __ bind(&patch_receiver); |
| 351 __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2)); |
| 352 __ str(r2, MemOperand(r3, -kPointerSize)); |
| 353 |
| 354 __ bind(&done); |
| 355 } |
| 356 |
| 357 // 4. Shift the arguments one slot down the stack, overwriting the function. |
| 358 // r0: actual number of arguments (including call() receiver) |
| 359 // r1: function |
| 360 { Label loop; |
| 361 // Calculate the copy start address (destination). Copy end address is sp. |
| 362 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); |
| 363 |
| 364 __ bind(&loop); |
| 365 __ ldr(ip, MemOperand(r2, -kPointerSize)); |
| 366 __ str(ip, MemOperand(r2)); |
| 367 __ sub(r2, r2, Operand(kPointerSize)); |
| 368 __ cmp(r2, sp); |
| 369 __ b(ne, &loop); |
| 370 } |
| 371 |
| 372 // 5. Adjust the actual number of arguments and remove the top element. |
| 373 // r0: actual number of arguments (including call() receiver) |
| 374 // r1: function |
| 375 __ sub(r0, r0, Operand(1)); |
| 376 __ add(sp, sp, Operand(kPointerSize)); |
| 377 |
| 378 // 6. Get the code for the function or the non-function builtin. |
| 379 // If the number of expected arguments matches the actual count, call |
| 380 // directly; otherwise go through the arguments adaptor stub. |
| 381 // r0: actual number of arguments |
| 382 // r1: function |
| 383 { Label invoke; |
| 384 __ tst(r1, r1); |
| 385 __ b(ne, &invoke); |
| 386 // __ stop("Generate_ArgumentsAdaptorTrampoline - non-function call"); |
| 387 __ mov(r2, Operand(0)); // expected arguments is 0 for CALL_NON_FUNCTION |
| 388 __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION); |
| 389 __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)), code_target); |
| 390 |
| 391 __ bind(&invoke); |
| 392 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
| 393 __ ldr(r2, |
| 394 FieldMemOperand(r3, |
| 395 SharedFunctionInfo::kFormalParameterCountOffset)); |
| 396 __ ldr(r3, |
| 397 MemOperand(r3, SharedFunctionInfo::kCodeOffset - kHeapObjectTag)); |
| 398 __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 399 __ cmp(r2, r0); // Check formal and actual parameter counts. |
| 400 __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)), code_target, ne); |
| 401 |
| 402 // 7. Jump to the code in r3 without checking arguments. |
| 403 ParameterCount expected(0); |
| 404 __ InvokeCode(r3, expected, expected, JUMP_FUNCTION); |
| 405 } |
| 406 } |
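Editor's note: the receiver handling in step 3 of Generate_FunctionCall above follows the classic non-strict call rule: a null or undefined receiver becomes the callee's global object (read from the context loaded in step 2), and any other non-object is boxed through the TO_OBJECT builtin. A minimal stand-alone sketch of that decision, using purely hypothetical C++ stand-ins (none of these names are V8 API):

    #include <cassert>

    // Illustrative stand-ins only; none of these types or helpers exist in V8.
    enum class Kind { kUndefined, kNull, kSmi, kString, kJSObject };
    struct Value { Kind kind; };

    // Placeholder for "the global object of the callee's context".
    Value GlobalReceiverFor(const Value& /*function*/) { return Value{Kind::kJSObject}; }
    // Placeholder for the TO_OBJECT builtin: boxes a primitive into a wrapper object.
    Value ToObject(const Value& /*primitive*/) { return Value{Kind::kJSObject}; }

    // The decision step 3 makes about the receiver slot.
    Value PatchReceiver(const Value& receiver, const Value& function) {
      if (receiver.kind == Kind::kNull || receiver.kind == Kind::kUndefined) {
        return GlobalReceiverFor(function);   // use_global_receiver path
      }
      if (receiver.kind != Kind::kJSObject) {
        return ToObject(receiver);            // call_to_object path
      }
      return receiver;                        // already a JS object: keep it
    }

    int main() {
      assert(PatchReceiver(Value{Kind::kNull}, Value{Kind::kJSObject}).kind == Kind::kJSObject);
      assert(PatchReceiver(Value{Kind::kSmi}, Value{Kind::kJSObject}).kind == Kind::kJSObject);
      return 0;
    }

(The real code checks for a smi before the null/undefined comparisons, but the outcome is the same.)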
| 407 |
| 408 |
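Editor's note: steps 1, 4 and 5 of Generate_FunctionCall together turn the stack for f.call(recv, a, b) into the stack for a direct call of f with receiver recv. The sketch below simulates that shuffle on a std::vector instead of the real machine stack; it is an assumption-laden illustration (step 3's receiver conversion and the non-function path are left out), not V8 code:

    #include <cassert>
    #include <string>
    #include <vector>

    // slots[0] is the value at sp; the last slot is the call() receiver,
    // i.e. the function f itself. argc is the argument count in r0.
    std::vector<std::string> SimulateFunctionCall(std::vector<std::string> slots,
                                                  int* argc) {
      // 1. Make sure there is at least one argument (the receiver).
      if (*argc == 0) {
        slots.insert(slots.begin(), "undefined");  // push undefined
        *argc = 1;
      }
      // 4. Shift every slot one position up in address, overwriting the
      //    function's slot with the first argument (the new receiver).
      for (int i = *argc; i >= 1; --i) slots[i] = slots[i - 1];
      // 5. Drop the now-duplicated slot at sp and adjust the count.
      slots.erase(slots.begin());
      --*argc;
      return slots;
    }

    int main() {
      // f.call(recv, a, b): argc == 3; slots from sp upwards: b, a, recv, f.
      int argc = 3;
      std::vector<std::string> slots = {"b", "a", "recv", "f"};
      slots = SimulateFunctionCall(slots, &argc);
      assert(argc == 2);
      assert((slots == std::vector<std::string>{"b", "a", "recv"}));
      return 0;
    }

Doing the shift in place, towards the function's slot, costs one pass over the arguments and leaves exactly one slot to pop afterwards.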
| 261 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | 409 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { |
| 262 const int kIndexOffset = -5 * kPointerSize; | 410 const int kIndexOffset = -5 * kPointerSize; |
| 263 const int kLimitOffset = -4 * kPointerSize; | 411 const int kLimitOffset = -4 * kPointerSize; |
| 264 const int kArgsOffset = 2 * kPointerSize; | 412 const int kArgsOffset = 2 * kPointerSize; |
| 265 const int kRecvOffset = 3 * kPointerSize; | 413 const int kRecvOffset = 3 * kPointerSize; |
| 266 const int kFunctionOffset = 4 * kPointerSize; | 414 const int kFunctionOffset = 4 * kPointerSize; |
| 267 | 415 |
| 268 __ EnterInternalFrame(); | 416 __ EnterInternalFrame(); |
| 269 | 417 |
| 270 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function | 418 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function |
| (...skipping 140 matching lines...) |
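Editor's note: most of Generate_FunctionApply is elided in this hunk; what is visible is the frame slots it reserves (kIndexOffset, kLimitOffset and friends). Conceptually the builtin walks an index towards a limit, pushing one element of the arguments array per iteration before the final call. A rough, hypothetical sketch of that loop, not the builtin's actual code:

    #include <cassert>
    #include <cstddef>
    #include <string>
    #include <vector>

    using Value = std::string;  // illustrative stand-in for a tagged JS value

    // Walk an index up to a limit (the builtin keeps both values in its own
    // frame; see kIndexOffset and kLimitOffset above), load each element and
    // push it as an ordinary stack argument before the final call.
    std::vector<Value> SpreadArguments(const std::vector<Value>& args_object) {
      std::vector<Value> stack;                      // stands in for pushes onto sp
      std::size_t index = 0;                         // the kIndexOffset slot
      const std::size_t limit = args_object.size();  // the kLimitOffset slot
      while (index < limit) {
        stack.push_back(args_object[index]);         // element load + push
        ++index;
      }
      return stack;
    }

    int main() {
      const std::vector<Value> spread = SpreadArguments({"x", "y", "z"});
      assert(spread.size() == 3 && spread[2] == "z");
      return 0;
    }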
| 411 | 559 |
| 412 | 560 |
| 413 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { | 561 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { |
| 414 // ----------- S t a t e ------------- | 562 // ----------- S t a t e ------------- |
| 415 // -- r0 : actual number of arguments | 563 // -- r0 : actual number of arguments |
| 416 // -- r1 : function (passed through to callee) | 564 // -- r1 : function (passed through to callee) |
| 417 // -- r2 : expected number of arguments | 565 // -- r2 : expected number of arguments |
| 418 // -- r3 : code entry to call | 566 // -- r3 : code entry to call |
| 419 // ----------------------------------- | 567 // ----------------------------------- |
| 420 | 568 |
| 421 Label entry, invoke, function_prototype_call; | 569 Label invoke, dont_adapt_arguments; |
| 422 __ bind(&entry); | |
| 423 | 570 |
| 424 Label enough, too_few; | 571 Label enough, too_few; |
| 425 __ cmp(r0, Operand(r2)); | 572 __ cmp(r0, Operand(r2)); |
| 426 __ b(lt, &too_few); | 573 __ b(lt, &too_few); |
| 427 __ cmp(r2, Operand(-1)); | 574 __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); |
| 428 __ b(eq, &function_prototype_call); | 575 __ b(eq, &dont_adapt_arguments); |
| 429 | 576 |
| 430 { // Enough parameters: actual >= expected | 577 { // Enough parameters: actual >= expected |
| 431 __ bind(&enough); | 578 __ bind(&enough); |
| 432 EnterArgumentsAdaptorFrame(masm); | 579 EnterArgumentsAdaptorFrame(masm); |
| 433 | 580 |
| 434 // Calculate copy start address into r0 and copy end address into r2. | 581 // Calculate copy start address into r0 and copy end address into r2. |
| 435 // r0: actual number of arguments as a smi | 582 // r0: actual number of arguments as a smi |
| 436 // r1: function | 583 // r1: function |
| 437 // r2: expected number of arguments | 584 // r2: expected number of arguments |
| 438 // r3: code entry to call | 585 // r3: code entry to call |
| (...skipping 69 matching lines...) |
| 508 ExitArgumentsAdaptorFrame(masm); | 655 ExitArgumentsAdaptorFrame(masm); |
| 509 __ mov(pc, lr); | 656 __ mov(pc, lr); |
| 510 | 657 |
| 511 // Compute the offset from the beginning of the ArgumentsAdaptorTrampoline | 658 // Compute the offset from the beginning of the ArgumentsAdaptorTrampoline |
| 512 // builtin code object to the return address after the call. | 659 // builtin code object to the return address after the call. |
| 513 ASSERT(return_site.is_bound()); | 660 ASSERT(return_site.is_bound()); |
| 514 arguments_adaptor_call_pc_offset_ = return_site.pos() + Code::kHeaderSize; | 661 arguments_adaptor_call_pc_offset_ = return_site.pos() + Code::kHeaderSize; |
| 515 | 662 |
| 516 | 663 |
| 517 // ------------------------------------------- | 664 // ------------------------------------------- |
| 518 // Function.prototype.call implementation. | 665 // Don't adapt arguments. |
| 519 // ------------------------------------------- | 666 // ------------------------------------------- |
| 520 // r0: actual number of argument | 667 __ bind(&dont_adapt_arguments); |
| 521 __ bind(&function_prototype_call); | 668 __ mov(pc, r3); |
| 522 | |
| 523 // 1. Make sure we have at least one argument. | |
| 524 // r0: actual number of argument | |
| 525 { Label done; | |
| 526 __ tst(r0, Operand(r0)); | |
| 527 __ b(ne, &done); | |
| 528 __ mov(r2, Operand(Factory::undefined_value())); | |
| 529 __ push(r2); | |
| 530 __ add(r0, r0, Operand(1)); | |
| 531 __ bind(&done); | |
| 532 } | |
| 533 | |
| 534 // 2. Get the function to call. Already in r1. | |
| 535 // r0: actual number of argument | |
| 536 { Label done, non_function, function; | |
| 537 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); | |
| 538 __ tst(r1, Operand(kSmiTagMask)); | |
| 539 __ b(eq, &non_function); | |
| 540 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); | |
| 541 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); | |
| 542 __ cmp(r2, Operand(JS_FUNCTION_TYPE)); | |
| 543 __ b(eq, &function); | |
| 544 | |
| 545 // Non-function called: Clear the function to force exception. | |
| 546 __ bind(&non_function); | |
| 547 __ mov(r1, Operand(0)); | |
| 548 __ b(&done); | |
| 549 | |
| 550 // Change the context eagerly because it will be used below to get the | |
| 551 // right global object. | |
| 552 __ bind(&function); | |
| 553 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | |
| 554 | |
| 555 __ bind(&done); | |
| 556 } | |
| 557 | |
| 558 // 3. Make sure first argument is an object; convert if necessary. | |
| 559 // r0: actual number of arguments | |
| 560 // r1: function | |
| 561 { Label call_to_object, use_global_receiver, patch_receiver, done; | |
| 562 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); | |
| 563 __ ldr(r2, MemOperand(r2, -kPointerSize)); | |
| 564 | |
| 565 // r0: actual number of arguments | |
| 566 // r1: function | |
| 567 // r2: first argument | |
| 568 __ tst(r2, Operand(kSmiTagMask)); | |
| 569 __ b(eq, &call_to_object); | |
| 570 | |
| 571 __ mov(r3, Operand(Factory::null_value())); | |
| 572 __ cmp(r2, r3); | |
| 573 __ b(eq, &use_global_receiver); | |
| 574 __ mov(r3, Operand(Factory::undefined_value())); | |
| 575 __ cmp(r2, r3); | |
| 576 __ b(eq, &use_global_receiver); | |
| 577 | |
| 578 __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); | |
| 579 __ ldrb(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset)); | |
| 580 __ cmp(r3, Operand(FIRST_JS_OBJECT_TYPE)); | |
| 581 __ b(lt, &call_to_object); | |
| 582 __ cmp(r3, Operand(LAST_JS_OBJECT_TYPE)); | |
| 583 __ b(le, &done); | |
| 584 | |
| 585 __ bind(&call_to_object); | |
| 586 __ EnterInternalFrame(); | |
| 587 | |
| 588 // Store number of arguments and function across the call into the runtime. | |
| 589 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); | |
| 590 __ push(r0); | |
| 591 __ push(r1); | |
| 592 | |
| 593 __ push(r2); | |
| 594 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS); | |
| 595 __ mov(r2, r0); | |
| 596 | |
| 597 // Restore number of arguments and function. | |
| 598 __ pop(r1); | |
| 599 __ pop(r0); | |
| 600 __ mov(r0, Operand(r0, ASR, kSmiTagSize)); | |
| 601 | |
| 602 __ ExitInternalFrame(); | |
| 603 __ b(&patch_receiver); | |
| 604 | |
| 605 // Use the global object from the called function as the receiver. | |
| 606 __ bind(&use_global_receiver); | |
| 607 const int kGlobalIndex = | |
| 608 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | |
| 609 __ ldr(r2, FieldMemOperand(cp, kGlobalIndex)); | |
| 610 | |
| 611 __ bind(&patch_receiver); | |
| 612 __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2)); | |
| 613 __ str(r2, MemOperand(r3, -kPointerSize)); | |
| 614 | |
| 615 __ bind(&done); | |
| 616 } | |
| 617 | |
| 618 // 4. Shift stuff one slot down the stack | |
| 619 // r0: actual number of arguments (including call() receiver) | |
| 620 // r1: function | |
| 621 { Label loop; | |
| 622 // Calculate the copy start address (destination). Copy end address is sp. | |
| 623 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); | |
| 624 | |
| 625 __ bind(&loop); | |
| 626 __ ldr(ip, MemOperand(r2, -kPointerSize)); | |
| 627 __ str(ip, MemOperand(r2)); | |
| 628 __ sub(r2, r2, Operand(kPointerSize)); | |
| 629 __ cmp(r2, sp); | |
| 630 __ b(ne, &loop); | |
| 631 } | |
| 632 | |
| 633 // 5. Adjust the actual number of arguments and remove the top element. | |
| 634 // r0: actual number of arguments (including call() receiver) | |
| 635 // r1: function | |
| 636 __ sub(r0, r0, Operand(1)); | |
| 637 __ add(sp, sp, Operand(kPointerSize)); | |
| 638 | |
| 639 // 6. Get the code for the function or the non-function builtin. | |
| 640 // If number of expected arguments matches, then call. Otherwise restart | |
| 641 // the arguments adaptor stub. | |
| 642 // r0: actual number of arguments | |
| 643 // r1: function | |
| 644 { Label invoke; | |
| 645 __ tst(r1, r1); | |
| 646 __ b(ne, &invoke); | |
| 647 // __ stop("Generate_ArgumentsAdaptorTrampoline - non-function call"); | |
| 648 __ mov(r2, Operand(0)); // expected arguments is 0 for CALL_NON_FUNCTION | |
| 649 __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION); | |
| 650 __ b(&enough); | |
| 651 | |
| 652 __ bind(&invoke); | |
| 653 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | |
| 654 __ ldr(r2, | |
| 655 FieldMemOperand(r3, | |
| 656 SharedFunctionInfo::kFormalParameterCountOffset)); | |
| 657 __ ldr(r3, | |
| 658 MemOperand(r3, SharedFunctionInfo::kCodeOffset - kHeapObjectTag)); | |
| 659 __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 660 __ cmp(r2, r0); // Check formal and actual parameter counts. | |
| 661 __ b(ne, &entry); | |
| 662 | |
| 663 // 7. Jump to the code in r3 without checking arguments. | |
| 664 ParameterCount expected(0); | |
| 665 __ InvokeCode(r3, expected, expected, JUMP_FUNCTION); | |
| 666 } | |
| 667 } | 669 } |
| 668 | 670 |
| 669 | 671 |
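Editor's note: the adaptor change above swaps the bare -1 comparison for the named SharedFunctionInfo::kDontAdaptArgumentsSentinel and sends that case to a plain jump (mov pc, r3) instead of the old Function.prototype.call tail, which now lives in Generate_FunctionCall. Sketched as stand-alone C++ with hypothetical helpers (the real builtin builds an adaptor frame and copies stack slots, not vectors), the trampoline's decision looks roughly like this:

    #include <cassert>
    #include <string>
    #include <vector>

    using Value = std::string;  // illustrative stand-in for a tagged JS value

    // Mirrors the sentinel compared against above (the old code used -1 directly).
    constexpr int kDontAdaptArgumentsSentinel = -1;

    // The decision the trampoline makes before entering the callee's code.
    std::vector<Value> AdaptArguments(std::vector<Value> actual, int expected) {
      if (expected == kDontAdaptArgumentsSentinel) {
        // Don't adapt: jump straight to the code; the callee copes with any count.
        return actual;
      }
      if (static_cast<int>(actual.size()) >= expected) {
        // Enough parameters: the callee is entered with exactly 'expected'
        // arguments; the real builtin keeps the extra actuals reachable
        // through the adaptor frame rather than discarding them.
        actual.resize(expected);
        return actual;
      }
      // Too few parameters: pad the missing ones with undefined.
      while (static_cast<int>(actual.size()) < expected) {
        actual.push_back("undefined");
      }
      return actual;
    }

    int main() {
      assert(AdaptArguments({"a", "b", "c"}, 2).size() == 2);
      assert(AdaptArguments({"a"}, 3).back() == "undefined");
      assert(AdaptArguments({"a"}, kDontAdaptArgumentsSentinel).size() == 1);
      return 0;
    }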
| 670 static void Generate_DebugBreakCallHelper(MacroAssembler* masm, | 672 static void Generate_DebugBreakCallHelper(MacroAssembler* masm, |
| 671 RegList pointer_regs) { | 673 RegList pointer_regs) { |
| 672 // Save the content of all general purpose registers in memory. This copy in | 674 // Save the content of all general purpose registers in memory. This copy in |
| 673 // memory is later pushed onto the JS expression stack for the fake JS frame | 675 // memory is later pushed onto the JS expression stack for the fake JS frame |
| 674 // generated and also to the C frame generated on top of that. In the JS | 676 // generated and also to the C frame generated on top of that. In the JS |
| 675 // frame ONLY the registers containing pointers will be pushed on the | 677 // frame ONLY the registers containing pointers will be pushed on the |
| 676 // expression stack. This causes the GC to update these pointers so that | 678 // expression stack. This causes the GC to update these pointers so that |
| (...skipping 149 matching lines...) |
| 826 } | 828 } |
| 827 | 829 |
| 828 void Builtins::Generate_StubNoRegisters_DebugBreak(MacroAssembler* masm) { | 830 void Builtins::Generate_StubNoRegisters_DebugBreak(MacroAssembler* masm) { |
| 829 // Generate nothing as CodeStub CallFunction is not used on ARM. | 831 // Generate nothing as CodeStub CallFunction is not used on ARM. |
| 830 } | 832 } |
| 831 | 833 |
| 832 | 834 |
| 833 #undef __ | 835 #undef __ |
| 834 | 836 |
| 835 } } // namespace v8::internal | 837 } } // namespace v8::internal |
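Editor's note: the comment block in Generate_DebugBreakCallHelper above states the main idea: every general-purpose register is saved, but only the ones named in pointer_regs are pushed on the JS expression stack, so the GC can relocate what they point to while the debugger has the thread stopped. A hypothetical sketch of picking registers out of a RegList-style bitmask (not the MacroAssembler API):

    #include <cstdint>
    #include <iostream>
    #include <vector>

    // RegList-style bitmask: bit i set means register ri holds a pointer and
    // must stay visible to the GC while execution is stopped.
    using RegList = uint32_t;

    std::vector<int> PointerRegisters(RegList pointer_regs, int num_registers = 16) {
      std::vector<int> result;
      for (int i = 0; i < num_registers; ++i) {
        if (pointer_regs & (1u << i)) result.push_back(i);  // push ri in the builtin
      }
      return result;
    }

    int main() {
      // e.g. r0, r1 and r2 hold tagged pointers at this particular break site.
      RegList pointer_regs = (1u << 0) | (1u << 1) | (1u << 2);
      for (int reg : PointerRegisters(pointer_regs)) std::cout << 'r' << reg << ' ';
      std::cout << '\n';  // prints: r0 r1 r2
      return 0;
    }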