| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 253 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 264 // Load the prototype from the initial map. | 264 // Load the prototype from the initial map. |
| 265 __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); | 265 __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); |
| 266 } | 266 } |
| 267 | 267 |
| 268 | 268 |
| 269 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( | 269 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( |
| 270 MacroAssembler* masm, | 270 MacroAssembler* masm, |
| 271 int index, | 271 int index, |
| 272 Register prototype, | 272 Register prototype, |
| 273 Label* miss) { | 273 Label* miss) { |
| 274 // Check we're still in the same context. | |
| 275 __ cmp(Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), | |
| 276 masm->isolate()->global_object()); | |
| 277 __ j(not_equal, miss); | |
| 278 // Get the global function with the given index. | 274 // Get the global function with the given index. |
| 279 Handle<JSFunction> function( | 275 Handle<JSFunction> function( |
| 280 JSFunction::cast(masm->isolate()->native_context()->get(index))); | 276 JSFunction::cast(masm->isolate()->native_context()->get(index))); |
| 277 // Check we're still in the same context. |
| 278 Register scratch = prototype; |
| 279 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX); |
| 280 __ mov(scratch, Operand(esi, offset)); |
| 281 __ mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset)); |
| 282 __ cmp(Operand(scratch, Context::SlotOffset(index)), function); |
| 283 __ j(not_equal, miss); |
| 284 |
| 281 // Load its initial map. The global functions all have initial maps. | 285 // Load its initial map. The global functions all have initial maps. |
| 282 __ Set(prototype, Immediate(Handle<Map>(function->initial_map()))); | 286 __ Set(prototype, Immediate(Handle<Map>(function->initial_map()))); |
| 283 // Load the prototype from the initial map. | 287 // Load the prototype from the initial map. |
| 284 __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); | 288 __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); |
| 285 } | 289 } |
| 286 | 290 |
| 287 | 291 |
| 288 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, | 292 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, |
| 289 Register receiver, | 293 Register receiver, |
| 290 Register scratch, | 294 Register scratch, |
| (...skipping 116 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 407 Register name, | 411 Register name, |
| 408 Handle<JSObject> holder_obj, | 412 Handle<JSObject> holder_obj, |
| 409 IC::UtilityId id) { | 413 IC::UtilityId id) { |
| 410 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); | 414 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); |
| 411 __ CallExternalReference( | 415 __ CallExternalReference( |
| 412 ExternalReference(IC_Utility(id), masm->isolate()), | 416 ExternalReference(IC_Utility(id), masm->isolate()), |
| 413 StubCache::kInterceptorArgsLength); | 417 StubCache::kInterceptorArgsLength); |
| 414 } | 418 } |
| 415 | 419 |
| 416 | 420 |
| 417 // Number of pointers to be reserved on stack for fast API call. | |
| 418 static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength; | |
| 419 | |
| 420 | |
| 421 // Reserves space for the extra arguments to API function in the | |
| 422 // caller's frame. | |
| 423 // | |
| 424 // These arguments are set by CheckPrototypes and GenerateFastApiCall. | |
| 425 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { | |
| 426 // ----------- S t a t e ------------- | |
| 427 // -- esp[0] : return address | |
| 428 // -- esp[4] : last argument in the internal frame of the caller | |
| 429 // ----------------------------------- | |
| 430 __ pop(scratch); | |
| 431 for (int i = 0; i < kFastApiCallArguments; i++) { | |
| 432 __ push(Immediate(Smi::FromInt(0))); | |
| 433 } | |
| 434 __ push(scratch); | |
| 435 } | |
| 436 | |
| 437 | |
| 438 // Undoes the effects of ReserveSpaceForFastApiCall. | |
| 439 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { | |
| 440 // ----------- S t a t e ------------- | |
| 441 // -- esp[0] : return address. | |
| 442 // -- esp[4] : last fast api call extra argument. | |
| 443 // -- ... | |
| 444 // -- esp[kFastApiCallArguments * 4] : first fast api call extra argument. | |
| 445 // -- esp[kFastApiCallArguments * 4 + 4] : last argument in the internal | |
| 446 // frame. | |
| 447 // ----------------------------------- | |
| 448 __ pop(scratch); | |
| 449 __ add(esp, Immediate(kPointerSize * kFastApiCallArguments)); | |
| 450 __ push(scratch); | |
| 451 } | |
| 452 | |
| 453 | |
| 454 static void GenerateFastApiCallBody(MacroAssembler* masm, | |
| 455 const CallOptimization& optimization, | |
| 456 int argc, | |
| 457 bool restore_context); | |
| 458 | |
| 459 | |
| 460 // Generates call to API function. | |
| 461 static void GenerateFastApiCall(MacroAssembler* masm, | |
| 462 const CallOptimization& optimization, | |
| 463 int argc) { | |
| 464 typedef FunctionCallbackArguments FCA; | |
| 465 // Save calling context. | |
| 466 __ mov(Operand(esp, (1 + FCA::kContextSaveIndex) * kPointerSize), esi); | |
| 467 | |
| 468 // Get the function and setup the context. | |
| 469 Handle<JSFunction> function = optimization.constant_function(); | |
| 470 __ LoadHeapObject(edi, function); | |
| 471 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); | |
| 472 | |
| 473 // Construct the FunctionCallbackInfo. | |
| 474 __ mov(Operand(esp, (1 + FCA::kCalleeIndex) * kPointerSize), edi); | |
| 475 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); | |
| 476 Handle<Object> call_data(api_call_info->data(), masm->isolate()); | |
| 477 if (masm->isolate()->heap()->InNewSpace(*call_data)) { | |
| 478 __ mov(ecx, api_call_info); | |
| 479 __ mov(ebx, FieldOperand(ecx, CallHandlerInfo::kDataOffset)); | |
| 480 __ mov(Operand(esp, (1 + FCA::kDataIndex) * kPointerSize), ebx); | |
| 481 } else { | |
| 482 __ mov(Operand(esp, (1 + FCA::kDataIndex) * kPointerSize), | |
| 483 Immediate(call_data)); | |
| 484 } | |
| 485 __ mov(Operand(esp, (1 + FCA::kIsolateIndex) * kPointerSize), | |
| 486 Immediate(reinterpret_cast<int>(masm->isolate()))); | |
| 487 __ mov(Operand(esp, (1 + FCA::kReturnValueOffset) * kPointerSize), | |
| 488 masm->isolate()->factory()->undefined_value()); | |
| 489 __ mov(Operand(esp, (1 + FCA::kReturnValueDefaultValueIndex) * kPointerSize), | |
| 490 masm->isolate()->factory()->undefined_value()); | |
| 491 | |
| 492 // Prepare arguments. | |
| 493 STATIC_ASSERT(kFastApiCallArguments == 7); | |
| 494 __ lea(eax, Operand(esp, 1 * kPointerSize)); | |
| 495 | |
| 496 GenerateFastApiCallBody(masm, optimization, argc, false); | |
| 497 } | |
| 498 | |
| 499 | |
| 500 // Generate call to api function. | 421 // Generate call to api function. |
| 501 // This function uses push() to generate smaller, faster code than | 422 // This function uses push() to generate smaller, faster code than |
| 502 // the version above. It is an optimization that should will be removed | 423 // the version above. It is an optimization that should will be removed |
| 503 // when api call ICs are generated in hydrogen. | 424 // when api call ICs are generated in hydrogen. |
| 504 static void GenerateFastApiCall(MacroAssembler* masm, | 425 void StubCompiler::GenerateFastApiCall(MacroAssembler* masm, |
| 505 const CallOptimization& optimization, | 426 const CallOptimization& optimization, |
| 506 Register receiver, | 427 Handle<Map> receiver_map, |
| 507 Register scratch1, | 428 Register receiver, |
| 508 Register scratch2, | 429 Register scratch_in, |
| 509 Register scratch3, | 430 bool is_store, |
| 510 int argc, | 431 int argc, |
| 511 Register* values) { | 432 Register* values) { |
| 512 ASSERT(optimization.is_simple_api_call()); | |
| 513 | |
| 514 // Copy return value. | 433 // Copy return value. |
| 515 __ pop(scratch1); | 434 __ pop(scratch_in); |
| 516 | |
| 517 // receiver | 435 // receiver |
| 518 __ push(receiver); | 436 __ push(receiver); |
| 519 | |
| 520 // Write the arguments to stack frame. | 437 // Write the arguments to stack frame. |
| 521 for (int i = 0; i < argc; i++) { | 438 for (int i = 0; i < argc; i++) { |
| 522 Register arg = values[argc-1-i]; | 439 Register arg = values[argc-1-i]; |
| 523 ASSERT(!receiver.is(arg)); | 440 ASSERT(!receiver.is(arg)); |
| 524 ASSERT(!scratch1.is(arg)); | 441 ASSERT(!scratch_in.is(arg)); |
| 525 ASSERT(!scratch2.is(arg)); | |
| 526 ASSERT(!scratch3.is(arg)); | |
| 527 __ push(arg); | 442 __ push(arg); |
| 528 } | 443 } |
| 444 __ push(scratch_in); |
| 445 // Stack now matches JSFunction abi. |
| 446 ASSERT(optimization.is_simple_api_call()); |
| 529 | 447 |
| 530 typedef FunctionCallbackArguments FCA; | 448 // Abi for CallApiFunctionStub. |
| 449 Register callee = eax; |
| 450 Register call_data = ebx; |
| 451 Register holder = ecx; |
| 452 Register api_function_address = edx; |
| 453 Register scratch = edi; // scratch_in is no longer valid. |
| 531 | 454 |
| 532 STATIC_ASSERT(FCA::kHolderIndex == 0); | 455 // Put holder in place. |
| 533 STATIC_ASSERT(FCA::kIsolateIndex == 1); | 456 CallOptimization::HolderLookup holder_lookup; |
| 534 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2); | 457 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType( |
| 535 STATIC_ASSERT(FCA::kReturnValueOffset == 3); | 458 receiver_map, |
| 536 STATIC_ASSERT(FCA::kDataIndex == 4); | 459 &holder_lookup); |
| 537 STATIC_ASSERT(FCA::kCalleeIndex == 5); | 460 switch (holder_lookup) { |
| 538 STATIC_ASSERT(FCA::kContextSaveIndex == 6); | 461 case CallOptimization::kHolderIsReceiver: |
| 539 STATIC_ASSERT(FCA::kArgsLength == 7); | 462 __ Move(holder, receiver); |
| 540 | 463 break; |
| 541 // context save | 464 case CallOptimization::kHolderFound: |
| 542 __ push(esi); | 465 __ LoadHeapObject(holder, api_holder); |
| 543 | 466 break; |
| 544 // Get the function and setup the context. | 467 case CallOptimization::kHolderNotFound: |
| 545 Handle<JSFunction> function = optimization.constant_function(); | 468 UNREACHABLE(); |
| 546 __ LoadHeapObject(scratch2, function); | 469 break; |
| 547 __ mov(esi, FieldOperand(scratch2, JSFunction::kContextOffset)); | 470 } |
| 548 // callee | |
| 549 __ push(scratch2); | |
| 550 | 471 |
| 551 Isolate* isolate = masm->isolate(); | 472 Isolate* isolate = masm->isolate(); |
| 473 Handle<JSFunction> function = optimization.constant_function(); |
| 552 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); | 474 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); |
| 553 Handle<Object> call_data(api_call_info->data(), isolate); | 475 Handle<Object> call_data_obj(api_call_info->data(), isolate); |
| 554 // Push data from ExecutableAccessorInfo. | 476 |
| 555 if (isolate->heap()->InNewSpace(*call_data)) { | 477 // Put callee in place. |
| 556 __ mov(scratch2, api_call_info); | 478 __ LoadHeapObject(callee, function); |
| 557 __ mov(scratch3, FieldOperand(scratch2, CallHandlerInfo::kDataOffset)); | 479 |
| 558 __ push(scratch3); | 480 bool call_data_undefined = false; |
| 481 // Put call_data in place. |
| 482 if (isolate->heap()->InNewSpace(*call_data_obj)) { |
| 483 __ mov(scratch, api_call_info); |
| 484 __ mov(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset)); |
| 485 } else if (call_data_obj->IsUndefined()) { |
| 486 call_data_undefined = true; |
| 487 __ mov(call_data, Immediate(isolate->factory()->undefined_value())); |
| 559 } else { | 488 } else { |
| 560 __ push(Immediate(call_data)); | 489 __ mov(call_data, call_data_obj); |
| 561 } | 490 } |
| 562 // return value | |
| 563 __ push(Immediate(isolate->factory()->undefined_value())); | |
| 564 // return value default | |
| 565 __ push(Immediate(isolate->factory()->undefined_value())); | |
| 566 // isolate | |
| 567 __ push(Immediate(reinterpret_cast<int>(isolate))); | |
| 568 // holder | |
| 569 __ push(receiver); | |
| 570 | 491 |
| 571 // store receiver address for GenerateFastApiCallBody | 492 // Put api_function_address in place. |
| 572 ASSERT(!scratch1.is(eax)); | 493 Address function_address = v8::ToCData<Address>(api_call_info->callback()); |
| 573 __ mov(eax, esp); | 494 __ mov(api_function_address, Immediate(function_address)); |
| 574 | 495 |
| 575 // return address | 496 // Jump to stub. |
| 576 __ push(scratch1); | 497 CallApiFunctionStub stub(is_store, call_data_undefined, argc); |
| 577 | 498 __ TailCallStub(&stub); |
| 578 GenerateFastApiCallBody(masm, optimization, argc, true); | |
| 579 } | 499 } |
| 580 | 500 |
| 581 | 501 |
| 582 static void GenerateFastApiCallBody(MacroAssembler* masm, | |
| 583 const CallOptimization& optimization, | |
| 584 int argc, | |
| 585 bool restore_context) { | |
| 586 // ----------- S t a t e ------------- | |
| 587 // -- esp[0] : return address | |
| 588 // -- esp[4] - esp[28] : FunctionCallbackInfo, incl. | |
| 589 // : object passing the type check | |
| 590 // (set by CheckPrototypes) | |
| 591 // -- esp[32] : last argument | |
| 592 // -- ... | |
| 593 // -- esp[(argc + 7) * 4] : first argument | |
| 594 // -- esp[(argc + 8) * 4] : receiver | |
| 595 // | |
| 596 // -- eax : receiver address | |
| 597 // ----------------------------------- | |
| 598 typedef FunctionCallbackArguments FCA; | |
| 599 | |
| 600 // API function gets reference to the v8::Arguments. If CPU profiler | |
| 601 // is enabled wrapper function will be called and we need to pass | |
| 602 // address of the callback as additional parameter, always allocate | |
| 603 // space for it. | |
| 604 const int kApiArgc = 1 + 1; | |
| 605 | |
| 606 // Allocate the v8::Arguments structure in the arguments' space since | |
| 607 // it's not controlled by GC. | |
| 608 const int kApiStackSpace = 4; | |
| 609 | |
| 610 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); | |
| 611 | |
| 612 // Function address is a foreign pointer outside V8's heap. | |
| 613 Address function_address = v8::ToCData<Address>(api_call_info->callback()); | |
| 614 __ PrepareCallApiFunction(kApiArgc + kApiStackSpace); | |
| 615 | |
| 616 // FunctionCallbackInfo::implicit_args_. | |
| 617 __ mov(ApiParameterOperand(2), eax); | |
| 618 __ add(eax, Immediate((argc + kFastApiCallArguments - 1) * kPointerSize)); | |
| 619 // FunctionCallbackInfo::values_. | |
| 620 __ mov(ApiParameterOperand(3), eax); | |
| 621 // FunctionCallbackInfo::length_. | |
| 622 __ Set(ApiParameterOperand(4), Immediate(argc)); | |
| 623 // FunctionCallbackInfo::is_construct_call_. | |
| 624 __ Set(ApiParameterOperand(5), Immediate(0)); | |
| 625 | |
| 626 // v8::InvocationCallback's argument. | |
| 627 __ lea(eax, ApiParameterOperand(2)); | |
| 628 __ mov(ApiParameterOperand(0), eax); | |
| 629 | |
| 630 Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback); | |
| 631 | |
| 632 Operand context_restore_operand(ebp, | |
| 633 (2 + FCA::kContextSaveIndex) * kPointerSize); | |
| 634 Operand return_value_operand(ebp, | |
| 635 (2 + FCA::kReturnValueOffset) * kPointerSize); | |
| 636 __ CallApiFunctionAndReturn(function_address, | |
| 637 thunk_address, | |
| 638 ApiParameterOperand(1), | |
| 639 argc + kFastApiCallArguments + 1, | |
| 640 return_value_operand, | |
| 641 restore_context ? | |
| 642 &context_restore_operand : NULL); | |
| 643 } | |
| 644 | |
| 645 | |
| 646 class CallInterceptorCompiler BASE_EMBEDDED { | |
| 647 public: | |
| 648 CallInterceptorCompiler(CallStubCompiler* stub_compiler, | |
| 649 const ParameterCount& arguments, | |
| 650 Register name) | |
| 651 : stub_compiler_(stub_compiler), | |
| 652 arguments_(arguments), | |
| 653 name_(name) {} | |
| 654 | |
| 655 void Compile(MacroAssembler* masm, | |
| 656 Handle<JSObject> object, | |
| 657 Handle<JSObject> holder, | |
| 658 Handle<Name> name, | |
| 659 LookupResult* lookup, | |
| 660 Register receiver, | |
| 661 Register scratch1, | |
| 662 Register scratch2, | |
| 663 Register scratch3, | |
| 664 Label* miss) { | |
| 665 ASSERT(holder->HasNamedInterceptor()); | |
| 666 ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined()); | |
| 667 | |
| 668 // Check that the receiver isn't a smi. | |
| 669 __ JumpIfSmi(receiver, miss); | |
| 670 | |
| 671 CallOptimization optimization(lookup); | |
| 672 if (optimization.is_constant_call()) { | |
| 673 CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3, | |
| 674 holder, lookup, name, optimization, miss); | |
| 675 } else { | |
| 676 CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3, | |
| 677 name, holder, miss); | |
| 678 } | |
| 679 } | |
| 680 | |
| 681 private: | |
| 682 void CompileCacheable(MacroAssembler* masm, | |
| 683 Handle<JSObject> object, | |
| 684 Register receiver, | |
| 685 Register scratch1, | |
| 686 Register scratch2, | |
| 687 Register scratch3, | |
| 688 Handle<JSObject> interceptor_holder, | |
| 689 LookupResult* lookup, | |
| 690 Handle<Name> name, | |
| 691 const CallOptimization& optimization, | |
| 692 Label* miss_label) { | |
| 693 ASSERT(optimization.is_constant_call()); | |
| 694 ASSERT(!lookup->holder()->IsGlobalObject()); | |
| 695 | |
| 696 int depth1 = kInvalidProtoDepth; | |
| 697 int depth2 = kInvalidProtoDepth; | |
| 698 bool can_do_fast_api_call = false; | |
| 699 if (optimization.is_simple_api_call() && | |
| 700 !lookup->holder()->IsGlobalObject()) { | |
| 701 depth1 = optimization.GetPrototypeDepthOfExpectedType( | |
| 702 object, interceptor_holder); | |
| 703 if (depth1 == kInvalidProtoDepth) { | |
| 704 depth2 = optimization.GetPrototypeDepthOfExpectedType( | |
| 705 interceptor_holder, Handle<JSObject>(lookup->holder())); | |
| 706 } | |
| 707 can_do_fast_api_call = | |
| 708 depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth; | |
| 709 } | |
| 710 | |
| 711 Counters* counters = masm->isolate()->counters(); | |
| 712 __ IncrementCounter(counters->call_const_interceptor(), 1); | |
| 713 | |
| 714 if (can_do_fast_api_call) { | |
| 715 __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1); | |
| 716 ReserveSpaceForFastApiCall(masm, scratch1); | |
| 717 } | |
| 718 | |
| 719 // Check that the maps from receiver to interceptor's holder | |
| 720 // haven't changed and thus we can invoke interceptor. | |
| 721 Label miss_cleanup; | |
| 722 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label; | |
| 723 Register holder = | |
| 724 stub_compiler_->CheckPrototypes( | |
| 725 IC::CurrentTypeOf(object, masm->isolate()), receiver, | |
| 726 interceptor_holder, scratch1, scratch2, scratch3, | |
| 727 name, depth1, miss); | |
| 728 | |
| 729 // Invoke an interceptor and if it provides a value, | |
| 730 // branch to |regular_invoke|. | |
| 731 Label regular_invoke; | |
| 732 LoadWithInterceptor(masm, receiver, holder, interceptor_holder, | |
| 733 ®ular_invoke); | |
| 734 | |
| 735 // Interceptor returned nothing for this property. Try to use cached | |
| 736 // constant function. | |
| 737 | |
| 738 // Check that the maps from interceptor's holder to constant function's | |
| 739 // holder haven't changed and thus we can use cached constant function. | |
| 740 if (*interceptor_holder != lookup->holder()) { | |
| 741 stub_compiler_->CheckPrototypes( | |
| 742 IC::CurrentTypeOf(interceptor_holder, masm->isolate()), holder, | |
| 743 handle(lookup->holder()), scratch1, scratch2, scratch3, | |
| 744 name, depth2, miss); | |
| 745 } else { | |
| 746 // CheckPrototypes has a side effect of fetching a 'holder' | |
| 747 // for API (object which is instanceof for the signature). It's | |
| 748 // safe to omit it here, as if present, it should be fetched | |
| 749 // by the previous CheckPrototypes. | |
| 750 ASSERT(depth2 == kInvalidProtoDepth); | |
| 751 } | |
| 752 | |
| 753 // Invoke function. | |
| 754 if (can_do_fast_api_call) { | |
| 755 GenerateFastApiCall(masm, optimization, arguments_.immediate()); | |
| 756 } else { | |
| 757 Handle<JSFunction> fun = optimization.constant_function(); | |
| 758 stub_compiler_->GenerateJumpFunction(object, fun); | |
| 759 } | |
| 760 | |
| 761 // Deferred code for fast API call case---clean preallocated space. | |
| 762 if (can_do_fast_api_call) { | |
| 763 __ bind(&miss_cleanup); | |
| 764 FreeSpaceForFastApiCall(masm, scratch1); | |
| 765 __ jmp(miss_label); | |
| 766 } | |
| 767 | |
| 768 // Invoke a regular function. | |
| 769 __ bind(®ular_invoke); | |
| 770 if (can_do_fast_api_call) { | |
| 771 FreeSpaceForFastApiCall(masm, scratch1); | |
| 772 } | |
| 773 } | |
| 774 | |
| 775 void CompileRegular(MacroAssembler* masm, | |
| 776 Handle<JSObject> object, | |
| 777 Register receiver, | |
| 778 Register scratch1, | |
| 779 Register scratch2, | |
| 780 Register scratch3, | |
| 781 Handle<Name> name, | |
| 782 Handle<JSObject> interceptor_holder, | |
| 783 Label* miss_label) { | |
| 784 Register holder = | |
| 785 stub_compiler_->CheckPrototypes( | |
| 786 IC::CurrentTypeOf(object, masm->isolate()), receiver, | |
| 787 interceptor_holder, scratch1, scratch2, scratch3, name, miss_label); | |
| 788 | |
| 789 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 790 // Save the name_ register across the call. | |
| 791 __ push(name_); | |
| 792 | |
| 793 CompileCallLoadPropertyWithInterceptor( | |
| 794 masm, receiver, holder, name_, interceptor_holder, | |
| 795 IC::kLoadPropertyWithInterceptorForCall); | |
| 796 | |
| 797 // Restore the name_ register. | |
| 798 __ pop(name_); | |
| 799 | |
| 800 // Leave the internal frame. | |
| 801 } | |
| 802 | |
| 803 void LoadWithInterceptor(MacroAssembler* masm, | |
| 804 Register receiver, | |
| 805 Register holder, | |
| 806 Handle<JSObject> holder_obj, | |
| 807 Label* interceptor_succeeded) { | |
| 808 { | |
| 809 FrameScope scope(masm, StackFrame::INTERNAL); | |
| 810 __ push(receiver); | |
| 811 __ push(holder); | |
| 812 __ push(name_); | |
| 813 | |
| 814 CompileCallLoadPropertyWithInterceptor( | |
| 815 masm, receiver, holder, name_, holder_obj, | |
| 816 IC::kLoadPropertyWithInterceptorOnly); | |
| 817 | |
| 818 __ pop(name_); | |
| 819 __ pop(holder); | |
| 820 __ pop(receiver); | |
| 821 // Leave the internal frame. | |
| 822 } | |
| 823 | |
| 824 __ cmp(eax, masm->isolate()->factory()->no_interceptor_result_sentinel()); | |
| 825 __ j(not_equal, interceptor_succeeded); | |
| 826 } | |
| 827 | |
| 828 CallStubCompiler* stub_compiler_; | |
| 829 const ParameterCount& arguments_; | |
| 830 Register name_; | |
| 831 }; | |
| 832 | |
| 833 | |
| 834 void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm, | 502 void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm, |
| 835 Label* label, | 503 Label* label, |
| 836 Handle<Name> name) { | 504 Handle<Name> name) { |
| 837 if (!label->is_unused()) { | 505 if (!label->is_unused()) { |
| 838 __ bind(label); | 506 __ bind(label); |
| 839 __ mov(this->name(), Immediate(name)); | 507 __ mov(this->name(), Immediate(name)); |
| 840 } | 508 } |
| 841 } | 509 } |
| 842 | 510 |
| 843 | 511 |
| (...skipping 332 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1176 | 844 |
| 1177 void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) { | 845 void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) { |
| 1178 __ jmp(code, RelocInfo::CODE_TARGET); | 846 __ jmp(code, RelocInfo::CODE_TARGET); |
| 1179 } | 847 } |
| 1180 | 848 |
| 1181 | 849 |
| 1182 #undef __ | 850 #undef __ |
| 1183 #define __ ACCESS_MASM(masm()) | 851 #define __ ACCESS_MASM(masm()) |
| 1184 | 852 |
| 1185 | 853 |
| 1186 Register StubCompiler::CheckPrototypes(Handle<Type> type, | 854 Register StubCompiler::CheckPrototypes(Handle<HeapType> type, |
| 1187 Register object_reg, | 855 Register object_reg, |
| 1188 Handle<JSObject> holder, | 856 Handle<JSObject> holder, |
| 1189 Register holder_reg, | 857 Register holder_reg, |
| 1190 Register scratch1, | 858 Register scratch1, |
| 1191 Register scratch2, | 859 Register scratch2, |
| 1192 Handle<Name> name, | 860 Handle<Name> name, |
| 1193 int save_at_depth, | |
| 1194 Label* miss, | 861 Label* miss, |
| 1195 PrototypeCheckType check) { | 862 PrototypeCheckType check) { |
| 1196 Handle<Map> receiver_map(IC::TypeToMap(*type, isolate())); | 863 Handle<Map> receiver_map(IC::TypeToMap(*type, isolate())); |
| 1197 // Make sure that the type feedback oracle harvests the receiver map. | |
| 1198 // TODO(svenpanne) Remove this hack when all ICs are reworked. | |
| 1199 __ mov(scratch1, receiver_map); | |
| 1200 | 864 |
| 1201 // Make sure there's no overlap between holder and object registers. | 865 // Make sure there's no overlap between holder and object registers. |
| 1202 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); | 866 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); |
| 1203 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg) | 867 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg) |
| 1204 && !scratch2.is(scratch1)); | 868 && !scratch2.is(scratch1)); |
| 1205 | 869 |
| 1206 // Keep track of the current object in register reg. | 870 // Keep track of the current object in register reg. |
| 1207 Register reg = object_reg; | 871 Register reg = object_reg; |
| 1208 int depth = 0; | 872 int depth = 0; |
| 1209 | 873 |
| 1210 const int kHolderIndex = FunctionCallbackArguments::kHolderIndex + 1; | |
| 1211 if (save_at_depth == depth) { | |
| 1212 __ mov(Operand(esp, kHolderIndex * kPointerSize), reg); | |
| 1213 } | |
| 1214 | |
| 1215 Handle<JSObject> current = Handle<JSObject>::null(); | 874 Handle<JSObject> current = Handle<JSObject>::null(); |
| 1216 if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant()); | 875 if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant()); |
| 1217 Handle<JSObject> prototype = Handle<JSObject>::null(); | 876 Handle<JSObject> prototype = Handle<JSObject>::null(); |
| 1218 Handle<Map> current_map = receiver_map; | 877 Handle<Map> current_map = receiver_map; |
| 1219 Handle<Map> holder_map(holder->map()); | 878 Handle<Map> holder_map(holder->map()); |
| 1220 // Traverse the prototype chain and check the maps in the prototype chain for | 879 // Traverse the prototype chain and check the maps in the prototype chain for |
| 1221 // fast and global objects or do negative lookup for normal objects. | 880 // fast and global objects or do negative lookup for normal objects. |
| 1222 while (!current_map.is_identical_to(holder_map)) { | 881 while (!current_map.is_identical_to(holder_map)) { |
| 1223 ++depth; | 882 ++depth; |
| 1224 | 883 |
| (...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1272 if (in_new_space) { | 931 if (in_new_space) { |
| 1273 // The prototype is in new space; we cannot store a reference to it | 932 // The prototype is in new space; we cannot store a reference to it |
| 1274 // in the code. Load it from the map. | 933 // in the code. Load it from the map. |
| 1275 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); | 934 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); |
| 1276 } else { | 935 } else { |
| 1277 // The prototype is in old space; load it directly. | 936 // The prototype is in old space; load it directly. |
| 1278 __ mov(reg, prototype); | 937 __ mov(reg, prototype); |
| 1279 } | 938 } |
| 1280 } | 939 } |
| 1281 | 940 |
| 1282 if (save_at_depth == depth) { | |
| 1283 __ mov(Operand(esp, kHolderIndex * kPointerSize), reg); | |
| 1284 } | |
| 1285 | |
| 1286 // Go to the next object in the prototype chain. | 941 // Go to the next object in the prototype chain. |
| 1287 current = prototype; | 942 current = prototype; |
| 1288 current_map = handle(current->map()); | 943 current_map = handle(current->map()); |
| 1289 } | 944 } |
| 1290 | 945 |
| 1291 // Log the check depth. | 946 // Log the check depth. |
| 1292 LOG(isolate(), IntEvent("check-maps-depth", depth + 1)); | 947 LOG(isolate(), IntEvent("check-maps-depth", depth + 1)); |
| 1293 | 948 |
| 1294 if (depth != 0 || check == CHECK_ALL_MAPS) { | 949 if (depth != 0 || check == CHECK_ALL_MAPS) { |
| 1295 // Check the holder map. | 950 // Check the holder map. |
| (...skipping 28 matching lines...) Expand all Loading... |
| 1324 Label success; | 979 Label success; |
| 1325 __ jmp(&success); | 980 __ jmp(&success); |
| 1326 GenerateRestoreName(masm(), miss, name); | 981 GenerateRestoreName(masm(), miss, name); |
| 1327 TailCallBuiltin(masm(), MissBuiltin(kind())); | 982 TailCallBuiltin(masm(), MissBuiltin(kind())); |
| 1328 __ bind(&success); | 983 __ bind(&success); |
| 1329 } | 984 } |
| 1330 } | 985 } |
| 1331 | 986 |
| 1332 | 987 |
| 1333 Register LoadStubCompiler::CallbackHandlerFrontend( | 988 Register LoadStubCompiler::CallbackHandlerFrontend( |
| 1334 Handle<Type> type, | 989 Handle<HeapType> type, |
| 1335 Register object_reg, | 990 Register object_reg, |
| 1336 Handle<JSObject> holder, | 991 Handle<JSObject> holder, |
| 1337 Handle<Name> name, | 992 Handle<Name> name, |
| 1338 Handle<Object> callback) { | 993 Handle<Object> callback) { |
| 1339 Label miss; | 994 Label miss; |
| 1340 | 995 |
| 1341 Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss); | 996 Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss); |
| 1342 | 997 |
| 1343 if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) { | 998 if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) { |
| 1344 ASSERT(!reg.is(scratch2())); | 999 ASSERT(!reg.is(scratch2())); |
| (...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1402 } else { | 1057 } else { |
| 1403 KeyedLoadFieldStub stub(field.is_inobject(holder), | 1058 KeyedLoadFieldStub stub(field.is_inobject(holder), |
| 1404 field.translate(holder), | 1059 field.translate(holder), |
| 1405 representation); | 1060 representation); |
| 1406 GenerateTailCall(masm(), stub.GetCode(isolate())); | 1061 GenerateTailCall(masm(), stub.GetCode(isolate())); |
| 1407 } | 1062 } |
| 1408 } | 1063 } |
| 1409 | 1064 |
| 1410 | 1065 |
| 1411 void LoadStubCompiler::GenerateLoadCallback( | 1066 void LoadStubCompiler::GenerateLoadCallback( |
| 1412 const CallOptimization& call_optimization) { | |
| 1413 GenerateFastApiCall( | |
| 1414 masm(), call_optimization, receiver(), scratch1(), | |
| 1415 scratch2(), name(), 0, NULL); | |
| 1416 } | |
| 1417 | |
| 1418 | |
| 1419 void LoadStubCompiler::GenerateLoadCallback( | |
| 1420 Register reg, | 1067 Register reg, |
| 1421 Handle<ExecutableAccessorInfo> callback) { | 1068 Handle<ExecutableAccessorInfo> callback) { |
| 1422 // Insert additional parameters into the stack frame above return address. | 1069 // Insert additional parameters into the stack frame above return address. |
| 1423 ASSERT(!scratch3().is(reg)); | 1070 ASSERT(!scratch3().is(reg)); |
| 1424 __ pop(scratch3()); // Get return address to place it below. | 1071 __ pop(scratch3()); // Get return address to place it below. |
| 1425 | 1072 |
| 1426 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0); | 1073 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0); |
| 1427 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1); | 1074 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1); |
| 1428 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2); | 1075 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2); |
| 1429 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3); | 1076 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3); |
| (...skipping 12 matching lines...) Expand all Loading... |
| 1442 // ReturnValue default value | 1089 // ReturnValue default value |
| 1443 __ push(Immediate(isolate()->factory()->undefined_value())); | 1090 __ push(Immediate(isolate()->factory()->undefined_value())); |
| 1444 __ push(Immediate(reinterpret_cast<int>(isolate()))); | 1091 __ push(Immediate(reinterpret_cast<int>(isolate()))); |
| 1445 __ push(reg); // holder | 1092 __ push(reg); // holder |
| 1446 | 1093 |
| 1447 // Save a pointer to where we pushed the arguments. This will be | 1094 // Save a pointer to where we pushed the arguments. This will be |
| 1448 // passed as the const PropertyAccessorInfo& to the C++ callback. | 1095 // passed as the const PropertyAccessorInfo& to the C++ callback. |
| 1449 __ push(esp); | 1096 __ push(esp); |
| 1450 | 1097 |
| 1451 __ push(name()); // name | 1098 __ push(name()); // name |
| 1452 __ mov(ebx, esp); // esp points to reference to name (handler). | |
| 1453 | 1099 |
| 1454 __ push(scratch3()); // Restore return address. | 1100 __ push(scratch3()); // Restore return address. |
| 1455 | 1101 |
| 1456 // array for v8::Arguments::values_, handler for name and pointer | 1102 // ABI for CallApiGetter |
| 1457 // to the values (it is considered as a smi in GC). | 1103 Register getter_address = edx; |
| 1458 const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2; | 1104 Address function_address = v8::ToCData<Address>(callback->getter()); |
| 1459 // Allocate space for optional callback address parameter in case | 1105 __ mov(getter_address, Immediate(function_address)); |
| 1460 // CPU profiler is active. | |
| 1461 const int kApiArgc = 2 + 1; | |
| 1462 | 1106 |
| 1463 Address getter_address = v8::ToCData<Address>(callback->getter()); | 1107 CallApiGetterStub stub; |
| 1464 __ PrepareCallApiFunction(kApiArgc); | 1108 __ TailCallStub(&stub); |
| 1465 __ mov(ApiParameterOperand(0), ebx); // name. | |
| 1466 __ add(ebx, Immediate(kPointerSize)); | |
| 1467 __ mov(ApiParameterOperand(1), ebx); // arguments pointer. | |
| 1468 | |
| 1469 // Emitting a stub call may try to allocate (if the code is not | |
| 1470 // already generated). Do not allow the assembler to perform a | |
| 1471 // garbage collection but instead return the allocation failure | |
| 1472 // object. | |
| 1473 | |
| 1474 Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback); | |
| 1475 | |
| 1476 __ CallApiFunctionAndReturn(getter_address, | |
| 1477 thunk_address, | |
| 1478 ApiParameterOperand(2), | |
| 1479 kStackSpace, | |
| 1480 Operand(ebp, 7 * kPointerSize), | |
| 1481 NULL); | |
| 1482 } | 1109 } |
| 1483 | 1110 |
| 1484 | 1111 |
| 1485 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) { | 1112 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) { |
| 1486 // Return the constant value. | 1113 // Return the constant value. |
| 1487 __ LoadObject(eax, value); | 1114 __ LoadObject(eax, value); |
| 1488 __ ret(0); | 1115 __ ret(0); |
| 1489 } | 1116 } |
| 1490 | 1117 |
| 1491 | 1118 |
| (...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1581 __ push(scratch2()); // restore old return address | 1208 __ push(scratch2()); // restore old return address |
| 1582 | 1209 |
| 1583 ExternalReference ref = | 1210 ExternalReference ref = |
| 1584 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), | 1211 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), |
| 1585 isolate()); | 1212 isolate()); |
| 1586 __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1); | 1213 __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1); |
| 1587 } | 1214 } |
| 1588 } | 1215 } |
| 1589 | 1216 |
| 1590 | 1217 |
| 1591 void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) { | |
| 1592 if (kind_ == Code::KEYED_CALL_IC) { | |
| 1593 __ cmp(ecx, Immediate(name)); | |
| 1594 __ j(not_equal, miss); | |
| 1595 } | |
| 1596 } | |
| 1597 | |
| 1598 | |
| 1599 void CallStubCompiler::GenerateFunctionCheck(Register function, | |
| 1600 Register scratch, | |
| 1601 Label* miss) { | |
| 1602 __ JumpIfSmi(function, miss); | |
| 1603 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); | |
| 1604 __ j(not_equal, miss); | |
| 1605 } | |
| 1606 | |
| 1607 | |
| 1608 void CallStubCompiler::GenerateLoadFunctionFromCell( | |
| 1609 Handle<Cell> cell, | |
| 1610 Handle<JSFunction> function, | |
| 1611 Label* miss) { | |
| 1612 // Get the value from the cell. | |
| 1613 if (Serializer::enabled()) { | |
| 1614 __ mov(edi, Immediate(cell)); | |
| 1615 __ mov(edi, FieldOperand(edi, Cell::kValueOffset)); | |
| 1616 } else { | |
| 1617 __ mov(edi, Operand::ForCell(cell)); | |
| 1618 } | |
| 1619 | |
| 1620 // Check that the cell contains the same function. | |
| 1621 if (isolate()->heap()->InNewSpace(*function)) { | |
| 1622 // We can't embed a pointer to a function in new space so we have | |
| 1623 // to verify that the shared function info is unchanged. This has | |
| 1624 // the nice side effect that multiple closures based on the same | |
| 1625 // function can all use this call IC. Before we load through the | |
| 1626 // function, we have to verify that it still is a function. | |
| 1627 GenerateFunctionCheck(edi, ebx, miss); | |
| 1628 | |
| 1629 // Check the shared function info. Make sure it hasn't changed. | |
| 1630 __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset), | |
| 1631 Immediate(Handle<SharedFunctionInfo>(function->shared()))); | |
| 1632 } else { | |
| 1633 __ cmp(edi, Immediate(function)); | |
| 1634 } | |
| 1635 __ j(not_equal, miss); | |
| 1636 } | |
| 1637 | |
| 1638 | |
| 1639 void CallStubCompiler::GenerateMissBranch() { | |
| 1640 Handle<Code> code = | |
| 1641 isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(), | |
| 1642 kind_, | |
| 1643 extra_state()); | |
| 1644 __ jmp(code, RelocInfo::CODE_TARGET); | |
| 1645 } | |
| 1646 | |
| 1647 | |
| 1648 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object, | |
| 1649 Handle<JSObject> holder, | |
| 1650 PropertyIndex index, | |
| 1651 Handle<Name> name) { | |
| 1652 Label miss; | |
| 1653 | |
| 1654 Register reg = HandlerFrontendHeader( | |
| 1655 object, holder, name, RECEIVER_MAP_CHECK, &miss); | |
| 1656 | |
| 1657 GenerateFastPropertyLoad( | |
| 1658 masm(), edi, reg, index.is_inobject(holder), | |
| 1659 index.translate(holder), Representation::Tagged()); | |
| 1660 GenerateJumpFunction(object, edi, &miss); | |
| 1661 | |
| 1662 HandlerFrontendFooter(&miss); | |
| 1663 | |
| 1664 // Return the generated code. | |
| 1665 return GetCode(Code::FAST, name); | |
| 1666 } | |
| 1667 | |
| 1668 | |
| 1669 Handle<Code> CallStubCompiler::CompileArrayPushCall( | |
| 1670 Handle<Object> object, | |
| 1671 Handle<JSObject> holder, | |
| 1672 Handle<Cell> cell, | |
| 1673 Handle<JSFunction> function, | |
| 1674 Handle<String> name, | |
| 1675 Code::StubType type) { | |
| 1676 // If object is not an array or is observed or sealed, bail out to regular | |
| 1677 // call. | |
| 1678 if (!object->IsJSArray() || | |
| 1679 !cell.is_null() || | |
| 1680 Handle<JSArray>::cast(object)->map()->is_observed() || | |
| 1681 !Handle<JSArray>::cast(object)->map()->is_extensible()) { | |
| 1682 return Handle<Code>::null(); | |
| 1683 } | |
| 1684 | |
| 1685 Label miss; | |
| 1686 | |
| 1687 HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss); | |
| 1688 | |
| 1689 const int argc = arguments().immediate(); | |
| 1690 if (argc == 0) { | |
| 1691 // Noop, return the length. | |
| 1692 __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset)); | |
| 1693 __ ret((argc + 1) * kPointerSize); | |
| 1694 } else { | |
| 1695 Label call_builtin; | |
| 1696 | |
| 1697 if (argc == 1) { // Otherwise fall through to call builtin. | |
| 1698 Label attempt_to_grow_elements, with_write_barrier, check_double; | |
| 1699 | |
| 1700 // Get the elements array of the object. | |
| 1701 __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset)); | |
| 1702 | |
| 1703 // Check that the elements are in fast mode and writable. | |
| 1704 __ cmp(FieldOperand(edi, HeapObject::kMapOffset), | |
| 1705 Immediate(factory()->fixed_array_map())); | |
| 1706 __ j(not_equal, &check_double); | |
| 1707 | |
| 1708 // Get the array's length into eax and calculate new length. | |
| 1709 __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset)); | |
| 1710 STATIC_ASSERT(kSmiTagSize == 1); | |
| 1711 STATIC_ASSERT(kSmiTag == 0); | |
| 1712 __ add(eax, Immediate(Smi::FromInt(argc))); | |
| 1713 | |
| 1714 // Get the elements' length into ecx. | |
| 1715 __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); | |
| 1716 | |
| 1717 // Check if we could survive without allocation. | |
| 1718 __ cmp(eax, ecx); | |
| 1719 __ j(greater, &attempt_to_grow_elements); | |
| 1720 | |
| 1721 // Check if value is a smi. | |
| 1722 __ mov(ecx, Operand(esp, argc * kPointerSize)); | |
| 1723 __ JumpIfNotSmi(ecx, &with_write_barrier); | |
| 1724 | |
| 1725 // Save new length. | |
| 1726 __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax); | |
| 1727 | |
| 1728 // Store the value. | |
| 1729 __ mov(FieldOperand(edi, | |
| 1730 eax, | |
| 1731 times_half_pointer_size, | |
| 1732 FixedArray::kHeaderSize - argc * kPointerSize), | |
| 1733 ecx); | |
| 1734 | |
| 1735 __ ret((argc + 1) * kPointerSize); | |
| 1736 | |
| 1737 __ bind(&check_double); | |
| 1738 | |
| 1739 | |
| 1740 // Check that the elements are in double mode. | |
| 1741 __ cmp(FieldOperand(edi, HeapObject::kMapOffset), | |
| 1742 Immediate(factory()->fixed_double_array_map())); | |
| 1743 __ j(not_equal, &call_builtin); | |
| 1744 | |
| 1745 // Get the array's length into eax and calculate new length. | |
| 1746 __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset)); | |
| 1747 STATIC_ASSERT(kSmiTagSize == 1); | |
| 1748 STATIC_ASSERT(kSmiTag == 0); | |
| 1749 __ add(eax, Immediate(Smi::FromInt(argc))); | |
| 1750 | |
| 1751 // Get the elements' length into ecx. | |
| 1752 __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); | |
| 1753 | |
| 1754 // Check if we could survive without allocation. | |
| 1755 __ cmp(eax, ecx); | |
| 1756 __ j(greater, &call_builtin); | |
| 1757 | |
| 1758 __ mov(ecx, Operand(esp, argc * kPointerSize)); | |
| 1759 __ StoreNumberToDoubleElements( | |
| 1760 ecx, edi, eax, ecx, xmm0, &call_builtin, true, argc * kDoubleSize); | |
| 1761 | |
| 1762 // Save new length. | |
| 1763 __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax); | |
| 1764 __ ret((argc + 1) * kPointerSize); | |
| 1765 | |
| 1766 __ bind(&with_write_barrier); | |
| 1767 | |
| 1768 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset)); | |
| 1769 | |
| 1770 if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) { | |
| 1771 Label fast_object, not_fast_object; | |
| 1772 __ CheckFastObjectElements(ebx, ¬_fast_object, Label::kNear); | |
| 1773 __ jmp(&fast_object); | |
| 1774 // In case of fast smi-only, convert to fast object, otherwise bail out. | |
| 1775 __ bind(¬_fast_object); | |
| 1776 __ CheckFastSmiElements(ebx, &call_builtin); | |
| 1777 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), | |
| 1778 Immediate(factory()->heap_number_map())); | |
| 1779 __ j(equal, &call_builtin); | |
| 1780 // edi: elements array | |
| 1781 // edx: receiver | |
| 1782 // ebx: map | |
| 1783 Label try_holey_map; | |
| 1784 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, | |
| 1785 FAST_ELEMENTS, | |
| 1786 ebx, | |
| 1787 edi, | |
| 1788 &try_holey_map); | |
| 1789 | |
| 1790 ElementsTransitionGenerator:: | |
| 1791 GenerateMapChangeElementsTransition(masm(), | |
| 1792 DONT_TRACK_ALLOCATION_SITE, | |
| 1793 NULL); | |
| 1794 // Restore edi. | |
| 1795 __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset)); | |
| 1796 __ jmp(&fast_object); | |
| 1797 | |
| 1798 __ bind(&try_holey_map); | |
| 1799 __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS, | |
| 1800 FAST_HOLEY_ELEMENTS, | |
| 1801 ebx, | |
| 1802 edi, | |
| 1803 &call_builtin); | |
| 1804 ElementsTransitionGenerator:: | |
| 1805 GenerateMapChangeElementsTransition(masm(), | |
| 1806 DONT_TRACK_ALLOCATION_SITE, | |
| 1807 NULL); | |
| 1808 // Restore edi. | |
| 1809 __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset)); | |
| 1810 __ bind(&fast_object); | |
| 1811 } else { | |
| 1812 __ CheckFastObjectElements(ebx, &call_builtin); | |
| 1813 } | |
| 1814 | |
| 1815 // Save new length. | |
| 1816 __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax); | |
| 1817 | |
| 1818 // Store the value. | |
| 1819 __ lea(edx, FieldOperand(edi, | |
| 1820 eax, times_half_pointer_size, | |
| 1821 FixedArray::kHeaderSize - argc * kPointerSize)); | |
| 1822 __ mov(Operand(edx, 0), ecx); | |
| 1823 | |
| 1824 __ RecordWrite(edi, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, | |
| 1825 OMIT_SMI_CHECK); | |
| 1826 | |
| 1827 __ ret((argc + 1) * kPointerSize); | |
| 1828 | |
| 1829 __ bind(&attempt_to_grow_elements); | |
| 1830 if (!FLAG_inline_new) { | |
| 1831 __ jmp(&call_builtin); | |
| 1832 } | |
| 1833 | |
| 1834 __ mov(ebx, Operand(esp, argc * kPointerSize)); | |
| 1835 // Growing elements that are SMI-only requires special handling in case | |
| 1836 // the new element is non-Smi. For now, delegate to the builtin. | |
| 1837 Label no_fast_elements_check; | |
| 1838 __ JumpIfSmi(ebx, &no_fast_elements_check); | |
| 1839 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); | |
| 1840 __ CheckFastObjectElements(ecx, &call_builtin, Label::kFar); | |
| 1841 __ bind(&no_fast_elements_check); | |
| 1842 | |
| 1843 // We could be lucky and the elements array could be at the top of | |
| 1844 // new-space. In this case we can just grow it in place by moving the | |
| 1845 // allocation pointer up. | |
| 1846 | |
| 1847 ExternalReference new_space_allocation_top = | |
| 1848 ExternalReference::new_space_allocation_top_address(isolate()); | |
| 1849 ExternalReference new_space_allocation_limit = | |
| 1850 ExternalReference::new_space_allocation_limit_address(isolate()); | |
| 1851 | |
| 1852 const int kAllocationDelta = 4; | |
| 1853 // Load top. | |
| 1854 __ mov(ecx, Operand::StaticVariable(new_space_allocation_top)); | |
| 1855 | |
| 1856 // Check if it's the end of elements. | |
| 1857 __ lea(edx, FieldOperand(edi, | |
| 1858 eax, times_half_pointer_size, | |
| 1859 FixedArray::kHeaderSize - argc * kPointerSize)); | |
| 1860 __ cmp(edx, ecx); | |
| 1861 __ j(not_equal, &call_builtin); | |
| 1862 __ add(ecx, Immediate(kAllocationDelta * kPointerSize)); | |
| 1863 __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit)); | |
| 1864 __ j(above, &call_builtin); | |
| 1865 | |
| 1866 // We fit and could grow elements. | |
| 1867 __ mov(Operand::StaticVariable(new_space_allocation_top), ecx); | |
| 1868 | |
| 1869 // Push the argument... | |
| 1870 __ mov(Operand(edx, 0), ebx); | |
| 1871 // ... and fill the rest with holes. | |
| 1872 for (int i = 1; i < kAllocationDelta; i++) { | |
| 1873 __ mov(Operand(edx, i * kPointerSize), | |
| 1874 Immediate(factory()->the_hole_value())); | |
| 1875 } | |
| 1876 | |
| 1877 // We know the elements array is in new space so we don't need the | |
| 1878 // remembered set, but we just pushed a value onto it so we may have to | |
| 1879 // tell the incremental marker to rescan the object that we just grew. We | |
| 1880 // don't need to worry about the holes because they are in old space and | |
| 1881 // already marked black. | |
| 1882 __ RecordWrite(edi, edx, ebx, kDontSaveFPRegs, OMIT_REMEMBERED_SET); | |
| 1883 | |
| 1884 // Restore receiver to edx as finish sequence assumes it's here. | |
| 1885 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); | |
| 1886 | |
| 1887 // Increment element's and array's sizes. | |
| 1888 __ add(FieldOperand(edi, FixedArray::kLengthOffset), | |
| 1889 Immediate(Smi::FromInt(kAllocationDelta))); | |
| 1890 | |
| 1891 // NOTE: This only happens in new-space, where we don't | |
| 1892 // care about the black-byte-count on pages. Otherwise we should | |
| 1893 // update that too if the object is black. | |
| 1894 | |
| 1895 __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax); | |
| 1896 | |
| 1897 __ ret((argc + 1) * kPointerSize); | |
| 1898 } | |
| 1899 | |
| 1900 __ bind(&call_builtin); | |
| 1901 __ TailCallExternalReference( | |
| 1902 ExternalReference(Builtins::c_ArrayPush, isolate()), | |
| 1903 argc + 1, | |
| 1904 1); | |
| 1905 } | |
| 1906 | |
| 1907 HandlerFrontendFooter(&miss); | |
| 1908 | |
| 1909 // Return the generated code. | |
| 1910 return GetCode(type, name); | |
| 1911 } | |
| 1912 | |
| 1913 | |
| 1914 Handle<Code> CallStubCompiler::CompileArrayPopCall( | |
| 1915 Handle<Object> object, | |
| 1916 Handle<JSObject> holder, | |
| 1917 Handle<Cell> cell, | |
| 1918 Handle<JSFunction> function, | |
| 1919 Handle<String> name, | |
| 1920 Code::StubType type) { | |
| 1921 // If object is not an array or is observed or sealed, bail out to regular | |
| 1922 // call. | |
| 1923 if (!object->IsJSArray() || | |
| 1924 !cell.is_null() || | |
| 1925 Handle<JSArray>::cast(object)->map()->is_observed() || | |
| 1926 !Handle<JSArray>::cast(object)->map()->is_extensible()) { | |
| 1927 return Handle<Code>::null(); | |
| 1928 } | |
| 1929 | |
| 1930 Label miss, return_undefined, call_builtin; | |
| 1931 | |
| 1932 HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss); | |
| 1933 | |
| 1934 // Get the elements array of the object. | |
| 1935 __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset)); | |
| 1936 | |
| 1937 // Check that the elements are in fast mode and writable. | |
| 1938 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), | |
| 1939 Immediate(factory()->fixed_array_map())); | |
| 1940 __ j(not_equal, &call_builtin); | |
| 1941 | |
| 1942 // Get the array's length into ecx and calculate new length. | |
| 1943 __ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset)); | |
| 1944 __ sub(ecx, Immediate(Smi::FromInt(1))); | |
| 1945 __ j(negative, &return_undefined); | |
| 1946 | |
| 1947 // Get the last element. | |
| 1948 STATIC_ASSERT(kSmiTagSize == 1); | |
| 1949 STATIC_ASSERT(kSmiTag == 0); | |
| 1950 __ mov(eax, FieldOperand(ebx, | |
| 1951 ecx, times_half_pointer_size, | |
| 1952 FixedArray::kHeaderSize)); | |
| 1953 __ cmp(eax, Immediate(factory()->the_hole_value())); | |
| 1954 __ j(equal, &call_builtin); | |
| 1955 | |
| 1956 // Set the array's length. | |
| 1957 __ mov(FieldOperand(edx, JSArray::kLengthOffset), ecx); | |
| 1958 | |
| 1959 // Fill with the hole. | |
| 1960 __ mov(FieldOperand(ebx, | |
| 1961 ecx, times_half_pointer_size, | |
| 1962 FixedArray::kHeaderSize), | |
| 1963 Immediate(factory()->the_hole_value())); | |
| 1964 const int argc = arguments().immediate(); | |
| 1965 __ ret((argc + 1) * kPointerSize); | |
| 1966 | |
| 1967 __ bind(&return_undefined); | |
| 1968 __ mov(eax, Immediate(factory()->undefined_value())); | |
| 1969 __ ret((argc + 1) * kPointerSize); | |
| 1970 | |
| 1971 __ bind(&call_builtin); | |
| 1972 __ TailCallExternalReference( | |
| 1973 ExternalReference(Builtins::c_ArrayPop, isolate()), | |
| 1974 argc + 1, | |
| 1975 1); | |
| 1976 | |
| 1977 HandlerFrontendFooter(&miss); | |
| 1978 | |
| 1979 // Return the generated code. | |
| 1980 return GetCode(type, name); | |
| 1981 } | |
| 1982 | |
| 1983 | |
| 1984 Handle<Code> CallStubCompiler::CompileFastApiCall( | |
| 1985 const CallOptimization& optimization, | |
| 1986 Handle<Object> object, | |
| 1987 Handle<JSObject> holder, | |
| 1988 Handle<Cell> cell, | |
| 1989 Handle<JSFunction> function, | |
| 1990 Handle<String> name) { | |
| 1991 ASSERT(optimization.is_simple_api_call()); | |
| 1992 // Bail out if object is a global object as we don't want to | |
| 1993 // repatch it to global receiver. | |
| 1994 if (object->IsGlobalObject()) return Handle<Code>::null(); | |
| 1995 if (!cell.is_null()) return Handle<Code>::null(); | |
| 1996 if (!object->IsJSObject()) return Handle<Code>::null(); | |
| 1997 int depth = optimization.GetPrototypeDepthOfExpectedType( | |
| 1998 Handle<JSObject>::cast(object), holder); | |
| 1999 if (depth == kInvalidProtoDepth) return Handle<Code>::null(); | |
| 2000 | |
| 2001 Label miss, miss_before_stack_reserved; | |
| 2002 | |
| 2003 GenerateNameCheck(name, &miss_before_stack_reserved); | |
| 2004 | |
| 2005 // Get the receiver from the stack. | |
| 2006 const int argc = arguments().immediate(); | |
| 2007 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); | |
| 2008 | |
| 2009 // Check that the receiver isn't a smi. | |
| 2010 __ JumpIfSmi(edx, &miss_before_stack_reserved); | |
| 2011 | |
| 2012 Counters* counters = isolate()->counters(); | |
| 2013 __ IncrementCounter(counters->call_const(), 1); | |
| 2014 __ IncrementCounter(counters->call_const_fast_api(), 1); | |
| 2015 | |
| 2016 // Allocate space for v8::Arguments implicit values. Must be initialized | |
| 2017 // before calling any runtime function. | |
| 2018 __ sub(esp, Immediate(kFastApiCallArguments * kPointerSize)); | |
| 2019 | |
| 2020 // Check that the maps haven't changed and find a Holder as a side effect. | |
| 2021 CheckPrototypes(IC::CurrentTypeOf(object, isolate()), edx, holder, | |
| 2022 ebx, eax, edi, name, depth, &miss); | |
| 2023 | |
| 2024 // Move the return address on top of the stack. | |
| 2025 __ mov(eax, Operand(esp, kFastApiCallArguments * kPointerSize)); | |
| 2026 __ mov(Operand(esp, 0 * kPointerSize), eax); | |
| 2027 | |
| 2028 // esp[2 * kPointerSize] is uninitialized, esp[3 * kPointerSize] contains | |
| 2029 // duplicate of return address and will be overwritten. | |
| 2030 GenerateFastApiCall(masm(), optimization, argc); | |
| 2031 | |
| 2032 __ bind(&miss); | |
| 2033 __ add(esp, Immediate(kFastApiCallArguments * kPointerSize)); | |
| 2034 | |
| 2035 HandlerFrontendFooter(&miss_before_stack_reserved); | |
| 2036 | |
| 2037 // Return the generated code. | |
| 2038 return GetCode(function); | |
| 2039 } | |
| 2040 | |
| 2041 | |
| 2042 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) { | 1218 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) { |
| 2043 Label success; | 1219 Label success; |
| 2044 // Check that the object is a boolean. | 1220 // Check that the object is a boolean. |
| 2045 __ cmp(object, factory()->true_value()); | 1221 __ cmp(object, factory()->true_value()); |
| 2046 __ j(equal, &success); | 1222 __ j(equal, &success); |
| 2047 __ cmp(object, factory()->false_value()); | 1223 __ cmp(object, factory()->false_value()); |
| 2048 __ j(not_equal, miss); | 1224 __ j(not_equal, miss); |
| 2049 __ bind(&success); | 1225 __ bind(&success); |
| 2050 } | 1226 } |
| 2051 | 1227 |
| 2052 | 1228 |
| 2053 void CallStubCompiler::PatchImplicitReceiver(Handle<Object> object) { | |
| 2054 if (object->IsGlobalObject()) { | |
| 2055 const int argc = arguments().immediate(); | |
| 2056 const int receiver_offset = (argc + 1) * kPointerSize; | |
| 2057 __ mov(Operand(esp, receiver_offset), | |
| 2058 isolate()->factory()->undefined_value()); | |
| 2059 } | |
| 2060 } | |
| 2061 | |
| 2062 | |
| 2063 Register CallStubCompiler::HandlerFrontendHeader(Handle<Object> object, | |
| 2064 Handle<JSObject> holder, | |
| 2065 Handle<Name> name, | |
| 2066 CheckType check, | |
| 2067 Label* miss) { | |
| 2068 GenerateNameCheck(name, miss); | |
| 2069 | |
| 2070 Register reg = edx; | |
| 2071 | |
| 2072 const int argc = arguments().immediate(); | |
| 2073 const int receiver_offset = (argc + 1) * kPointerSize; | |
| 2074 __ mov(reg, Operand(esp, receiver_offset)); | |
| 2075 | |
| 2076 // Check that the receiver isn't a smi. | |
| 2077 if (check != NUMBER_CHECK) { | |
| 2078 __ JumpIfSmi(reg, miss); | |
| 2079 } | |
| 2080 | |
| 2081 // Make sure that it's okay not to patch the on stack receiver | |
| 2082 // unless we're doing a receiver map check. | |
| 2083 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK); | |
| 2084 switch (check) { | |
| 2085 case RECEIVER_MAP_CHECK: | |
| 2086 __ IncrementCounter(isolate()->counters()->call_const(), 1); | |
| 2087 | |
| 2088 // Check that the maps haven't changed. | |
| 2089 reg = CheckPrototypes(IC::CurrentTypeOf(object, isolate()), reg, holder, | |
| 2090 ebx, eax, edi, name, miss); | |
| 2091 | |
| 2092 break; | |
| 2093 | |
| 2094 case STRING_CHECK: { | |
| 2095 // Check that the object is a string. | |
| 2096 __ CmpObjectType(reg, FIRST_NONSTRING_TYPE, eax); | |
| 2097 __ j(above_equal, miss); | |
| 2098 // Check that the maps starting from the prototype haven't changed. | |
| 2099 GenerateDirectLoadGlobalFunctionPrototype( | |
| 2100 masm(), Context::STRING_FUNCTION_INDEX, eax, miss); | |
| 2101 break; | |
| 2102 } | |
| 2103 case SYMBOL_CHECK: { | |
| 2104 // Check that the object is a symbol. | |
| 2105 __ CmpObjectType(reg, SYMBOL_TYPE, eax); | |
| 2106 __ j(not_equal, miss); | |
| 2107 // Check that the maps starting from the prototype haven't changed. | |
| 2108 GenerateDirectLoadGlobalFunctionPrototype( | |
| 2109 masm(), Context::SYMBOL_FUNCTION_INDEX, eax, miss); | |
| 2110 break; | |
| 2111 } | |
| 2112 case NUMBER_CHECK: { | |
| 2113 Label fast; | |
| 2114 // Check that the object is a smi or a heap number. | |
| 2115 __ JumpIfSmi(reg, &fast); | |
| 2116 __ CmpObjectType(reg, HEAP_NUMBER_TYPE, eax); | |
| 2117 __ j(not_equal, miss); | |
| 2118 __ bind(&fast); | |
| 2119 // Check that the maps starting from the prototype haven't changed. | |
| 2120 GenerateDirectLoadGlobalFunctionPrototype( | |
| 2121 masm(), Context::NUMBER_FUNCTION_INDEX, eax, miss); | |
| 2122 break; | |
| 2123 } | |
| 2124 case BOOLEAN_CHECK: { | |
| 2125 GenerateBooleanCheck(reg, miss); | |
| 2126 // Check that the maps starting from the prototype haven't changed. | |
| 2127 GenerateDirectLoadGlobalFunctionPrototype( | |
| 2128 masm(), Context::BOOLEAN_FUNCTION_INDEX, eax, miss); | |
| 2129 break; | |
| 2130 } | |
| 2131 } | |
| 2132 | |
| 2133 if (check != RECEIVER_MAP_CHECK) { | |
| 2134 Handle<Object> prototype(object->GetPrototype(isolate()), isolate()); | |
| 2135 reg = CheckPrototypes( | |
| 2136 IC::CurrentTypeOf(prototype, isolate()), | |
| 2137 eax, holder, ebx, edx, edi, name, miss); | |
| 2138 } | |
| 2139 | |
| 2140 return reg; | |
| 2141 } | |
| 2142 | |
| 2143 | |
| 2144 void CallStubCompiler::GenerateJumpFunction(Handle<Object> object, | |
| 2145 Register function, | |
| 2146 Label* miss) { | |
| 2147 // Check that the function really is a function. | |
| 2148 GenerateFunctionCheck(function, ebx, miss); | |
| 2149 | |
| 2150 if (!function.is(edi)) __ mov(edi, function); | |
| 2151 PatchImplicitReceiver(object); | |
| 2152 | |
| 2153 // Invoke the function. | |
| 2154 __ InvokeFunction(edi, arguments(), JUMP_FUNCTION, NullCallWrapper()); | |
| 2155 } | |
| 2156 | |
| 2157 | |
| 2158 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object, | |
| 2159 Handle<JSObject> holder, | |
| 2160 Handle<Name> name) { | |
| 2161 Label miss; | |
| 2162 | |
| 2163 GenerateNameCheck(name, &miss); | |
| 2164 | |
| 2165 // Get the number of arguments. | |
| 2166 const int argc = arguments().immediate(); | |
| 2167 | |
| 2168 LookupResult lookup(isolate()); | |
| 2169 LookupPostInterceptor(holder, name, &lookup); | |
| 2170 | |
| 2171 // Get the receiver from the stack. | |
| 2172 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); | |
| 2173 | |
| 2174 CallInterceptorCompiler compiler(this, arguments(), ecx); | |
| 2175 compiler.Compile(masm(), object, holder, name, &lookup, edx, ebx, edi, eax, | |
| 2176 &miss); | |
| 2177 | |
| 2178 // Restore receiver. | |
| 2179 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); | |
| 2180 | |
| 2181 GenerateJumpFunction(object, eax, &miss); | |
| 2182 | |
| 2183 HandlerFrontendFooter(&miss); | |
| 2184 | |
| 2185 // Return the generated code. | |
| 2186 return GetCode(Code::FAST, name); | |
| 2187 } | |
| 2188 | |
| 2189 | |
| 2190 Handle<Code> CallStubCompiler::CompileCallGlobal( | |
| 2191 Handle<JSObject> object, | |
| 2192 Handle<GlobalObject> holder, | |
| 2193 Handle<PropertyCell> cell, | |
| 2194 Handle<JSFunction> function, | |
| 2195 Handle<Name> name) { | |
| 2196 if (HasCustomCallGenerator(function)) { | |
| 2197 Handle<Code> code = CompileCustomCall( | |
| 2198 object, holder, cell, function, Handle<String>::cast(name), | |
| 2199 Code::NORMAL); | |
| 2200 // A null handle means bail out to the regular compiler code below. | |
| 2201 if (!code.is_null()) return code; | |
| 2202 } | |
| 2203 | |
| 2204 Label miss; | |
| 2205 HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss); | |
| 2206 // Potentially loads a closure that matches the shared function info of the | |
| 2207 // function, rather than function. | |
| 2208 GenerateLoadFunctionFromCell(cell, function, &miss); | |
| 2209 GenerateJumpFunction(object, edi, function); | |
| 2210 | |
| 2211 HandlerFrontendFooter(&miss); | |
| 2212 | |
| 2213 // Return the generated code. | |
| 2214 return GetCode(Code::NORMAL, name); | |
| 2215 } | |
| 2216 | |
| 2217 | |
| 2218 Handle<Code> StoreStubCompiler::CompileStoreCallback( | 1229 Handle<Code> StoreStubCompiler::CompileStoreCallback( |
| 2219 Handle<JSObject> object, | 1230 Handle<JSObject> object, |
| 2220 Handle<JSObject> holder, | 1231 Handle<JSObject> holder, |
| 2221 Handle<Name> name, | 1232 Handle<Name> name, |
| 2222 Handle<ExecutableAccessorInfo> callback) { | 1233 Handle<ExecutableAccessorInfo> callback) { |
| 2223 Register holder_reg = HandlerFrontend( | 1234 Register holder_reg = HandlerFrontend( |
| 2224 IC::CurrentTypeOf(object, isolate()), receiver(), holder, name); | 1235 IC::CurrentTypeOf(object, isolate()), receiver(), holder, name); |
| 2225 | 1236 |
| 2226 __ pop(scratch1()); // remove the return address | 1237 __ pop(scratch1()); // remove the return address |
| 2227 __ push(receiver()); | 1238 __ push(receiver()); |
| 2228 __ push(holder_reg); | 1239 __ push(holder_reg); |
| 2229 __ Push(callback); | 1240 __ Push(callback); |
| 2230 __ Push(name); | 1241 __ Push(name); |
| 2231 __ push(value()); | 1242 __ push(value()); |
| 2232 __ push(scratch1()); // restore return address | 1243 __ push(scratch1()); // restore return address |
| 2233 | 1244 |
| 2234 // Do tail-call to the runtime system. | 1245 // Do tail-call to the runtime system. |
| 2235 ExternalReference store_callback_property = | 1246 ExternalReference store_callback_property = |
| 2236 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate()); | 1247 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate()); |
| 2237 __ TailCallExternalReference(store_callback_property, 5, 1); | 1248 __ TailCallExternalReference(store_callback_property, 5, 1); |
| 2238 | 1249 |
| 2239 // Return the generated code. | 1250 // Return the generated code. |
| 2240 return GetCode(kind(), Code::FAST, name); | 1251 return GetCode(kind(), Code::FAST, name); |
| 2241 } | 1252 } |
| 2242 | 1253 |
| 2243 | 1254 |
| 2244 Handle<Code> StoreStubCompiler::CompileStoreCallback( | |
| 2245 Handle<JSObject> object, | |
| 2246 Handle<JSObject> holder, | |
| 2247 Handle<Name> name, | |
| 2248 const CallOptimization& call_optimization) { | |
| 2249 HandlerFrontend(IC::CurrentTypeOf(object, isolate()), | |
| 2250 receiver(), holder, name); | |
| 2251 | |
| 2252 Register values[] = { value() }; | |
| 2253 GenerateFastApiCall( | |
| 2254 masm(), call_optimization, receiver(), scratch1(), | |
| 2255 scratch2(), this->name(), 1, values); | |
| 2256 | |
| 2257 // Return the generated code. | |
| 2258 return GetCode(kind(), Code::FAST, name); | |
| 2259 } | |
| 2260 | |
| 2261 | |
| 2262 #undef __ | 1255 #undef __ |
| 2263 #define __ ACCESS_MASM(masm) | 1256 #define __ ACCESS_MASM(masm) |
| 2264 | 1257 |
| 2265 | 1258 |
| 2266 void StoreStubCompiler::GenerateStoreViaSetter( | 1259 void StoreStubCompiler::GenerateStoreViaSetter( |
| 2267 MacroAssembler* masm, | 1260 MacroAssembler* masm, |
| 1261 Handle<HeapType> type, |
| 2268 Handle<JSFunction> setter) { | 1262 Handle<JSFunction> setter) { |
| 2269 // ----------- S t a t e ------------- | 1263 // ----------- S t a t e ------------- |
| 2270 // -- eax : value | 1264 // -- eax : value |
| 2271 // -- ecx : name | 1265 // -- ecx : name |
| 2272 // -- edx : receiver | 1266 // -- edx : receiver |
| 2273 // -- esp[0] : return address | 1267 // -- esp[0] : return address |
| 2274 // ----------------------------------- | 1268 // ----------------------------------- |
| 2275 { | 1269 { |
| 2276 FrameScope scope(masm, StackFrame::INTERNAL); | 1270 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1271 Register receiver = edx; |
| 1272 Register value = eax; |
| 2277 | 1273 |
| 2278 // Save value register, so we can restore it later. | 1274 // Save value register, so we can restore it later. |
| 2279 __ push(eax); | 1275 __ push(value); |
| 2280 | 1276 |
| 2281 if (!setter.is_null()) { | 1277 if (!setter.is_null()) { |
| 2282 // Call the JavaScript setter with receiver and value on the stack. | 1278 // Call the JavaScript setter with receiver and value on the stack. |
| 2283 __ push(edx); | 1279 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { |
| 2284 __ push(eax); | 1280 // Swap in the global receiver. |
| 1281 __ mov(receiver, |
| 1282 FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset)); |
| 1283 } |
| 1284 __ push(receiver); |
| 1285 __ push(value); |
| 2285 ParameterCount actual(1); | 1286 ParameterCount actual(1); |
| 2286 ParameterCount expected(setter); | 1287 ParameterCount expected(setter); |
| 2287 __ InvokeFunction(setter, expected, actual, | 1288 __ InvokeFunction(setter, expected, actual, |
| 2288 CALL_FUNCTION, NullCallWrapper()); | 1289 CALL_FUNCTION, NullCallWrapper()); |
| 2289 } else { | 1290 } else { |
| 2290 // If we generate a global code snippet for deoptimization only, remember | 1291 // If we generate a global code snippet for deoptimization only, remember |
| 2291 // the place to continue after deoptimization. | 1292 // the place to continue after deoptimization. |
| 2292 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); | 1293 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); |
| 2293 } | 1294 } |
| 2294 | 1295 |
| (...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2346 } | 1347 } |
| 2347 __ bind(&miss); | 1348 __ bind(&miss); |
| 2348 TailCallBuiltin(masm(), MissBuiltin(kind())); | 1349 TailCallBuiltin(masm(), MissBuiltin(kind())); |
| 2349 | 1350 |
| 2350 // Return the generated code. | 1351 // Return the generated code. |
| 2351 return GetICCode( | 1352 return GetICCode( |
| 2352 kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC); | 1353 kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC); |
| 2353 } | 1354 } |
| 2354 | 1355 |
| 2355 | 1356 |
| 2356 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<Type> type, | 1357 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type, |
| 2357 Handle<JSObject> last, | 1358 Handle<JSObject> last, |
| 2358 Handle<Name> name) { | 1359 Handle<Name> name) { |
| 2359 NonexistentHandlerFrontend(type, last, name); | 1360 NonexistentHandlerFrontend(type, last, name); |
| 2360 | 1361 |
| 2361 // Return undefined if maps of the full prototype chain are still the | 1362 // Return undefined if maps of the full prototype chain are still the |
| 2362 // same and no global property with this name contains a value. | 1363 // same and no global property with this name contains a value. |
| 2363 __ mov(eax, isolate()->factory()->undefined_value()); | 1364 __ mov(eax, isolate()->factory()->undefined_value()); |
| 2364 __ ret(0); | 1365 __ ret(0); |
| 2365 | 1366 |
| 2366 // Return the generated code. | 1367 // Return the generated code. |
| (...skipping 27 matching lines...) Expand all Loading... |
| 2394 static Register registers[] = { edx, ecx, eax, ebx, edi, no_reg }; | 1395 static Register registers[] = { edx, ecx, eax, ebx, edi, no_reg }; |
| 2395 return registers; | 1396 return registers; |
| 2396 } | 1397 } |
| 2397 | 1398 |
| 2398 | 1399 |
| 2399 #undef __ | 1400 #undef __ |
| 2400 #define __ ACCESS_MASM(masm) | 1401 #define __ ACCESS_MASM(masm) |
| 2401 | 1402 |
| 2402 | 1403 |
| 2403 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm, | 1404 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm, |
| 1405 Handle<HeapType> type, |
| 2404 Register receiver, | 1406 Register receiver, |
| 2405 Handle<JSFunction> getter) { | 1407 Handle<JSFunction> getter) { |
| 2406 { | 1408 { |
| 2407 FrameScope scope(masm, StackFrame::INTERNAL); | 1409 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2408 | 1410 |
| 2409 if (!getter.is_null()) { | 1411 if (!getter.is_null()) { |
| 2410 // Call the JavaScript getter with the receiver on the stack. | 1412 // Call the JavaScript getter with the receiver on the stack. |
| 1413 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { |
| 1414 // Swap in the global receiver. |
| 1415 __ mov(receiver, |
| 1416 FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset)); |
| 1417 } |
| 2411 __ push(receiver); | 1418 __ push(receiver); |
| 2412 ParameterCount actual(0); | 1419 ParameterCount actual(0); |
| 2413 ParameterCount expected(getter); | 1420 ParameterCount expected(getter); |
| 2414 __ InvokeFunction(getter, expected, actual, | 1421 __ InvokeFunction(getter, expected, actual, |
| 2415 CALL_FUNCTION, NullCallWrapper()); | 1422 CALL_FUNCTION, NullCallWrapper()); |
| 2416 } else { | 1423 } else { |
| 2417 // If we generate a global code snippet for deoptimization only, remember | 1424 // If we generate a global code snippet for deoptimization only, remember |
| 2418 // the place to continue after deoptimization. | 1425 // the place to continue after deoptimization. |
| 2419 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset()); | 1426 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset()); |
| 2420 } | 1427 } |
| 2421 | 1428 |
| 2422 // Restore context register. | 1429 // Restore context register. |
| 2423 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 1430 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2424 } | 1431 } |
| 2425 __ ret(0); | 1432 __ ret(0); |
| 2426 } | 1433 } |
| 2427 | 1434 |
| 2428 | 1435 |
| 2429 #undef __ | 1436 #undef __ |
| 2430 #define __ ACCESS_MASM(masm()) | 1437 #define __ ACCESS_MASM(masm()) |
| 2431 | 1438 |
| 2432 | 1439 |
| 2433 Handle<Code> LoadStubCompiler::CompileLoadGlobal( | 1440 Handle<Code> LoadStubCompiler::CompileLoadGlobal( |
| 2434 Handle<Type> type, | 1441 Handle<HeapType> type, |
| 2435 Handle<GlobalObject> global, | 1442 Handle<GlobalObject> global, |
| 2436 Handle<PropertyCell> cell, | 1443 Handle<PropertyCell> cell, |
| 2437 Handle<Name> name, | 1444 Handle<Name> name, |
| 2438 bool is_dont_delete) { | 1445 bool is_dont_delete) { |
| 2439 Label miss; | 1446 Label miss; |
| 2440 | 1447 |
| 2441 HandlerFrontendHeader(type, receiver(), global, name, &miss); | 1448 HandlerFrontendHeader(type, receiver(), global, name, &miss); |
| 2442 // Get the value from the cell. | 1449 // Get the value from the cell. |
| 2443 if (Serializer::enabled()) { | 1450 if (Serializer::enabled()) { |
| 2444 __ mov(eax, Immediate(cell)); | 1451 __ mov(eax, Immediate(cell)); |
| 2445 __ mov(eax, FieldOperand(eax, PropertyCell::kValueOffset)); | 1452 __ mov(eax, FieldOperand(eax, PropertyCell::kValueOffset)); |
| 2446 } else { | 1453 } else { |
| 2447 __ mov(eax, Operand::ForCell(cell)); | 1454 __ mov(eax, Operand::ForCell(cell)); |
| 2448 } | 1455 } |
| 2449 | 1456 |
| 2450 // Check for deleted property if property can actually be deleted. | 1457 // Check for deleted property if property can actually be deleted. |
| 2451 if (!is_dont_delete) { | 1458 if (!is_dont_delete) { |
| 2452 __ cmp(eax, factory()->the_hole_value()); | 1459 __ cmp(eax, factory()->the_hole_value()); |
| 2453 __ j(equal, &miss); | 1460 __ j(equal, &miss); |
| 2454 } else if (FLAG_debug_code) { | 1461 } else if (FLAG_debug_code) { |
| 2455 __ cmp(eax, factory()->the_hole_value()); | 1462 __ cmp(eax, factory()->the_hole_value()); |
| 2456 __ Check(not_equal, kDontDeleteCellsCannotContainTheHole); | 1463 __ Check(not_equal, kDontDeleteCellsCannotContainTheHole); |
| 2457 } | 1464 } |
| 2458 | 1465 |
| 2459 HandlerFrontendFooter(name, &miss); | |
| 2460 | |
| 2461 Counters* counters = isolate()->counters(); | 1466 Counters* counters = isolate()->counters(); |
| 2462 __ IncrementCounter(counters->named_load_global_stub(), 1); | 1467 __ IncrementCounter(counters->named_load_global_stub(), 1); |
| 2463 // The code above already loads the result into the return register. | 1468 // The code above already loads the result into the return register. |
| 2464 __ ret(0); | 1469 __ ret(0); |
| 2465 | 1470 |
| 1471 HandlerFrontendFooter(name, &miss); |
| 1472 |
| 2466 // Return the generated code. | 1473 // Return the generated code. |
| 2467 return GetCode(kind(), Code::NORMAL, name); | 1474 return GetCode(kind(), Code::NORMAL, name); |
| 2468 } | 1475 } |
| 2469 | 1476 |
| 2470 | 1477 |
| 2471 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC( | 1478 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC( |
| 2472 TypeHandleList* types, | 1479 TypeHandleList* types, |
| 2473 CodeHandleList* handlers, | 1480 CodeHandleList* handlers, |
| 2474 Handle<Name> name, | 1481 Handle<Name> name, |
| 2475 Code::StubType type, | 1482 Code::StubType type, |
| 2476 IcCheckType check) { | 1483 IcCheckType check) { |
| 2477 Label miss; | 1484 Label miss; |
| 2478 | 1485 |
| 2479 if (check == PROPERTY && | 1486 if (check == PROPERTY && |
| 2480 (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) { | 1487 (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) { |
| 2481 __ cmp(this->name(), Immediate(name)); | 1488 __ cmp(this->name(), Immediate(name)); |
| 2482 __ j(not_equal, &miss); | 1489 __ j(not_equal, &miss); |
| 2483 } | 1490 } |
| 2484 | 1491 |
| 2485 Label number_case; | 1492 Label number_case; |
| 2486 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss; | 1493 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss; |
| 2487 __ JumpIfSmi(receiver(), smi_target); | 1494 __ JumpIfSmi(receiver(), smi_target); |
| 2488 | 1495 |
| 2489 Register map_reg = scratch1(); | 1496 Register map_reg = scratch1(); |
| 2490 __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset)); | 1497 __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset)); |
| 2491 int receiver_count = types->length(); | 1498 int receiver_count = types->length(); |
| 2492 int number_of_handled_maps = 0; | 1499 int number_of_handled_maps = 0; |
| 2493 for (int current = 0; current < receiver_count; ++current) { | 1500 for (int current = 0; current < receiver_count; ++current) { |
| 2494 Handle<Type> type = types->at(current); | 1501 Handle<HeapType> type = types->at(current); |
| 2495 Handle<Map> map = IC::TypeToMap(*type, isolate()); | 1502 Handle<Map> map = IC::TypeToMap(*type, isolate()); |
| 2496 if (!map->is_deprecated()) { | 1503 if (!map->is_deprecated()) { |
| 2497 number_of_handled_maps++; | 1504 number_of_handled_maps++; |
| 2498 __ cmp(map_reg, map); | 1505 __ cmp(map_reg, map); |
| 2499 if (type->Is(Type::Number())) { | 1506 if (type->Is(HeapType::Number())) { |
| 2500 ASSERT(!number_case.is_unused()); | 1507 ASSERT(!number_case.is_unused()); |
| 2501 __ bind(&number_case); | 1508 __ bind(&number_case); |
| 2502 } | 1509 } |
| 2503 __ j(equal, handlers->at(current)); | 1510 __ j(equal, handlers->at(current)); |
| 2504 } | 1511 } |
| 2505 } | 1512 } |
| 2506 ASSERT(number_of_handled_maps != 0); | 1513 ASSERT(number_of_handled_maps != 0); |
| 2507 | 1514 |
| 2508 __ bind(&miss); | 1515 __ bind(&miss); |
| 2509 TailCallBuiltin(masm(), MissBuiltin(kind())); | 1516 TailCallBuiltin(masm(), MissBuiltin(kind())); |
| (...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2561 // ----------------------------------- | 1568 // ----------------------------------- |
| 2562 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); | 1569 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); |
| 2563 } | 1570 } |
| 2564 | 1571 |
| 2565 | 1572 |
| 2566 #undef __ | 1573 #undef __ |
| 2567 | 1574 |
| 2568 } } // namespace v8::internal | 1575 } } // namespace v8::internal |
| 2569 | 1576 |
| 2570 #endif // V8_TARGET_ARCH_IA32 | 1577 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |