Index: src/x64/stub-cache-x64.cc
===================================================================
--- src/x64/stub-cache-x64.cc	(revision 5829)
+++ src/x64/stub-cache-x64.cc	(working copy)
@@ -497,6 +497,8 @@
   __ ret(0);
 }
 
+// Number of pointers to be reserved on stack for fast API call.
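+// These are the object passing the type check (set by CheckPrototypes),
+// the api function, and the api call data (see the state comments in
+// GenerateFastApiCall below).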
+static const int kFastApiCallArguments = 3;
 
 // Reserves space for the extra arguments to FastHandleApiCall in the
 // caller's frame.
@@ -508,48 +510,48 @@
   //  -- rsp[8] : last argument in the internal frame of the caller
   // -----------------------------------
   __ movq(scratch, Operand(rsp, 0));
-  __ subq(rsp, Immediate(4 * kPointerSize));
+  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
   __ movq(Operand(rsp, 0), scratch);
   __ Move(scratch, Smi::FromInt(0));
-  __ movq(Operand(rsp, 1 * kPointerSize), scratch);
-  __ movq(Operand(rsp, 2 * kPointerSize), scratch);
-  __ movq(Operand(rsp, 3 * kPointerSize), scratch);
-  __ movq(Operand(rsp, 4 * kPointerSize), scratch);
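+  // Fill the reserved slots with a Smi zero so a GC that scans this
+  // part of the stack only ever sees valid values here.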
+  for (int i = 1; i <= kFastApiCallArguments; i++) {
+    __ movq(Operand(rsp, i * kPointerSize), scratch);
+  }
 }
 
 
 // Undoes the effects of ReserveSpaceForFastApiCall.
 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
   // ----------- S t a t e -------------
-  //  -- rsp[0] : return address
-  //  -- rsp[8] : last fast api call extra argument
+  //  -- rsp[0] : return address.
+  //  -- rsp[8] : last fast api call extra argument.
   //  -- ...
-  //  -- rsp[32] : first fast api call extra argument
-  //  -- rsp[40] : last argument in the internal frame
+  //  -- rsp[kFastApiCallArguments * 8] : first fast api call extra argument.
+  //  -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
+  //                                          frame.
   // -----------------------------------
   __ movq(scratch, Operand(rsp, 0));
-  __ movq(Operand(rsp, 4 * kPointerSize), scratch);
-  __ addq(rsp, Immediate(kPointerSize * 4));
+  __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
+  __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
 }
 
 
 // Generates call to FastHandleApiCall builtin.
-static void GenerateFastApiCall(MacroAssembler* masm,
+static bool GenerateFastApiCall(MacroAssembler* masm,
                                 const CallOptimization& optimization,
-                                int argc) {
+                                int argc,
+                                Failure** failure) {
   // ----------- S t a t e -------------
   //  -- rsp[0] : return address
   //  -- rsp[8] : object passing the type check
   //              (last fast api call extra argument,
   //               set by CheckPrototypes)
-  //  -- rsp[16] : api call data
-  //  -- rsp[24] : api callback
-  //  -- rsp[32] : api function
+  //  -- rsp[16] : api function
   //               (first fast api call extra argument)
-  //  -- rsp[40] : last argument
+  //  -- rsp[24] : api call data
+  //  -- rsp[32] : last argument
   //  -- ...
-  //  -- rsp[(argc + 5) * 8] : first argument
-  //  -- rsp[(argc + 6) * 8] : receiver
+  //  -- rsp[(argc + 3) * 8] : first argument
+  //  -- rsp[(argc + 4) * 8] : receiver
   // -----------------------------------
 
   // Get the function and setup the context.
@@ -558,37 +560,57 @@
   __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
 
   // Pass the additional arguments FastHandleApiCall expects.
-  __ movq(Operand(rsp, 4 * kPointerSize), rdi);
-  bool info_loaded = false;
-  Object* callback = optimization.api_call_info()->callback();
-  if (Heap::InNewSpace(callback)) {
-    info_loaded = true;
-    __ Move(rcx, Handle<CallHandlerInfo>(optimization.api_call_info()));
-    __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kCallbackOffset));
-    __ movq(Operand(rsp, 3 * kPointerSize), rbx);
-  } else {
-    __ Move(Operand(rsp, 3 * kPointerSize), Handle<Object>(callback));
-  }
+  __ movq(Operand(rsp, 2 * kPointerSize), rdi);
   Object* call_data = optimization.api_call_info()->data();
+  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
   if (Heap::InNewSpace(call_data)) {
-    if (!info_loaded) {
-      __ Move(rcx, Handle<CallHandlerInfo>(optimization.api_call_info()));
-    }
+    __ Move(rcx, api_call_info_handle);
     __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
-    __ movq(Operand(rsp, 2 * kPointerSize), rbx);
+    __ movq(Operand(rsp, 3 * kPointerSize), rbx);
   } else {
-    __ Move(Operand(rsp, 2 * kPointerSize), Handle<Object>(call_data));
+    __ Move(Operand(rsp, 3 * kPointerSize), Handle<Object>(call_data));
   }
 
-  // Set the number of arguments.
-  __ movq(rax, Immediate(argc + 4));
+  // Prepare arguments.
+  __ lea(rbx, Operand(rsp, 3 * kPointerSize));
 
-  // Jump to the fast api call builtin (tail call).
-  Handle<Code> code = Handle<Code>(
-      Builtins::builtin(Builtins::FastHandleApiCall));
-  ParameterCount expected(0);
-  __ InvokeCode(code, expected, expected,
-                RelocInfo::CODE_TARGET, JUMP_FUNCTION);
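+  // Unwrap the api callback into the address of the C function to call.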
+  Object* callback = optimization.api_call_info()->callback();
+  Address api_function_address = v8::ToCData<Address>(callback);
+  ApiFunction fun(api_function_address);
+
+#ifdef _WIN64
+  // Win64 uses the first register (rcx) for the returned value.
+  Register arguments_arg = rdx;
+#else
+  Register arguments_arg = rdi;
+#endif
+
+  // Allocate the v8::Arguments structure in the arguments' space since
+  // it's not controlled by GC.
+  const int kApiStackSpace = 4;
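+  // (One stack slot each for v8::Arguments::implicit_args_, values_,
+  // length_ and is_construct_call_, filled in below.)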
+
+  __ PrepareCallApiFunction(argc + kFastApiCallArguments + 1, kApiStackSpace);
+
+  __ movq(StackSpaceOperand(0), rbx);  // v8::Arguments::implicit_args_.
+  __ addq(rbx, Immediate(argc * kPointerSize));
+  __ movq(StackSpaceOperand(1), rbx);  // v8::Arguments::values_.
+  __ Set(StackSpaceOperand(2), argc);  // v8::Arguments::length_.
+  // v8::Arguments::is_construct_call_.
+  __ Set(StackSpaceOperand(3), 0);
+
+  // v8::InvocationCallback's argument.
+  __ lea(arguments_arg, StackSpaceOperand(0));
+  // Emitting a stub call may try to allocate (if the code is not
+  // already generated). Do not allow the assembler to perform a
+  // garbage collection but instead return the allocation failure
+  // object.
+  MaybeObject* result =
+      masm->TryCallApiFunctionAndReturn(&fun);
+  if (result->IsFailure()) {
+    *failure = Failure::cast(result);
+    return false;
+  }
+  return true;
 }
 
 
@@ -601,7 +623,7 @@
         arguments_(arguments),
         name_(name) {}
 
-  void Compile(MacroAssembler* masm,
+  bool Compile(MacroAssembler* masm,
               JSObject* object,
               JSObject* holder,
               String* name,
@@ -610,7 +632,8 @@
               Register scratch1,
               Register scratch2,
               Register scratch3,
-              Label* miss) {
+              Label* miss,
+              Failure** failure) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
 
@@ -620,17 +643,18 @@
    CallOptimization optimization(lookup);
 
    if (optimization.is_constant_call()) {
-      CompileCacheable(masm,
-                       object,
-                       receiver,
-                       scratch1,
-                       scratch2,
-                       scratch3,
-                       holder,
-                       lookup,
-                       name,
-                       optimization,
-                       miss);
+      return CompileCacheable(masm,
+                              object,
+                              receiver,
+                              scratch1,
+                              scratch2,
+                              scratch3,
+                              holder,
+                              lookup,
+                              name,
+                              optimization,
+                              miss,
+                              failure);
    } else {
      CompileRegular(masm,
                     object,
@@ -641,11 +665,12 @@
                     name,
                     holder,
                     miss);
+      return true;
    }
  }
 
 private:
-  void CompileCacheable(MacroAssembler* masm,
+  bool CompileCacheable(MacroAssembler* masm,
                        JSObject* object,
                        Register receiver,
                        Register scratch1,
@@ -655,7 +680,8 @@
                        LookupResult* lookup,
                        String* name,
                        const CallOptimization& optimization,
-                        Label* miss_label) {
+                        Label* miss_label,
+                        Failure** failure) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());
 
@@ -717,7 +743,13 @@
 
    // Invoke function.
    if (can_do_fast_api_call) {
-      GenerateFastApiCall(masm, optimization, arguments_.immediate());
+      bool success = GenerateFastApiCall(masm,
+                                         optimization,
+                                         arguments_.immediate(),
+                                         failure);
+      if (!success) {
+        return false;
+      }
    } else {
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION);
@@ -735,6 +767,8 @@
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm, scratch1);
    }
+
+    return true;
  }
 
  void CompileRegular(MacroAssembler* masm,
@@ -1036,7 +1070,11 @@
  }
 
  if (depth != kInvalidProtoDepth) {
-    GenerateFastApiCall(masm(), optimization, argc);
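+    // Generating the fast api call can bail out with an allocation
+    // failure (see GenerateFastApiCall); propagate it to our caller.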
+    Failure* failure;
+    bool success = GenerateFastApiCall(masm(), optimization, argc, &failure);
+    if (!success) {
+      return failure;
+    }
  } else {
    __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
  }
@@ -1723,16 +1761,21 @@
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
 
  CallInterceptorCompiler compiler(this, arguments(), rcx);
-  compiler.Compile(masm(),
-                   object,
-                   holder,
-                   name,
-                   &lookup,
-                   rdx,
-                   rbx,
-                   rdi,
-                   rax,
-                   &miss);
+  Failure* failure;
+  bool success = compiler.Compile(masm(),
+                                  object,
+                                  holder,
+                                  name,
+                                  &lookup,
+                                  rdx,
+                                  rbx,
+                                  rdi,
+                                  rax,
+                                  &miss,
+                                  &failure);
+  if (!success) {
+    return failure;
+  }
 
  // Restore receiver.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));