| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 412 matching lines...) |
| 423 | 423 |
| 424 // Return and remove the on-stack parameter. | 424 // Return and remove the on-stack parameter. |
| 425 __ ret(1 * kPointerSize); | 425 __ ret(1 * kPointerSize); |
| 426 | 426 |
| 427 __ bind(&restore); | 427 __ bind(&restore); |
| 428 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); | 428 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); |
| 429 __ jmp(&install_unoptimized); | 429 __ jmp(&install_unoptimized); |
| 430 | 430 |
| 431 // Create a new closure through the slower runtime call. | 431 // Create a new closure through the slower runtime call. |
| 432 __ bind(&gc); | 432 __ bind(&gc); |
| 433 __ pop(rcx); // Temporarily remove return address. | 433 __ PopReturnAddressTo(rcx); |
| 434 __ pop(rdx); | 434 __ pop(rdx); |
| 435 __ push(rsi); | 435 __ push(rsi); |
| 436 __ push(rdx); | 436 __ push(rdx); |
| 437 __ PushRoot(Heap::kFalseValueRootIndex); | 437 __ PushRoot(Heap::kFalseValueRootIndex); |
| 438 __ push(rcx); // Restore return address. | 438 __ PushReturnAddressFrom(rcx); |
| 439 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); | 439 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); |
| 440 } | 440 } |
| 441 | 441 |
| 442 | 442 |
| 443 void FastNewContextStub::Generate(MacroAssembler* masm) { | 443 void FastNewContextStub::Generate(MacroAssembler* masm) { |
| 444 // Try to allocate the context in new space. | 444 // Try to allocate the context in new space. |
| 445 Label gc; | 445 Label gc; |
| 446 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 446 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
| 447 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, | 447 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, |
| 448 rax, rbx, rcx, &gc, TAG_OBJECT); | 448 rax, rbx, rcx, &gc, TAG_OBJECT); |
| (...skipping 239 matching lines...) |
| 688 __ pop(save_reg); | 688 __ pop(save_reg); |
| 689 __ pop(scratch1); | 689 __ pop(scratch1); |
| 690 __ ret(0); | 690 __ ret(0); |
| 691 } | 691 } |
| 692 | 692 |
| 693 | 693 |
| 694 void BinaryOpStub::Initialize() {} | 694 void BinaryOpStub::Initialize() {} |
| 695 | 695 |
| 696 | 696 |
| 697 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { | 697 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
| 698 __ pop(rcx); // Save return address. | 698 __ PopReturnAddressTo(rcx); |
| 699 __ push(rdx); | 699 __ push(rdx); |
| 700 __ push(rax); | 700 __ push(rax); |
| 701 // Left and right arguments are now on top. | 701 // Left and right arguments are now on top. |
| 702 __ Push(Smi::FromInt(MinorKey())); | 702 __ Push(Smi::FromInt(MinorKey())); |
| 703 | 703 |
| 704 __ push(rcx); // Push return address. | 704 __ PushReturnAddressFrom(rcx); |
| 705 | 705 |
| 706 // Patch the caller to an appropriate specialized stub and return the | 706 // Patch the caller to an appropriate specialized stub and return the |
| 707 // operation result to the caller of the stub. | 707 // operation result to the caller of the stub. |
| 708 __ TailCallExternalReference( | 708 __ TailCallExternalReference( |
| 709 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), | 709 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), |
| 710 masm->isolate()), | 710 masm->isolate()), |
| 711 3, | 711 3, |
| 712 1); | 712 1); |
| 713 } | 713 } |
| 714 | 714 |
| (...skipping 262 matching lines...) |
| 977 __ Abort("Unexpected fall-through in " | 977 __ Abort("Unexpected fall-through in " |
| 978 "BinaryStub_GenerateFloatingPointCode."); | 978 "BinaryStub_GenerateFloatingPointCode."); |
| 979 } | 979 } |
| 980 } | 980 } |
| 981 | 981 |
| 982 | 982 |
| 983 static void BinaryOpStub_GenerateRegisterArgsPushUnderReturn( | 983 static void BinaryOpStub_GenerateRegisterArgsPushUnderReturn( |
| 984 MacroAssembler* masm) { | 984 MacroAssembler* masm) { |
| 985 // Push arguments, but ensure they are under the return address | 985 // Push arguments, but ensure they are under the return address |
| 986 // for a tail call. | 986 // for a tail call. |
| 987 __ pop(rcx); | 987 __ PopReturnAddressTo(rcx); |
| 988 __ push(rdx); | 988 __ push(rdx); |
| 989 __ push(rax); | 989 __ push(rax); |
| 990 __ push(rcx); | 990 __ PushReturnAddressFrom(rcx); |
| 991 } | 991 } |
| 992 | 992 |
| 993 | 993 |
| 994 void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { | 994 void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { |
| 995 ASSERT(op_ == Token::ADD); | 995 ASSERT(op_ == Token::ADD); |
| 996 Label left_not_string, call_runtime; | 996 Label left_not_string, call_runtime; |
| 997 | 997 |
| 998 // Registers containing left and right operands respectively. | 998 // Registers containing left and right operands respectively. |
| 999 Register left = rdx; | 999 Register left = rdx; |
| 1000 Register right = rax; | 1000 Register right = rax; |
| (...skipping 1147 matching lines...) |
| 2148 // property might have been redefined. | 2148 // property might have been redefined. |
| 2149 __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset)); | 2149 __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset)); |
| 2150 __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset), | 2150 __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset), |
| 2151 Heap::kHashTableMapRootIndex); | 2151 Heap::kHashTableMapRootIndex); |
| 2152 __ j(equal, &miss); | 2152 __ j(equal, &miss); |
| 2153 | 2153 |
| 2154 // Check that value is a smi. | 2154 // Check that value is a smi. |
| 2155 __ JumpIfNotSmi(value, &miss); | 2155 __ JumpIfNotSmi(value, &miss); |
| 2156 | 2156 |
| 2157 // Prepare tail call to StoreIC_ArrayLength. | 2157 // Prepare tail call to StoreIC_ArrayLength. |
| 2158 __ pop(scratch); | 2158 __ PopReturnAddressTo(scratch); |
| 2159 __ push(receiver); | 2159 __ push(receiver); |
| 2160 __ push(value); | 2160 __ push(value); |
| 2161 __ push(scratch); // return address | 2161 __ PushReturnAddressFrom(scratch); |
| 2162 | 2162 |
| 2163 ExternalReference ref = | 2163 ExternalReference ref = |
| 2164 ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate()); | 2164 ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate()); |
| 2165 __ TailCallExternalReference(ref, 2, 1); | 2165 __ TailCallExternalReference(ref, 2, 1); |
| 2166 | 2166 |
| 2167 __ bind(&miss); | 2167 __ bind(&miss); |
| 2168 | 2168 |
| 2169 StubCompiler::TailCallBuiltin( | 2169 StubCompiler::TailCallBuiltin( |
| 2170 masm, BaseLoadStoreStubCompiler::MissBuiltin(kind())); | 2170 masm, BaseLoadStoreStubCompiler::MissBuiltin(kind())); |
| 2171 } | 2171 } |
| (...skipping 45 matching lines...) |
| 2217 // Read the argument from the stack and return it. | 2217 // Read the argument from the stack and return it. |
| 2218 index = masm->SmiToIndex(rax, rcx, kPointerSizeLog2); | 2218 index = masm->SmiToIndex(rax, rcx, kPointerSizeLog2); |
| 2219 __ lea(rbx, Operand(rbx, index.reg, index.scale, 0)); | 2219 __ lea(rbx, Operand(rbx, index.reg, index.scale, 0)); |
| 2220 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2); | 2220 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2); |
| 2221 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement)); | 2221 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement)); |
| 2222 __ Ret(); | 2222 __ Ret(); |
| 2223 | 2223 |
| 2224 // Slow-case: Handle non-smi or out-of-bounds access to arguments | 2224 // Slow-case: Handle non-smi or out-of-bounds access to arguments |
| 2225 // by calling the runtime system. | 2225 // by calling the runtime system. |
| 2226 __ bind(&slow); | 2226 __ bind(&slow); |
| 2227 __ pop(rbx); // Return address. | 2227 __ PopReturnAddressTo(rbx); |
| 2228 __ push(rdx); | 2228 __ push(rdx); |
| 2229 __ push(rbx); | 2229 __ PushReturnAddressFrom(rbx); |
| 2230 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); | 2230 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); |
| 2231 } | 2231 } |
| 2232 | 2232 |
| 2233 | 2233 |
| 2234 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { | 2234 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { |
| 2235 // Stack layout: | 2235 // Stack layout: |
| 2236 // rsp[0] : return address | 2236 // rsp[0] : return address |
| 2237 // rsp[8] : number of parameters (tagged) | 2237 // rsp[8] : number of parameters (tagged) |
| 2238 // rsp[16] : receiver displacement | 2238 // rsp[16] : receiver displacement |
| 2239 // rsp[24] : function | 2239 // rsp[24] : function |
| (...skipping 1239 matching lines...) |
| 3479 // undefined, and are equal. | 3479 // undefined, and are equal. |
| 3480 __ Set(rax, EQUAL); | 3480 __ Set(rax, EQUAL); |
| 3481 __ bind(&return_unequal); | 3481 __ bind(&return_unequal); |
| 3482 // Return non-equal by returning the non-zero object pointer in rax, | 3482 // Return non-equal by returning the non-zero object pointer in rax, |
| 3483 // or return equal if we fell through to here. | 3483 // or return equal if we fell through to here. |
| 3484 __ ret(0); | 3484 __ ret(0); |
| 3485 __ bind(&not_both_objects); | 3485 __ bind(&not_both_objects); |
| 3486 } | 3486 } |
| 3487 | 3487 |
| 3488 // Push arguments below the return address to prepare jump to builtin. | 3488 // Push arguments below the return address to prepare jump to builtin. |
| 3489 __ pop(rcx); | 3489 __ PopReturnAddressTo(rcx); |
| 3490 __ push(rdx); | 3490 __ push(rdx); |
| 3491 __ push(rax); | 3491 __ push(rax); |
| 3492 | 3492 |
| 3493 // Figure out which native to call and set up the arguments. | 3493 // Figure out which native to call and set up the arguments. |
| 3494 Builtins::JavaScript builtin; | 3494 Builtins::JavaScript builtin; |
| 3495 if (cc == equal) { | 3495 if (cc == equal) { |
| 3496 builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS; | 3496 builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS; |
| 3497 } else { | 3497 } else { |
| 3498 builtin = Builtins::COMPARE; | 3498 builtin = Builtins::COMPARE; |
| 3499 __ Push(Smi::FromInt(NegativeComparisonResult(cc))); | 3499 __ Push(Smi::FromInt(NegativeComparisonResult(cc))); |
| 3500 } | 3500 } |
| 3501 | 3501 |
| 3502 // Restore return address on the stack. | 3502 __ PushReturnAddressFrom(rcx); |
| 3503 __ push(rcx); | |
| 3504 | 3503 |
| 3505 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 3504 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
| 3506 // tagged as a small integer. | 3505 // tagged as a small integer. |
| 3507 __ InvokeBuiltin(builtin, JUMP_FUNCTION); | 3506 __ InvokeBuiltin(builtin, JUMP_FUNCTION); |
| 3508 | 3507 |
| 3509 __ bind(&miss); | 3508 __ bind(&miss); |
| 3510 GenerateMiss(masm); | 3509 GenerateMiss(masm); |
| 3511 } | 3510 } |
| 3512 | 3511 |
| 3513 | 3512 |
| (...skipping 148 matching lines...) |
| 3662 if (RecordCallTarget()) { | 3661 if (RecordCallTarget()) { |
| 3663 // If there is a call target cache, mark it megamorphic in the | 3662 // If there is a call target cache, mark it megamorphic in the |
| 3664 // non-function case. MegamorphicSentinel is an immortal immovable | 3663 // non-function case. MegamorphicSentinel is an immortal immovable |
| 3665 // object (undefined) so no write barrier is needed. | 3664 // object (undefined) so no write barrier is needed. |
| 3666 __ Move(FieldOperand(rbx, Cell::kValueOffset), | 3665 __ Move(FieldOperand(rbx, Cell::kValueOffset), |
| 3667 TypeFeedbackCells::MegamorphicSentinel(isolate)); | 3666 TypeFeedbackCells::MegamorphicSentinel(isolate)); |
| 3668 } | 3667 } |
| 3669 // Check for function proxy. | 3668 // Check for function proxy. |
| 3670 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); | 3669 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); |
| 3671 __ j(not_equal, &non_function); | 3670 __ j(not_equal, &non_function); |
| 3672 __ pop(rcx); | 3671 __ PopReturnAddressTo(rcx); |
| 3673 __ push(rdi); // put proxy as additional argument under return address | 3672 __ push(rdi); // put proxy as additional argument under return address |
| 3674 __ push(rcx); | 3673 __ PushReturnAddressFrom(rcx); |
| 3675 __ Set(rax, argc_ + 1); | 3674 __ Set(rax, argc_ + 1); |
| 3676 __ Set(rbx, 0); | 3675 __ Set(rbx, 0); |
| 3677 __ SetCallKind(rcx, CALL_AS_METHOD); | 3676 __ SetCallKind(rcx, CALL_AS_METHOD); |
| 3678 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); | 3677 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); |
| 3679 { | 3678 { |
| 3680 Handle<Code> adaptor = | 3679 Handle<Code> adaptor = |
| 3681 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 3680 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
| 3682 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 3681 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
| 3683 } | 3682 } |
| 3684 | 3683 |
| (...skipping 657 matching lines...) |
| 4342 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); | 4341 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); |
| 4343 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); | 4342 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); |
| 4344 } | 4343 } |
| 4345 } | 4344 } |
| 4346 __ ret(2 * kPointerSize + extra_stack_space); | 4345 __ ret(2 * kPointerSize + extra_stack_space); |
| 4347 | 4346 |
| 4348 // Slow-case: Go through the JavaScript implementation. | 4347 // Slow-case: Go through the JavaScript implementation. |
| 4349 __ bind(&slow); | 4348 __ bind(&slow); |
| 4350 if (HasCallSiteInlineCheck()) { | 4349 if (HasCallSiteInlineCheck()) { |
| 4351 // Remove extra value from the stack. | 4350 // Remove extra value from the stack. |
| 4352 __ pop(rcx); | 4351 __ PopReturnAddressTo(rcx); |
| 4353 __ pop(rax); | 4352 __ pop(rax); |
| 4354 __ push(rcx); | 4353 __ PushReturnAddressFrom(rcx); |
| 4355 } | 4354 } |
| 4356 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 4355 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
| 4357 } | 4356 } |
| 4358 | 4357 |
| 4359 | 4358 |
| 4360 // Passing arguments in registers is not supported. | 4359 // Passing arguments in registers is not supported. |
| 4361 Register InstanceofStub::left() { return no_reg; } | 4360 Register InstanceofStub::left() { return no_reg; } |
| 4362 | 4361 |
| 4363 | 4362 |
| 4364 Register InstanceofStub::right() { return no_reg; } | 4363 Register InstanceofStub::right() { return no_reg; } |
| (...skipping 450 matching lines...) |
| 4815 | 4814 |
| 4816 | 4815 |
| 4817 void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) { | 4816 void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) { |
| 4818 __ push(rax); | 4817 __ push(rax); |
| 4819 __ push(rdx); | 4818 __ push(rdx); |
| 4820 } | 4819 } |
| 4821 | 4820 |
| 4822 | 4821 |
| 4823 void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm, | 4822 void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm, |
| 4824 Register temp) { | 4823 Register temp) { |
| 4825 __ pop(temp); | 4824 __ PopReturnAddressTo(temp); |
| 4826 __ pop(rdx); | 4825 __ pop(rdx); |
| 4827 __ pop(rax); | 4826 __ pop(rax); |
| 4828 __ push(temp); | 4827 __ PushReturnAddressFrom(temp); |
| 4829 } | 4828 } |
| 4830 | 4829 |
| 4831 | 4830 |
| 4832 void StringAddStub::GenerateConvertArgument(MacroAssembler* masm, | 4831 void StringAddStub::GenerateConvertArgument(MacroAssembler* masm, |
| 4833 int stack_offset, | 4832 int stack_offset, |
| 4834 Register arg, | 4833 Register arg, |
| 4835 Register scratch1, | 4834 Register scratch1, |
| 4836 Register scratch2, | 4835 Register scratch2, |
| 4837 Register scratch3, | 4836 Register scratch3, |
| 4838 Label* slow) { | 4837 Label* slow) { |
| (...skipping 683 matching lines...) |
| 5522 __ ret(2 * kPointerSize); | 5521 __ ret(2 * kPointerSize); |
| 5523 | 5522 |
| 5524 __ bind(&not_same); | 5523 __ bind(&not_same); |
| 5525 | 5524 |
| 5526 // Check that both are sequential ASCII strings. | 5525 // Check that both are sequential ASCII strings. |
| 5527 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime); | 5526 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime); |
| 5528 | 5527 |
| 5529 // Inline comparison of ASCII strings. | 5528 // Inline comparison of ASCII strings. |
| 5530 __ IncrementCounter(counters->string_compare_native(), 1); | 5529 __ IncrementCounter(counters->string_compare_native(), 1); |
| 5531 // Drop arguments from the stack | 5530 // Drop arguments from the stack |
| 5532 __ pop(rcx); | 5531 __ PopReturnAddressTo(rcx); |
| 5533 __ addq(rsp, Immediate(2 * kPointerSize)); | 5532 __ addq(rsp, Immediate(2 * kPointerSize)); |
| 5534 __ push(rcx); | 5533 __ PushReturnAddressFrom(rcx); |
| 5535 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); | 5534 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); |
| 5536 | 5535 |
| 5537 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 5536 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
| 5538 // tagged as a small integer. | 5537 // tagged as a small integer. |
| 5539 __ bind(&runtime); | 5538 __ bind(&runtime); |
| 5540 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 5539 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
| 5541 } | 5540 } |
| 5542 | 5541 |
| 5543 | 5542 |
| 5544 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 5543 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
| (...skipping 248 matching lines...) |
| 5793 if (equality) { | 5792 if (equality) { |
| 5794 StringCompareStub::GenerateFlatAsciiStringEquals( | 5793 StringCompareStub::GenerateFlatAsciiStringEquals( |
| 5795 masm, left, right, tmp1, tmp2); | 5794 masm, left, right, tmp1, tmp2); |
| 5796 } else { | 5795 } else { |
| 5797 StringCompareStub::GenerateCompareFlatAsciiStrings( | 5796 StringCompareStub::GenerateCompareFlatAsciiStrings( |
| 5798 masm, left, right, tmp1, tmp2, tmp3, kScratchRegister); | 5797 masm, left, right, tmp1, tmp2, tmp3, kScratchRegister); |
| 5799 } | 5798 } |
| 5800 | 5799 |
| 5801 // Handle more complex cases in runtime. | 5800 // Handle more complex cases in runtime. |
| 5802 __ bind(&runtime); | 5801 __ bind(&runtime); |
| 5803 __ pop(tmp1); // Return address. | 5802 __ PopReturnAddressTo(tmp1); |
| 5804 __ push(left); | 5803 __ push(left); |
| 5805 __ push(right); | 5804 __ push(right); |
| 5806 __ push(tmp1); | 5805 __ PushReturnAddressFrom(tmp1); |
| 5807 if (equality) { | 5806 if (equality) { |
| 5808 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); | 5807 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); |
| 5809 } else { | 5808 } else { |
| 5810 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 5809 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
| 5811 } | 5810 } |
| 5812 | 5811 |
| 5813 __ bind(&miss); | 5812 __ bind(&miss); |
| 5814 GenerateMiss(masm); | 5813 GenerateMiss(masm); |
| 5815 } | 5814 } |
| 5816 | 5815 |
| (...skipping 587 matching lines...) |
| 6404 __ CheckFastElements(rdi, &double_elements); | 6403 __ CheckFastElements(rdi, &double_elements); |
| 6405 | 6404 |
| 6406 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS | 6405 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS |
| 6407 __ JumpIfSmi(rax, &smi_element); | 6406 __ JumpIfSmi(rax, &smi_element); |
| 6408 __ CheckFastSmiElements(rdi, &fast_elements); | 6407 __ CheckFastSmiElements(rdi, &fast_elements); |
| 6409 | 6408 |
| 6410 // Store into the array literal requires an elements transition. Call into | 6409 // Store into the array literal requires an elements transition. Call into |
| 6411 // the runtime. | 6410 // the runtime. |
| 6412 | 6411 |
| 6413 __ bind(&slow_elements); | 6412 __ bind(&slow_elements); |
| 6414 __ pop(rdi); // Pop return address and remember to put back later for tail | 6413 __ PopReturnAddressTo(rdi); |
| 6415 // call. | |
| 6416 __ push(rbx); | 6414 __ push(rbx); |
| 6417 __ push(rcx); | 6415 __ push(rcx); |
| 6418 __ push(rax); | 6416 __ push(rax); |
| 6419 __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 6417 __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
| 6420 __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); | 6418 __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); |
| 6421 __ push(rdx); | 6419 __ push(rdx); |
| 6422 __ push(rdi); // Return return address so that tail call returns to right | 6420 __ PushReturnAddressFrom(rdi); |
| 6423 // place. | |
| 6424 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1); | 6421 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1); |
| 6425 | 6422 |
| 6426 // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object. | 6423 // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object. |
| 6427 __ bind(&fast_elements); | 6424 __ bind(&fast_elements); |
| 6428 __ SmiToInteger32(kScratchRegister, rcx); | 6425 __ SmiToInteger32(kScratchRegister, rcx); |
| 6429 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); | 6426 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); |
| 6430 __ lea(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size, | 6427 __ lea(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size, |
| 6431 FixedArrayBase::kHeaderSize)); | 6428 FixedArrayBase::kHeaderSize)); |
| 6432 __ movq(Operand(rcx, 0), rax); | 6429 __ movq(Operand(rcx, 0), rax); |
| 6433 // Update the write barrier for the array store. | 6430 // Update the write barrier for the array store. |
| (...skipping 26 matching lines...) |
| 6460 } | 6457 } |
| 6461 | 6458 |
| 6462 | 6459 |
| 6463 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { | 6460 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { |
| 6464 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); | 6461 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); |
| 6465 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); | 6462 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); |
| 6466 int parameter_count_offset = | 6463 int parameter_count_offset = |
| 6467 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; | 6464 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; |
| 6468 __ movq(rbx, MemOperand(rbp, parameter_count_offset)); | 6465 __ movq(rbx, MemOperand(rbp, parameter_count_offset)); |
| 6469 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 6466 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
| 6470 __ pop(rcx); | 6467 __ PopReturnAddressTo(rcx); |
| 6471 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE | 6468 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE |
| 6472 ? kPointerSize | 6469 ? kPointerSize |
| 6473 : 0; | 6470 : 0; |
| 6474 __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); | 6471 __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); |
| 6475 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. | 6472 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. |
| 6476 } | 6473 } |
| 6477 | 6474 |
| 6478 | 6475 |
| 6479 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 6476 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
| 6480 if (masm->isolate()->function_entry_hook() != NULL) { | 6477 if (masm->isolate()->function_entry_hook() != NULL) { |
| (...skipping 335 matching lines...) |
| 6816 __ bind(&fast_elements_case); | 6813 __ bind(&fast_elements_case); |
| 6817 GenerateCase(masm, FAST_ELEMENTS); | 6814 GenerateCase(masm, FAST_ELEMENTS); |
| 6818 } | 6815 } |
| 6819 | 6816 |
| 6820 | 6817 |
| 6821 #undef __ | 6818 #undef __ |
| 6822 | 6819 |
| 6823 } } // namespace v8::internal | 6820 } } // namespace v8::internal |
| 6824 | 6821 |
| 6825 #endif // V8_TARGET_ARCH_X64 | 6822 #endif // V8_TARGET_ARCH_X64 |
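Every hunk in this CL is the same mechanical substitution: a raw pop/push of the return address becomes a call to the named MacroAssembler helpers PopReturnAddressTo and PushReturnAddressFrom. On x64 these are presumably one-instruction wrappers; a minimal sketch of their likely shape (the names are taken from the diff above, the bodies are inferred rather than quoted from this CL):

  // In macro-assembler-x64 (sketch, not a quote from this CL):
  void MacroAssembler::PopReturnAddressTo(Register dst) {
    pop(dst);   // At stub entry the return address sits at rsp[0].
  }

  void MacroAssembler::PushReturnAddressFrom(Register src) {
    push(src);  // Restore the return address to rsp[0] before the tail call.
  }

The substitution changes readability, not behavior: a stub that tail-calls into the runtime must pop the return address, push its arguments, and push the return address back so the callee returns directly to the stub's caller. With the named helpers, each of those steps is visible at a glance instead of hiding behind an anonymous pop(rcx)/push(rcx) pair that a reader could mistake for ordinary operand shuffling.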