| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
| 6 | 6 |
| 7 #include "src/code-factory.h" | 7 #include "src/code-factory.h" |
| 8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| (...skipping 1220 matching lines...) |
| 1231 | 1231 |
| 1232 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { | 1232 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { |
| 1233 // ----------- S t a t e ------------- | 1233 // ----------- S t a t e ------------- |
| 1234 // -- rax : argc | 1234 // -- rax : argc |
| 1235 // -- rsp[0] : return address | 1235 // -- rsp[0] : return address |
| 1236 // -- rsp[8] : last argument | 1236 // -- rsp[8] : last argument |
| 1237 // ----------------------------------- | 1237 // ----------------------------------- |
| 1238 Label generic_array_code; | 1238 Label generic_array_code; |
| 1239 | 1239 |
| 1240 // Get the InternalArray function. | 1240 // Get the InternalArray function. |
| 1241 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi); | 1241 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi); |
| 1242 | 1242 |
| 1243 if (FLAG_debug_code) { | 1243 if (FLAG_debug_code) { |
| 1244 // Initial map for the builtin InternalArray functions should be maps. | 1244 // Initial map for the builtin InternalArray functions should be maps. |
| 1245 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 1245 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
| 1246 // Will both indicate a NULL and a Smi. | 1246 // Will both indicate a NULL and a Smi. |
| 1247 STATIC_ASSERT(kSmiTag == 0); | 1247 STATIC_ASSERT(kSmiTag == 0); |
| 1248 Condition not_smi = NegateCondition(masm->CheckSmi(rbx)); | 1248 Condition not_smi = NegateCondition(masm->CheckSmi(rbx)); |
| 1249 __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction); | 1249 __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction); |
| 1250 __ CmpObjectType(rbx, MAP_TYPE, rcx); | 1250 __ CmpObjectType(rbx, MAP_TYPE, rcx); |
| 1251 __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction); | 1251 __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction); |
| 1252 } | 1252 } |
| 1253 | 1253 |
| 1254 // Run the native code for the InternalArray function called as a normal | 1254 // Run the native code for the InternalArray function called as a normal |
| 1255 // function. | 1255 // function. |
| 1256 // tail call a stub | 1256 // tail call a stub |
| 1257 InternalArrayConstructorStub stub(masm->isolate()); | 1257 InternalArrayConstructorStub stub(masm->isolate()); |
| 1258 __ TailCallStub(&stub); | 1258 __ TailCallStub(&stub); |
| 1259 } | 1259 } |
| 1260 | 1260 |
| 1261 | 1261 |
| 1262 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { | 1262 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { |
| 1263 // ----------- S t a t e ------------- | 1263 // ----------- S t a t e ------------- |
| 1264 // -- rax : argc | 1264 // -- rax : argc |
| 1265 // -- rsp[0] : return address | 1265 // -- rsp[0] : return address |
| 1266 // -- rsp[8] : last argument | 1266 // -- rsp[8] : last argument |
| 1267 // ----------------------------------- | 1267 // ----------------------------------- |
| 1268 Label generic_array_code; | 1268 Label generic_array_code; |
| 1269 | 1269 |
| 1270 // Get the Array function. | 1270 // Get the Array function. |
| 1271 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rdi); | 1271 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi); |
| 1272 | 1272 |
| 1273 if (FLAG_debug_code) { | 1273 if (FLAG_debug_code) { |
| 1274 // Initial map for the builtin Array functions should be maps. | 1274 // Initial map for the builtin Array functions should be maps. |
| 1275 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 1275 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
| 1276 // Will both indicate a NULL and a Smi. | 1276 // Will both indicate a NULL and a Smi. |
| 1277 STATIC_ASSERT(kSmiTag == 0); | 1277 STATIC_ASSERT(kSmiTag == 0); |
| 1278 Condition not_smi = NegateCondition(masm->CheckSmi(rbx)); | 1278 Condition not_smi = NegateCondition(masm->CheckSmi(rbx)); |
| 1279 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); | 1279 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); |
| 1280 __ CmpObjectType(rbx, MAP_TYPE, rcx); | 1280 __ CmpObjectType(rbx, MAP_TYPE, rcx); |
| 1281 __ Check(equal, kUnexpectedInitialMapForArrayFunction); | 1281 __ Check(equal, kUnexpectedInitialMapForArrayFunction); |
| (...skipping 496 matching lines...) |
| 1778 // 2. Call to something else, which might have a [[Call]] internal method (if | 1778 // 2. Call to something else, which might have a [[Call]] internal method (if |
| 1779 // not we raise an exception). | 1779 // not we raise an exception). |
| 1780 __ bind(&non_function); | 1780 __ bind(&non_function); |
| 1781 // Check if target has a [[Call]] internal method. | 1781 // Check if target has a [[Call]] internal method. |
| 1782 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), | 1782 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), |
| 1783 Immediate(1 << Map::kIsCallable)); | 1783 Immediate(1 << Map::kIsCallable)); |
| 1784 __ j(zero, &non_callable, Label::kNear); | 1784 __ j(zero, &non_callable, Label::kNear); |
| 1785 // Overwrite the original receiver with the (original) target. | 1785 // Overwrite the original receiver with the (original) target. |
| 1786 __ movp(args.GetReceiverOperand(), rdi); | 1786 __ movp(args.GetReceiverOperand(), rdi); |
| 1787 // Let the "call_as_function_delegate" take care of the rest. | 1787 // Let the "call_as_function_delegate" take care of the rest. |
| 1788 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi); | 1788 __ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi); |
| 1789 __ Jump(masm->isolate()->builtins()->CallFunction( | 1789 __ Jump(masm->isolate()->builtins()->CallFunction( |
| 1790 ConvertReceiverMode::kNotNullOrUndefined), | 1790 ConvertReceiverMode::kNotNullOrUndefined), |
| 1791 RelocInfo::CODE_TARGET); | 1791 RelocInfo::CODE_TARGET); |
| 1792 | 1792 |
| 1793 // 3. Call to something that is not callable. | 1793 // 3. Call to something that is not callable. |
| 1794 __ bind(&non_callable); | 1794 __ bind(&non_callable); |
| 1795 { | 1795 { |
| 1796 FrameScope scope(masm, StackFrame::INTERNAL); | 1796 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1797 __ Push(rdi); | 1797 __ Push(rdi); |
| 1798 __ CallRuntime(Runtime::kThrowCalledNonCallable, 1); | 1798 __ CallRuntime(Runtime::kThrowCalledNonCallable, 1); |
| (...skipping 64 matching lines...) |
| 1863 // Check if target has a [[Construct]] internal method. | 1863 // Check if target has a [[Construct]] internal method. |
| 1864 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), | 1864 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), |
| 1865 Immediate(1 << Map::kIsConstructor)); | 1865 Immediate(1 << Map::kIsConstructor)); |
| 1866 __ j(zero, &non_constructor, Label::kNear); | 1866 __ j(zero, &non_constructor, Label::kNear); |
| 1867 | 1867 |
| 1868 // Called Construct on an exotic Object with a [[Construct]] internal method. | 1868 // Called Construct on an exotic Object with a [[Construct]] internal method. |
| 1869 { | 1869 { |
| 1870 // Overwrite the original receiver with the (original) target. | 1870 // Overwrite the original receiver with the (original) target. |
| 1871 __ movp(args.GetReceiverOperand(), rdi); | 1871 __ movp(args.GetReceiverOperand(), rdi); |
| 1872 // Let the "call_as_constructor_delegate" take care of the rest. | 1872 // Let the "call_as_constructor_delegate" take care of the rest. |
| 1873 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi); | 1873 __ LoadGlobalFunction(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi); |
| 1874 __ Jump(masm->isolate()->builtins()->CallFunction(), | 1874 __ Jump(masm->isolate()->builtins()->CallFunction(), |
| 1875 RelocInfo::CODE_TARGET); | 1875 RelocInfo::CODE_TARGET); |
| 1876 } | 1876 } |
| 1877 | 1877 |
| 1878 // Called Construct on an Object that doesn't have a [[Construct]] internal | 1878 // Called Construct on an Object that doesn't have a [[Construct]] internal |
| 1879 // method. | 1879 // method. |
| 1880 __ bind(&non_constructor); | 1880 __ bind(&non_constructor); |
| 1881 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), | 1881 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), |
| 1882 RelocInfo::CODE_TARGET); | 1882 RelocInfo::CODE_TARGET); |
| 1883 } | 1883 } |
| (...skipping 101 matching lines...) |
| 1985 CompatibleReceiverCheck(masm, rcx, rbx, rdx, r8, r9, &receiver_check_failed); | 1985 CompatibleReceiverCheck(masm, rcx, rbx, rdx, r8, r9, &receiver_check_failed); |
| 1986 | 1986 |
| 1987 // Get the callback offset from the FunctionTemplateInfo, and jump to the | 1987 // Get the callback offset from the FunctionTemplateInfo, and jump to the |
| 1988 // beginning of the code. | 1988 // beginning of the code. |
| 1989 __ movp(rdx, FieldOperand(rbx, FunctionTemplateInfo::kCallCodeOffset)); | 1989 __ movp(rdx, FieldOperand(rbx, FunctionTemplateInfo::kCallCodeOffset)); |
| 1990 __ movp(rdx, FieldOperand(rdx, CallHandlerInfo::kFastHandlerOffset)); | 1990 __ movp(rdx, FieldOperand(rdx, CallHandlerInfo::kFastHandlerOffset)); |
| 1991 __ addp(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag)); | 1991 __ addp(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
| 1992 __ jmp(rdx); | 1992 __ jmp(rdx); |
| 1993 | 1993 |
| 1994 __ bind(&set_global_proxy); | 1994 __ bind(&set_global_proxy); |
| 1995 __ movp(rcx, NativeContextOperand()); | 1995 __ movp(rcx, GlobalObjectOperand()); |
| 1996 __ movp(rcx, ContextOperand(rcx, Context::GLOBAL_PROXY_INDEX)); | 1996 __ movp(rcx, FieldOperand(rcx, JSGlobalObject::kGlobalProxyOffset)); |
| 1997 __ movp(args.GetReceiverOperand(), rcx); | 1997 __ movp(args.GetReceiverOperand(), rcx); |
| 1998 __ jmp(&valid_receiver, Label::kNear); | 1998 __ jmp(&valid_receiver, Label::kNear); |
| 1999 | 1999 |
| 2000 // Compatible receiver check failed: pop return address, arguments and | 2000 // Compatible receiver check failed: pop return address, arguments and |
| 2001 // receiver and throw an Illegal Invocation exception. | 2001 // receiver and throw an Illegal Invocation exception. |
| 2002 __ bind(&receiver_check_failed); | 2002 __ bind(&receiver_check_failed); |
| 2003 __ PopReturnAddressTo(rbx); | 2003 __ PopReturnAddressTo(rbx); |
| 2004 __ leap(rax, Operand(rax, times_pointer_size, 1 * kPointerSize)); | 2004 __ leap(rax, Operand(rax, times_pointer_size, 1 * kPointerSize)); |
| 2005 __ addp(rsp, rax); | 2005 __ addp(rsp, rax); |
| 2006 __ PushReturnAddressFrom(rbx); | 2006 __ PushReturnAddressFrom(rbx); |
| (...skipping 56 matching lines...) |
| 2063 __ ret(0); | 2063 __ ret(0); |
| 2064 } | 2064 } |
| 2065 | 2065 |
| 2066 | 2066 |
| 2067 #undef __ | 2067 #undef __ |
| 2068 | 2068 |
| 2069 } // namespace internal | 2069 } // namespace internal |
| 2070 } // namespace v8 | 2070 } // namespace v8 |
| 2071 | 2071 |
| 2072 #endif // V8_TARGET_ARCH_X64 | 2072 #endif // V8_TARGET_ARCH_X64 |
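
Note for readers skimming the hunks above: every hunk swaps the native-context helpers on the OLD side (LoadNativeContextSlot, NativeContextOperand plus Context::GLOBAL_PROXY_INDEX) for the global-object based helpers on the NEW side (LoadGlobalFunction, GlobalObjectOperand plus JSGlobalObject::kGlobalProxyOffset). The sketch below is a minimal, hedged illustration of why the two forms should load the same slot, written in the style of the x64 MacroAssembler; the *_sketch names and the kNativeContextOffset field are assumptions for illustration, not the actual V8 implementation.

```cpp
// Hedged sketch only: a plausible expansion of the two helper flavours seen
// in this diff. Offsets and exact instruction sequences are assumptions.

// OLD side of the diff: read the slot straight out of the native context.
void LoadNativeContextSlot_sketch(MacroAssembler* masm, int index,
                                  Register dst) {
  masm->movp(dst, NativeContextOperand());      // native context of the frame
  masm->movp(dst, ContextOperand(dst, index));  // requested context slot
}

// NEW side of the diff: reach the native context via the global object first.
void LoadGlobalFunction_sketch(MacroAssembler* masm, int index, Register dst) {
  masm->movp(dst, GlobalObjectOperand());       // current global object
  masm->movp(dst,                               // its native context
             FieldOperand(dst, JSGlobalObject::kNativeContextOffset));
  masm->movp(dst, ContextOperand(dst, index));  // requested context slot
}
```

Both flavours end up indexing the same native context, which is why the surrounding builtin code (InternalArray/Array construction, the call and construct delegates, and the global-proxy fixup near line 1995) is otherwise unchanged.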