OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_X64_MACRO_ASSEMBLER_X64_H_ | 5 #ifndef V8_X64_MACRO_ASSEMBLER_X64_H_ |
6 #define V8_X64_MACRO_ASSEMBLER_X64_H_ | 6 #define V8_X64_MACRO_ASSEMBLER_X64_H_ |
7 | 7 |
8 #include "src/assembler.h" | 8 #include "src/assembler.h" |
9 #include "src/bailout-reason.h" | 9 #include "src/bailout-reason.h" |
10 #include "src/base/flags.h" | 10 #include "src/base/flags.h" |
(...skipping 380 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
391 void InvokeFunction(Handle<JSFunction> function, | 391 void InvokeFunction(Handle<JSFunction> function, |
392 const ParameterCount& expected, | 392 const ParameterCount& expected, |
393 const ParameterCount& actual, | 393 const ParameterCount& actual, |
394 InvokeFlag flag, | 394 InvokeFlag flag, |
395 const CallWrapper& call_wrapper); | 395 const CallWrapper& call_wrapper); |
396 | 396 |
397 // Invoke specified builtin JavaScript function. | 397 // Invoke specified builtin JavaScript function. |
398 void InvokeBuiltin(int native_context_index, InvokeFlag flag, | 398 void InvokeBuiltin(int native_context_index, InvokeFlag flag, |
399 const CallWrapper& call_wrapper = NullCallWrapper()); | 399 const CallWrapper& call_wrapper = NullCallWrapper()); |
400 | 400 |
401 // Store the function for the given builtin in the target register. | |
402 void GetBuiltinFunction(Register target, int native_context_index); | |
403 | |
404 // --------------------------------------------------------------------------- | 401 // --------------------------------------------------------------------------- |
405 // Smi tagging, untagging and operations on tagged smis. | 402 // Smi tagging, untagging and operations on tagged smis. |
406 | 403 |
407 // Support for constant splitting. | 404 // Support for constant splitting. |
408 bool IsUnsafeInt(const int32_t x); | 405 bool IsUnsafeInt(const int32_t x); |
409 void SafeMove(Register dst, Smi* src); | 406 void SafeMove(Register dst, Smi* src); |
410 void SafePush(Smi* src); | 407 void SafePush(Smi* src); |
411 | 408 |
412 // Conversions between tagged smi values and non-tagged integer values. | 409 // Conversions between tagged smi values and non-tagged integer values. |
413 | 410 |
(...skipping 933 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1347 | 1344 |
1348 // Picks out an array index from the hash field. | 1345 // Picks out an array index from the hash field. |
1349 // Register use: | 1346 // Register use: |
1350 // hash - holds the index's hash. Clobbered. | 1347 // hash - holds the index's hash. Clobbered. |
1351 // index - holds the overwritten index on exit. | 1348 // index - holds the overwritten index on exit. |
1352 void IndexFromHash(Register hash, Register index); | 1349 void IndexFromHash(Register hash, Register index); |
1353 | 1350 |
1354 // Find the function context up the context chain. | 1351 // Find the function context up the context chain. |
1355 void LoadContext(Register dst, int context_chain_length); | 1352 void LoadContext(Register dst, int context_chain_length); |
1356 | 1353 |
| 1354 // Load the global object from the current context. |
| 1355 void LoadGlobalObject(Register dst) { |
| 1356 LoadNativeContextSlot(Context::EXTENSION_INDEX, dst); |
| 1357 } |
| 1358 |
1357 // Load the global proxy from the current context. | 1359 // Load the global proxy from the current context. |
1358 void LoadGlobalProxy(Register dst); | 1360 void LoadGlobalProxy(Register dst) { |
| 1361 LoadNativeContextSlot(Context::GLOBAL_PROXY_INDEX, dst); |
| 1362 } |
1359 | 1363 |
1360 // Conditionally load the cached Array transitioned map of type | 1364 // Conditionally load the cached Array transitioned map of type |
1361 // transitioned_kind from the native context if the map in register | 1365 // transitioned_kind from the native context if the map in register |
1362 // map_in_out is the cached Array map in the native context of | 1366 // map_in_out is the cached Array map in the native context of |
1363 // expected_kind. | 1367 // expected_kind. |
1364 void LoadTransitionedArrayMapConditional( | 1368 void LoadTransitionedArrayMapConditional( |
1365 ElementsKind expected_kind, | 1369 ElementsKind expected_kind, |
1366 ElementsKind transitioned_kind, | 1370 ElementsKind transitioned_kind, |
1367 Register map_in_out, | 1371 Register map_in_out, |
1368 Register scratch, | 1372 Register scratch, |
1369 Label* no_map_match); | 1373 Label* no_map_match); |
1370 | 1374 |
1371 // Load the global function with the given index. | 1375 // Load the native context slot with the given index. |
1372 void LoadGlobalFunction(int index, Register function); | 1376 void LoadNativeContextSlot(int index, Register dst); |
1373 | 1377 |
1374 // Load the initial map from the global function. The registers | 1378 // Load the initial map from the global function. The registers |
1375 // function and map can be the same. | 1379 // function and map can be the same. |
1376 void LoadGlobalFunctionInitialMap(Register function, Register map); | 1380 void LoadGlobalFunctionInitialMap(Register function, Register map); |
1377 | 1381 |
1378 // --------------------------------------------------------------------------- | 1382 // --------------------------------------------------------------------------- |
1379 // Runtime calls | 1383 // Runtime calls |
1380 | 1384 |
1381 // Call a code stub. | 1385 // Call a code stub. |
1382 void CallStub(CodeStub* stub, TypeFeedbackId ast_id = TypeFeedbackId::None()); | 1386 void CallStub(CodeStub* stub, TypeFeedbackId ast_id = TypeFeedbackId::None()); |
(...skipping 304 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1687 inline Operand ContextOperand(Register context, int index) { | 1691 inline Operand ContextOperand(Register context, int index) { |
1688 return Operand(context, Context::SlotOffset(index)); | 1692 return Operand(context, Context::SlotOffset(index)); |
1689 } | 1693 } |
1690 | 1694 |
1691 | 1695 |
1692 inline Operand ContextOperand(Register context, Register index) { | 1696 inline Operand ContextOperand(Register context, Register index) { |
1693 return Operand(context, index, times_pointer_size, Context::SlotOffset(0)); | 1697 return Operand(context, index, times_pointer_size, Context::SlotOffset(0)); |
1694 } | 1698 } |
1695 | 1699 |
1696 | 1700 |
1697 inline Operand GlobalObjectOperand() { | 1701 inline Operand NativeContextOperand() { |
1698 return ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX); | 1702 return ContextOperand(rsi, Context::NATIVE_CONTEXT_INDEX); |
1699 } | 1703 } |
1700 | 1704 |
1701 | 1705 |
1702 // Provides access to exit frame stack space (not GCed). | 1706 // Provides access to exit frame stack space (not GCed). |
1703 inline Operand StackSpaceOperand(int index) { | 1707 inline Operand StackSpaceOperand(int index) { |
1704 #ifdef _WIN64 | 1708 #ifdef _WIN64 |
1705 const int kShaddowSpace = 4; | 1709 const int kShadowSpace = 4; |
1706 return Operand(rsp, (index + kShaddowSpace) * kPointerSize); | 1710 return Operand(rsp, (index + kShadowSpace) * kPointerSize); |
1707 #else | 1711 #else |
1708 return Operand(rsp, index * kPointerSize); | 1712 return Operand(rsp, index * kPointerSize); |
(...skipping 23 matching lines...) Expand all Loading... |
1732 } \ | 1736 } \ |
1733 masm-> | 1737 masm-> |
1734 #else | 1738 #else |
1735 #define ACCESS_MASM(masm) masm-> | 1739 #define ACCESS_MASM(masm) masm-> |
1736 #endif | 1740 #endif |
1737 | 1741 |
1738 } // namespace internal | 1742 } // namespace internal |
1739 } // namespace v8 | 1743 } // namespace v8 |
1740 | 1744 |
1741 #endif // V8_X64_MACRO_ASSEMBLER_X64_H_ | 1745 #endif // V8_X64_MACRO_ASSEMBLER_X64_H_ |
OLD | NEW |