Chromium Code Reviews

Unified diff: src/x64/assembler-x64.cc (lines added in this patch set are prefixed with "+", removed lines with "-")

Issue 2084017: Version 2.2.11... (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 10 years, 7 months ago
 // Copyright 2009 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
 //       with the distribution.
 //     * Neither the name of Google Inc. nor the names of its
 //       contributors may be used to endorse or promote products derived
 //       from this software without specific prior written permission.
 //
 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

 #include "v8.h"

+#if defined(V8_TARGET_ARCH_X64)
+
 #include "macro-assembler.h"
 #include "serialize.h"

 namespace v8 {
 namespace internal {

 // -----------------------------------------------------------------------------
 // Implementation of CpuFeatures

 // The required user mode extensions in X64 are (from AMD64 ABI Table A.1):
(...skipping 411 matching lines...)
   last_pc_ = pc_;
   emit_rex_64(reg, op);
   emit(opcode);
   emit_operand(reg, op);
 }


 void Assembler::arithmetic_op(byte opcode, Register reg, Register rm_reg) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
-  emit_rex_64(reg, rm_reg);
-  emit(opcode);
-  emit_modrm(reg, rm_reg);
+  ASSERT((opcode & 0xC6) == 2);
+  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
+    // Swap reg and rm_reg and change opcode operand order.
+    emit_rex_64(rm_reg, reg);
+    emit(opcode ^ 0x02);
+    emit_modrm(rm_reg, reg);
+  } else {
+    emit_rex_64(reg, rm_reg);
+    emit(opcode);
+    emit_modrm(reg, rm_reg);
+  }
 }


 void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
-  emit(0x66);
-  emit_optional_rex_32(reg, rm_reg);
-  emit(opcode);
-  emit_modrm(reg, rm_reg);
+  ASSERT((opcode & 0xC6) == 2);
+  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
+    // Swap reg and rm_reg and change opcode operand order.
+    emit(0x66);
+    emit_optional_rex_32(rm_reg, reg);
+    emit(opcode ^ 0x02);
+    emit_modrm(rm_reg, reg);
+  } else {
+    emit(0x66);
+    emit_optional_rex_32(reg, rm_reg);
+    emit(opcode);
+    emit_modrm(reg, rm_reg);
+  }
 }


 void Assembler::arithmetic_op_16(byte opcode,
                                  Register reg,
                                  const Operand& rm_reg) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit(0x66);
   emit_optional_rex_32(reg, rm_reg);
   emit(opcode);
   emit_operand(reg, rm_reg);
 }


 void Assembler::arithmetic_op_32(byte opcode, Register reg, Register rm_reg) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
-  emit_optional_rex_32(reg, rm_reg);
-  emit(opcode);
-  emit_modrm(reg, rm_reg);
+  ASSERT((opcode & 0xC6) == 2);
+  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
+    // Swap reg and rm_reg and change opcode operand order.
+    emit_optional_rex_32(rm_reg, reg);
+    emit(opcode ^ 0x02);  // E.g. 0x03 -> 0x01 for ADD.
+    emit_modrm(rm_reg, reg);
+  } else {
+    emit_optional_rex_32(reg, rm_reg);
+    emit(opcode);
+    emit_modrm(reg, rm_reg);
+  }
 }

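Note on the arithmetic_op changes above: the classic two-operand ALU instructions (ADD, OR, ADC, SBB, AND, SUB, XOR, CMP) each come as a pair of opcodes that differ only in bit 1, the direction bit, so a register-register operation can be encoded either in "reg, r/m" form (e.g. 0x03 for ADD) or in "r/m, reg" form (e.g. 0x01). ASSERT((opcode & 0xC6) == 2) checks that the opcode passed in is the "reg, r/m" member of such a pair, and opcode ^ 0x02 selects its partner, so swapping the ModR/M fields at the same time leaves the instruction's meaning unchanged. The sketch below is illustrative only; it is not part of the patch, and the modrm() helper and register constants are local to the example. It hand-assembles add rcx, rsp both ways:

// Standalone illustration (plain C++, not V8 code): both byte sequences
// below encode "add rcx, rsp"; the second is what the patched
// arithmetic_op() emits when the r/m register would be rsp or r12.
#include <cstdint>
#include <cstdio>

// ModR/M byte for a register-register operation (mod = 11b).
static uint8_t modrm(int reg, int rm) {
  return static_cast<uint8_t>(0xC0 | ((reg & 7) << 3) | (rm & 7));
}

int main() {
  const int rcx = 1, rsp = 4;  // low three bits of the register codes

  // "reg, r/m" form: REX.W 03 /r with reg=rcx, rm=rsp.
  uint8_t a[] = {0x48, 0x03, modrm(rcx, rsp)};  // 48 03 CC
  // "r/m, reg" form: REX.W 01 /r (0x03 ^ 0x02) with the ModR/M fields swapped.
  uint8_t b[] = {0x48, 0x01, modrm(rsp, rcx)};  // 48 01 E1

  std::printf("%02X %02X %02X  vs  %02X %02X %02X\n",
              a[0], a[1], a[2], b[0], b[1], b[2]);
  return 0;
}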
 void Assembler::arithmetic_op_32(byte opcode,
                                  Register reg,
                                  const Operand& rm_reg) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit_optional_rex_32(reg, rm_reg);
   emit(opcode);
(...skipping 778 matching lines...)
   last_pc_ = pc_;
   emit_optional_rex_32(dst, src);
   emit(0x8B);
   emit_operand(dst, src);
 }


 void Assembler::movl(Register dst, Register src) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
-  emit_optional_rex_32(dst, src);
-  emit(0x8B);
-  emit_modrm(dst, src);
+  if (src.low_bits() == 4) {
+    emit_optional_rex_32(src, dst);
+    emit(0x89);
+    emit_modrm(src, dst);
+  } else {
+    emit_optional_rex_32(dst, src);
+    emit(0x8B);
+    emit_modrm(dst, src);
+  }
 }


 void Assembler::movl(const Operand& dst, Register src) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit_optional_rex_32(src, dst);
   emit(0x89);
   emit_operand(src, dst);
 }
(...skipping 24 matching lines...)
   last_pc_ = pc_;
   emit_rex_64(dst, src);
   emit(0x8B);
   emit_operand(dst, src);
 }


 void Assembler::movq(Register dst, Register src) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
-  emit_rex_64(dst, src);
-  emit(0x8B);
-  emit_modrm(dst, src);
+  if (src.low_bits() == 4) {
+    emit_rex_64(src, dst);
+    emit(0x89);
+    emit_modrm(src, dst);
+  } else {
+    emit_rex_64(dst, src);
+    emit(0x8B);
+    emit_modrm(dst, src);
+  }
 }

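The movl/movq hunks above apply the same canonicalization to plain register moves, using MOV's paired opcodes 0x8B (mov reg, r/m) and 0x89 (mov r/m, reg). Hand-assembled for illustration (these bytes are not part of the patch), movq rdx, rsp has two equivalent encodings:

  48 8B D4    REX.W 8B /r, ModR/M reg=rdx, rm=rsp  (the form emitted before this patch)
  48 89 E2    REX.W 89 /r, ModR/M reg=rsp, rm=rdx  (emitted now, since src.low_bits() == 4)

Both load rsp's value into rdx; only the byte pattern differs, so the swap is purely an encoding choice.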
 void Assembler::movq(Register dst, Immediate value) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit_rex_64(dst);
   emit(0xC7);
   emit_modrm(0x0, dst);
   emit(value);  // Only 32-bit immediates are possible, not 8-bit immediates.
(...skipping 500 matching lines...)
 }


 void Assembler::xchg(Register dst, Register src) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   if (src.is(rax) || dst.is(rax)) {  // Single-byte encoding
     Register other = src.is(rax) ? dst : src;
     emit_rex_64(other);
     emit(0x90 | other.low_bits());
+  } else if (dst.low_bits() == 4) {
+    emit_rex_64(dst, src);
+    emit(0x87);
+    emit_modrm(dst, src);
   } else {
     emit_rex_64(src, dst);
     emit(0x87);
     emit_modrm(src, dst);
   }
 }

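For xchg, the existing fast path is untouched: when either operand is rax, the exchange is REX.W plus the single opcode byte 0x90 | reg, so xchg(rcx, rax) assembles to 48 91 (hand-assembled for illustration; not from the patch). The new middle branch mirrors the operand swap used elsewhere in this patch: when dst is rsp or r12 (low_bits() == 4), dst goes into the ModR/M reg field rather than the r/m field. For example, xchg(rsp, rbx) now emits 48 87 E3 instead of 48 87 DC; because an exchange is symmetric, both sequences perform the same operation.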
 void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit(0x48);  // REX.W
   emit(0xA3);
   emitq(reinterpret_cast<uintptr_t>(dst), mode);
 }


 void Assembler::store_rax(ExternalReference ref) {
   store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
 }


 void Assembler::testb(Register dst, Register src) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
-  if (dst.code() > 3 || src.code() > 3) {
-    // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
-    emit_rex_32(dst, src);
+  if (src.low_bits() == 4) {
+    emit_rex_32(src, dst);
+    emit(0x84);
+    emit_modrm(src, dst);
+  } else {
+    if (dst.code() > 3 || src.code() > 3) {
+      // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
+      emit_rex_32(dst, src);
+    }
+    emit(0x84);
+    emit_modrm(dst, src);
   }
-  emit(0x84);
-  emit_modrm(dst, src);
 }


 void Assembler::testb(Register reg, Immediate mask) {
   ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   if (reg.is(rax)) {
     emit(0xA8);
     emit(mask.value_);  // Low byte emitted.
(...skipping 30 matching lines...)
     emit_optional_rex_32(reg, op);
   }
   emit(0x84);
   emit_operand(reg, op);
 }


 void Assembler::testl(Register dst, Register src) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
-  emit_optional_rex_32(dst, src);
-  emit(0x85);
-  emit_modrm(dst, src);
+  if (src.low_bits() == 4) {
+    emit_optional_rex_32(src, dst);
+    emit(0x85);
+    emit_modrm(src, dst);
+  } else {
+    emit_optional_rex_32(dst, src);
+    emit(0x85);
+    emit_modrm(dst, src);
+  }
 }


 void Assembler::testl(Register reg, Immediate mask) {
   // testl with a mask that fits in the low byte is exactly testb.
   if (is_uint8(mask.value_)) {
     testb(reg, mask);
     return;
   }
   EnsureSpace ensure_space(this);
(...skipping 30 matching lines...)
   last_pc_ = pc_;
   emit_rex_64(reg, op);
   emit(0x85);
   emit_operand(reg, op);
 }


 void Assembler::testq(Register dst, Register src) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
-  emit_rex_64(dst, src);
-  emit(0x85);
-  emit_modrm(dst, src);
+  if (src.low_bits() == 4) {
+    emit_rex_64(src, dst);
+    emit(0x85);
+    emit_modrm(src, dst);
+  } else {
+    emit_rex_64(dst, src);
+    emit(0x85);
+    emit_modrm(dst, src);
+  }
 }

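Unlike ADD and MOV, TEST has no opcode pair with the operand order reversed (0x84 and 0x85 are its only register-register forms), but the operation is commutative, so the testb/testl/testq changes simply swap the ModR/M fields without altering the opcode when the source register is rsp or r12. Hand-assembled for illustration: testq(rcx, rsp) previously emitted 48 85 CC (reg=rcx, rm=rsp) and now emits 48 85 E1 (reg=rsp, rm=rcx); both AND the same two registers and set the flags identically.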
 void Assembler::testq(Register dst, Immediate mask) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   if (dst.is(rax)) {
     emit_rex_64();
     emit(0xA9);
     emit(mask);
(...skipping 722 matching lines...)
     RecordRelocInfo(RelocInfo::POSITION, current_position_);
     written_position_ = current_position_;
   }
 }


 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
                                   1 << RelocInfo::INTERNAL_REFERENCE |
                                   1 << RelocInfo::JS_RETURN;

+
+bool RelocInfo::IsCodedSpecially() {
+  // The deserializer needs to know whether a pointer is specially coded. Being
+  // specially coded on x64 means that it is a relative 32 bit address, as used
+  // by branch instructions.
+  return (1 << rmode_) & kApplyMask;
+}
+
+
+
 } }  // namespace v8::internal
+
+#endif  // V8_TARGET_ARCH_X64
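The #endif // V8_TARGET_ARCH_X64 added at the end of the file closes the #if defined(V8_TARGET_ARCH_X64) guard added near the top, so the whole file compiles away when V8 is built for a different target architecture. The new RelocInfo::IsCodedSpecially() reuses kApplyMask, defined just above it: a relocation mode is "specially coded" on x64 exactly when its bit is set in that mask, i.e. when the target is stored as a relative 32-bit address (code targets, internal references, and the JS return sequence). A minimal sketch of that bit test, using invented mode values rather than V8's actual RelocInfo::Mode numbering, is:

// Sketch only: the Mode values below are hypothetical and do not match V8's
// RelocInfo::Mode enumeration; they just illustrate the mask test.
#include <cstdio>

enum Mode { CODE_TARGET, EMBEDDED_OBJECT, INTERNAL_REFERENCE, JS_RETURN };

const int kCodeTargetMask = 1 << CODE_TARGET;
const int kApplyMask = kCodeTargetMask |
                       1 << INTERNAL_REFERENCE |
                       1 << JS_RETURN;

// Mirrors the logic of RelocInfo::IsCodedSpecially(): true iff the mode's bit
// is part of kApplyMask.
static bool IsCodedSpecially(Mode mode) {
  return ((1 << mode) & kApplyMask) != 0;
}

int main() {
  std::printf("CODE_TARGET: %d, EMBEDDED_OBJECT: %d\n",
              IsCodedSpecially(CODE_TARGET), IsCodedSpecially(EMBEDDED_OBJECT));
  return 0;
}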