OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 326 matching lines...) |
337 } | 337 } |
338 } | 338 } |
339 | 339 |
340 void MacroAssembler::Assert(Condition cc, const char* msg) { | 340 void MacroAssembler::Assert(Condition cc, const char* msg) { |
341 if (emit_debug_code()) Check(cc, msg); | 341 if (emit_debug_code()) Check(cc, msg); |
342 } | 342 } |
343 | 343 |
344 | 344 |
345 void MacroAssembler::AssertFastElements(Register elements) { | 345 void MacroAssembler::AssertFastElements(Register elements) { |
346 if (emit_debug_code()) { | 346 if (emit_debug_code()) { |
347 NearLabel ok; | 347 Label ok; |
348 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), | 348 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), |
349 Heap::kFixedArrayMapRootIndex); | 349 Heap::kFixedArrayMapRootIndex); |
350 j(equal, &ok); | 350 j(equal, &ok, Label::kNear); |
351 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), | 351 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), |
352 Heap::kFixedCOWArrayMapRootIndex); | 352 Heap::kFixedCOWArrayMapRootIndex); |
353 j(equal, &ok); | 353 j(equal, &ok, Label::kNear); |
354 Abort("JSObject with fast elements map has slow elements"); | 354 Abort("JSObject with fast elements map has slow elements"); |
355 bind(&ok); | 355 bind(&ok); |
356 } | 356 } |
357 } | 357 } |
358 | 358 |
359 | 359 |
360 void MacroAssembler::Check(Condition cc, const char* msg) { | 360 void MacroAssembler::Check(Condition cc, const char* msg) { |
361 NearLabel L; | 361 Label L; |
362 j(cc, &L); | 362 j(cc, &L, Label::kNear); |
363 Abort(msg); | 363 Abort(msg); |
364 // will not return here | 364 // will not return here |
365 bind(&L); | 365 bind(&L); |
366 } | 366 } |
367 | 367 |
368 | 368 |
369 void MacroAssembler::CheckStackAlignment() { | 369 void MacroAssembler::CheckStackAlignment() { |
370 int frame_alignment = OS::ActivationFrameAlignment(); | 370 int frame_alignment = OS::ActivationFrameAlignment(); |
371 int frame_alignment_mask = frame_alignment - 1; | 371 int frame_alignment_mask = frame_alignment - 1; |
372 if (frame_alignment > kPointerSize) { | 372 if (frame_alignment > kPointerSize) { |
373 ASSERT(IsPowerOf2(frame_alignment)); | 373 ASSERT(IsPowerOf2(frame_alignment)); |
374 NearLabel alignment_as_expected; | 374 Label alignment_as_expected; |
375 testq(rsp, Immediate(frame_alignment_mask)); | 375 testq(rsp, Immediate(frame_alignment_mask)); |
376 j(zero, &alignment_as_expected); | 376 j(zero, &alignment_as_expected, Label::kNear); |
377 // Abort if stack is not aligned. | 377 // Abort if stack is not aligned. |
378 int3(); | 378 int3(); |
379 bind(&alignment_as_expected); | 379 bind(&alignment_as_expected); |
380 } | 380 } |
381 } | 381 } |
382 | 382 |
383 | 383 |
384 void MacroAssembler::NegativeZeroTest(Register result, | 384 void MacroAssembler::NegativeZeroTest(Register result, |
385 Register op, | 385 Register op, |
386 Label* then_label) { | 386 Label* then_label) { |
387 NearLabel ok; | 387 Label ok; |
388 testl(result, result); | 388 testl(result, result); |
389 j(not_zero, &ok); | 389 j(not_zero, &ok, Label::kNear); |
390 testl(op, op); | 390 testl(op, op); |
391 j(sign, then_label); | 391 j(sign, then_label); |
392 bind(&ok); | 392 bind(&ok); |
393 } | 393 } |
394 | 394 |
395 | 395 |
396 void MacroAssembler::Abort(const char* msg) { | 396 void MacroAssembler::Abort(const char* msg) { |
397 // We want to pass the msg string like a smi to avoid GC | 397 // We want to pass the msg string like a smi to avoid GC |
398 // problems, however msg is not guaranteed to be aligned | 398 // problems, however msg is not guaranteed to be aligned |
399 // properly. Instead, we pass an aligned pointer that is | 399 // properly. Instead, we pass an aligned pointer that is |
(...skipping 425 matching lines...) |
825 | 825 |
826 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) { | 826 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) { |
827 if (emit_debug_code()) { | 827 if (emit_debug_code()) { |
828 movq(dst, | 828 movq(dst, |
829 reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)), | 829 reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)), |
830 RelocInfo::NONE); | 830 RelocInfo::NONE); |
831 cmpq(dst, kSmiConstantRegister); | 831 cmpq(dst, kSmiConstantRegister); |
832 if (allow_stub_calls()) { | 832 if (allow_stub_calls()) { |
833 Assert(equal, "Uninitialized kSmiConstantRegister"); | 833 Assert(equal, "Uninitialized kSmiConstantRegister"); |
834 } else { | 834 } else { |
835 NearLabel ok; | 835 Label ok; |
836 j(equal, &ok); | 836 j(equal, &ok, Label::kNear); |
837 int3(); | 837 int3(); |
838 bind(&ok); | 838 bind(&ok); |
839 } | 839 } |
840 } | 840 } |
841 int value = source->value(); | 841 int value = source->value(); |
842 if (value == 0) { | 842 if (value == 0) { |
843 xorl(dst, dst); | 843 xorl(dst, dst); |
844 return; | 844 return; |
845 } | 845 } |
846 bool negative = value < 0; | 846 bool negative = value < 0; |
(...skipping 41 matching lines...) |
888 if (!dst.is(src)) { | 888 if (!dst.is(src)) { |
889 movl(dst, src); | 889 movl(dst, src); |
890 } | 890 } |
891 shl(dst, Immediate(kSmiShift)); | 891 shl(dst, Immediate(kSmiShift)); |
892 } | 892 } |
893 | 893 |
894 | 894 |
895 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) { | 895 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) { |
896 if (emit_debug_code()) { | 896 if (emit_debug_code()) { |
897 testb(dst, Immediate(0x01)); | 897 testb(dst, Immediate(0x01)); |
898 NearLabel ok; | 898 Label ok; |
899 j(zero, &ok); | 899 j(zero, &ok, Label::kNear); |
900 if (allow_stub_calls()) { | 900 if (allow_stub_calls()) { |
901 Abort("Integer32ToSmiField writing to non-smi location"); | 901 Abort("Integer32ToSmiField writing to non-smi location"); |
902 } else { | 902 } else { |
903 int3(); | 903 int3(); |
904 } | 904 } |
905 bind(&ok); | 905 bind(&ok); |
906 } | 906 } |
907 ASSERT(kSmiShift % kBitsPerByte == 0); | 907 ASSERT(kSmiShift % kBitsPerByte == 0); |
908 movl(Operand(dst, kSmiShift / kBitsPerByte), src); | 908 movl(Operand(dst, kSmiShift / kBitsPerByte), src); |
909 } | 909 } |
(...skipping 480 matching lines...) |
1390 if (shift_value > 0) { | 1390 if (shift_value > 0) { |
1391 shl(dst, Immediate(shift_value)); | 1391 shl(dst, Immediate(shift_value)); |
1392 } | 1392 } |
1393 } | 1393 } |
1394 | 1394 |
1395 | 1395 |
1396 void MacroAssembler::SmiShiftLeft(Register dst, | 1396 void MacroAssembler::SmiShiftLeft(Register dst, |
1397 Register src1, | 1397 Register src1, |
1398 Register src2) { | 1398 Register src2) { |
1399 ASSERT(!dst.is(rcx)); | 1399 ASSERT(!dst.is(rcx)); |
1400 NearLabel result_ok; | |
1401 // Untag shift amount. | 1400 // Untag shift amount. |
1402 if (!dst.is(src1)) { | 1401 if (!dst.is(src1)) { |
1403 movq(dst, src1); | 1402 movq(dst, src1); |
1404 } | 1403 } |
1405 SmiToInteger32(rcx, src2); | 1404 SmiToInteger32(rcx, src2); |
1406 // Shift amount specified by lower 5 bits, not six as the shl opcode. | 1405 // Shift amount specified by lower 5 bits, not six as the shl opcode. |
1407 and_(rcx, Immediate(0x1f)); | 1406 and_(rcx, Immediate(0x1f)); |
1408 shl_cl(dst); | 1407 shl_cl(dst); |
1409 } | 1408 } |
1410 | 1409 |
(...skipping 364 matching lines...) |
1775 Operand handler_operand = ExternalOperand(handler_address); | 1774 Operand handler_operand = ExternalOperand(handler_address); |
1776 movq(rsp, handler_operand); | 1775 movq(rsp, handler_operand); |
1777 // get next in chain | 1776 // get next in chain |
1778 pop(handler_operand); | 1777 pop(handler_operand); |
1779 pop(rbp); // pop frame pointer | 1778 pop(rbp); // pop frame pointer |
1780 pop(rdx); // remove state | 1779 pop(rdx); // remove state |
1781 | 1780 |
1782 // Before returning we restore the context from the frame pointer if not NULL. | 1781 // Before returning we restore the context from the frame pointer if not NULL. |
1783 // The frame pointer is NULL in the exception handler of a JS entry frame. | 1782 // The frame pointer is NULL in the exception handler of a JS entry frame. |
1784 Set(rsi, 0); // Tentatively set context pointer to NULL | 1783 Set(rsi, 0); // Tentatively set context pointer to NULL |
1785 NearLabel skip; | 1784 Label skip; |
1786 cmpq(rbp, Immediate(0)); | 1785 cmpq(rbp, Immediate(0)); |
1787 j(equal, &skip); | 1786 j(equal, &skip, Label::kNear); |
1788 movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 1787 movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
1789 bind(&skip); | 1788 bind(&skip); |
1790 ret(0); | 1789 ret(0); |
1791 } | 1790 } |
1792 | 1791 |
1793 | 1792 |
1794 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, | 1793 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, |
1795 Register value) { | 1794 Register value) { |
1796 // Keep thrown value in rax. | 1795 // Keep thrown value in rax. |
1797 if (!value.is(rax)) { | 1796 if (!value.is(rax)) { |
1798 movq(rax, value); | 1797 movq(rax, value); |
1799 } | 1798 } |
1800 // Fetch top stack handler. | 1799 // Fetch top stack handler. |
1801 ExternalReference handler_address(Isolate::k_handler_address, isolate()); | 1800 ExternalReference handler_address(Isolate::k_handler_address, isolate()); |
1802 Load(rsp, handler_address); | 1801 Load(rsp, handler_address); |
1803 | 1802 |
1804 // Unwind the handlers until the ENTRY handler is found. | 1803 // Unwind the handlers until the ENTRY handler is found. |
1805 NearLabel loop, done; | 1804 Label loop, done; |
1806 bind(&loop); | 1805 bind(&loop); |
1807 // Load the type of the current stack handler. | 1806 // Load the type of the current stack handler. |
1808 const int kStateOffset = StackHandlerConstants::kStateOffset; | 1807 const int kStateOffset = StackHandlerConstants::kStateOffset; |
1809 cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY)); | 1808 cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY)); |
1810 j(equal, &done); | 1809 j(equal, &done, Label::kNear); |
1811 // Fetch the next handler in the list. | 1810 // Fetch the next handler in the list. |
1812 const int kNextOffset = StackHandlerConstants::kNextOffset; | 1811 const int kNextOffset = StackHandlerConstants::kNextOffset; |
1813 movq(rsp, Operand(rsp, kNextOffset)); | 1812 movq(rsp, Operand(rsp, kNextOffset)); |
1814 jmp(&loop); | 1813 jmp(&loop); |
1815 bind(&done); | 1814 bind(&done); |
1816 | 1815 |
1817 // Set the top handler address to next handler past the current ENTRY handler. | 1816 // Set the top handler address to next handler past the current ENTRY handler. |
1818 Operand handler_operand = ExternalOperand(handler_address); | 1817 Operand handler_operand = ExternalOperand(handler_address); |
1819 pop(handler_operand); | 1818 pop(handler_operand); |
1820 | 1819 |
(...skipping 71 matching lines...) |
1892 bool is_heap_object) { | 1891 bool is_heap_object) { |
1893 if (!is_heap_object) { | 1892 if (!is_heap_object) { |
1894 JumpIfSmi(obj, fail); | 1893 JumpIfSmi(obj, fail); |
1895 } | 1894 } |
1896 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map); | 1895 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map); |
1897 j(not_equal, fail); | 1896 j(not_equal, fail); |
1898 } | 1897 } |
1899 | 1898 |
1900 | 1899 |
1901 void MacroAssembler::AbortIfNotNumber(Register object) { | 1900 void MacroAssembler::AbortIfNotNumber(Register object) { |
1902 NearLabel ok; | 1901 Label ok; |
1903 Condition is_smi = CheckSmi(object); | 1902 Condition is_smi = CheckSmi(object); |
1904 j(is_smi, &ok); | 1903 j(is_smi, &ok, Label::kNear); |
1905 Cmp(FieldOperand(object, HeapObject::kMapOffset), | 1904 Cmp(FieldOperand(object, HeapObject::kMapOffset), |
1906 isolate()->factory()->heap_number_map()); | 1905 isolate()->factory()->heap_number_map()); |
1907 Assert(equal, "Operand not a number"); | 1906 Assert(equal, "Operand not a number"); |
1908 bind(&ok); | 1907 bind(&ok); |
1909 } | 1908 } |
1910 | 1909 |
1911 | 1910 |
1912 void MacroAssembler::AbortIfSmi(Register object) { | 1911 void MacroAssembler::AbortIfSmi(Register object) { |
1913 NearLabel ok; | |
1914 Condition is_smi = CheckSmi(object); | 1912 Condition is_smi = CheckSmi(object); |
1915 Assert(NegateCondition(is_smi), "Operand is a smi"); | 1913 Assert(NegateCondition(is_smi), "Operand is a smi"); |
1916 } | 1914 } |
1917 | 1915 |
1918 | 1916 |
1919 void MacroAssembler::AbortIfNotSmi(Register object) { | 1917 void MacroAssembler::AbortIfNotSmi(Register object) { |
1920 Condition is_smi = CheckSmi(object); | 1918 Condition is_smi = CheckSmi(object); |
1921 Assert(is_smi, "Operand is not a smi"); | 1919 Assert(is_smi, "Operand is not a smi"); |
1922 } | 1920 } |
1923 | 1921 |
(...skipping 42 matching lines...) |
1966 Label* miss) { | 1964 Label* miss) { |
1967 // Check that the receiver isn't a smi. | 1965 // Check that the receiver isn't a smi. |
1968 testl(function, Immediate(kSmiTagMask)); | 1966 testl(function, Immediate(kSmiTagMask)); |
1969 j(zero, miss); | 1967 j(zero, miss); |
1970 | 1968 |
1971 // Check that the function really is a function. | 1969 // Check that the function really is a function. |
1972 CmpObjectType(function, JS_FUNCTION_TYPE, result); | 1970 CmpObjectType(function, JS_FUNCTION_TYPE, result); |
1973 j(not_equal, miss); | 1971 j(not_equal, miss); |
1974 | 1972 |
1975 // Make sure that the function has an instance prototype. | 1973 // Make sure that the function has an instance prototype. |
1976 NearLabel non_instance; | 1974 Label non_instance; |
1977 testb(FieldOperand(result, Map::kBitFieldOffset), | 1975 testb(FieldOperand(result, Map::kBitFieldOffset), |
1978 Immediate(1 << Map::kHasNonInstancePrototype)); | 1976 Immediate(1 << Map::kHasNonInstancePrototype)); |
1979 j(not_zero, &non_instance); | 1977 j(not_zero, &non_instance, Label::kNear); |
1980 | 1978 |
1981 // Get the prototype or initial map from the function. | 1979 // Get the prototype or initial map from the function. |
1982 movq(result, | 1980 movq(result, |
1983 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 1981 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
1984 | 1982 |
1985 // If the prototype or initial map is the hole, don't return it and | 1983 // If the prototype or initial map is the hole, don't return it and |
1986 // simply miss the cache instead. This will allow us to allocate a | 1984 // simply miss the cache instead. This will allow us to allocate a |
1987 // prototype object on-demand in the runtime system. | 1985 // prototype object on-demand in the runtime system. |
1988 CompareRoot(result, Heap::kTheHoleValueRootIndex); | 1986 CompareRoot(result, Heap::kTheHoleValueRootIndex); |
1989 j(equal, miss); | 1987 j(equal, miss); |
1990 | 1988 |
1991 // If the function does not have an initial map, we're done. | 1989 // If the function does not have an initial map, we're done. |
1992 NearLabel done; | 1990 Label done; |
1993 CmpObjectType(result, MAP_TYPE, kScratchRegister); | 1991 CmpObjectType(result, MAP_TYPE, kScratchRegister); |
1994 j(not_equal, &done); | 1992 j(not_equal, &done, Label::kNear); |
1995 | 1993 |
1996 // Get the prototype from the initial map. | 1994 // Get the prototype from the initial map. |
1997 movq(result, FieldOperand(result, Map::kPrototypeOffset)); | 1995 movq(result, FieldOperand(result, Map::kPrototypeOffset)); |
1998 jmp(&done); | 1996 jmp(&done, Label::kNear); |
1999 | 1997 |
2000 // Non-instance prototype: Fetch prototype from constructor field | 1998 // Non-instance prototype: Fetch prototype from constructor field |
2001 // in initial map. | 1999 // in initial map. |
2002 bind(&non_instance); | 2000 bind(&non_instance); |
2003 movq(result, FieldOperand(result, Map::kConstructorOffset)); | 2001 movq(result, FieldOperand(result, Map::kConstructorOffset)); |
2004 | 2002 |
2005 // All done. | 2003 // All done. |
2006 bind(&done); | 2004 bind(&done); |
2007 } | 2005 } |
2008 | 2006 |
(...skipping 903 matching lines...) |
2912 CPU::FlushICache(address_, size_); | 2910 CPU::FlushICache(address_, size_); |
2913 | 2911 |
2914 // Check that the code was patched as expected. | 2912 // Check that the code was patched as expected. |
2915 ASSERT(masm_.pc_ == address_ + size_); | 2913 ASSERT(masm_.pc_ == address_ + size_); |
2916 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2914 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2917 } | 2915 } |
2918 | 2916 |
2919 } } // namespace v8::internal | 2917 } } // namespace v8::internal |
2920 | 2918 |
2921 #endif // V8_TARGET_ARCH_X64 | 2919 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |
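
The recurring change in this patch replaces the NearLabel type with a plain Label plus an explicit Label::kNear distance hint at each jump site. The following is only a minimal sketch of that call shape; Condition, Label, and FakeAssembler here are hypothetical stand-ins for illustration, not the real V8 assembler classes.

#include <cstdio>

// Hypothetical stand-in for V8's Condition codes.
enum Condition { equal, not_equal, zero, not_zero };

// Hypothetical stand-in for V8's Label: the near/far choice is no longer
// baked into the label's type, it is passed per jump as a distance hint.
struct Label {
  enum Distance { kFar, kNear };
};

// Hypothetical stand-in for MacroAssembler, reduced to the two calls the
// patch touches: a conditional jump taking a distance hint, and bind().
struct FakeAssembler {
  void j(Condition cc, Label* target, Label::Distance d = Label::kFar) {
    (void)target;
    // A real assembler would emit a short 8-bit-displacement jump for kNear
    // and a 32-bit-displacement jump for kFar; here we just report the hint.
    std::printf("jcc %d -> %s jump\n", static_cast<int>(cc),
                d == Label::kNear ? "near" : "far");
  }
  void bind(Label*) {}  // would fix the label at the current position
};

int main() {
  FakeAssembler masm;
  Label ok;                          // old code: NearLabel ok;
  masm.j(equal, &ok, Label::kNear);  // old code: masm.j(equal, &ok);
  masm.bind(&ok);
  return 0;
}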