| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. | 11 // with the distribution. |
| 12 // * Neither the name of Google Inc. nor the names of its | 12 // * Neither the name of Google Inc. nor the names of its |
| 13 // contributors may be used to endorse or promote products derived | 13 // contributors may be used to endorse or promote products derived |
| 14 // from this software without specific prior written permission. | 14 // from this software without specific prior written permission. |
| 15 // | 15 // |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include "v8.h" |
| 29 |
| 30 #if defined(V8_TARGET_ARCH_IA32) |
| 31 |
| 28 #include "ia32/lithium-codegen-ia32.h" | 32 #include "ia32/lithium-codegen-ia32.h" |
| 29 #include "code-stubs.h" | 33 #include "code-stubs.h" |
| 30 #include "stub-cache.h" | 34 #include "stub-cache.h" |
| 31 | 35 |
| 32 namespace v8 { | 36 namespace v8 { |
| 33 namespace internal { | 37 namespace internal { |
| 34 | 38 |
| 35 | 39 |
| 36 class SafepointGenerator : public PostCallGenerator { | 40 class SafepointGenerator : public PostCallGenerator { |
| 37 public: | 41 public: |
| (...skipping 216 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 254 // Local or spill slot. Skip the frame pointer, function, and | 258 // Local or spill slot. Skip the frame pointer, function, and |
| 255 // context in the fixed part of the frame. | 259 // context in the fixed part of the frame. |
| 256 return Operand(ebp, -(index + 3) * kPointerSize); | 260 return Operand(ebp, -(index + 3) * kPointerSize); |
| 257 } else { | 261 } else { |
| 258 // Incoming parameter. Skip the return address. | 262 // Incoming parameter. Skip the return address. |
| 259 return Operand(ebp, -(index - 1) * kPointerSize); | 263 return Operand(ebp, -(index - 1) * kPointerSize); |
| 260 } | 264 } |
| 261 } | 265 } |
| 262 | 266 |
| 263 | 267 |
| 268 void LCodeGen::WriteTranslation(LEnvironment* environment, |
| 269 Translation* translation) { |
| 270 if (environment == NULL) return; |
| 271 |
| 272 // The translation includes one command per value in the environment. |
| 273 int translation_size = environment->values()->length(); |
| 274 // The output frame height does not include the parameters. |
| 275 int height = translation_size - environment->parameter_count(); |
| 276 |
| 277 WriteTranslation(environment->outer(), translation); |
| 278 int closure_id = DefineDeoptimizationLiteral(environment->closure()); |
| 279 translation->BeginFrame(environment->ast_id(), closure_id, height); |
| 280 for (int i = 0; i < translation_size; ++i) { |
| 281 LOperand* value = environment->values()->at(i); |
| 282 // spilled_registers_ and spilled_double_registers_ are either |
| 283 // both NULL or both set. |
| 284 if (environment->spilled_registers() != NULL && value != NULL) { |
| 285 if (value->IsRegister() && |
| 286 environment->spilled_registers()[value->index()] != NULL) { |
| 287 translation->MarkDuplicate(); |
| 288 AddToTranslation(translation, |
| 289 environment->spilled_registers()[value->index()], |
| 290 environment->HasTaggedValueAt(i)); |
| 291 } else if ( |
| 292 value->IsDoubleRegister() && |
| 293 environment->spilled_double_registers()[value->index()] != NULL) { |
| 294 translation->MarkDuplicate(); |
| 295 AddToTranslation( |
| 296 translation, |
| 297 environment->spilled_double_registers()[value->index()], |
| 298 false); |
| 299 } |
| 300 } |
| 301 |
| 302 AddToTranslation(translation, value, environment->HasTaggedValueAt(i)); |
| 303 } |
| 304 } |
| 305 |
| 306 |
| 264 void LCodeGen::AddToTranslation(Translation* translation, | 307 void LCodeGen::AddToTranslation(Translation* translation, |
| 265 LOperand* op, | 308 LOperand* op, |
| 266 bool is_tagged) { | 309 bool is_tagged) { |
| 267 if (op == NULL) { | 310 if (op == NULL) { |
| 268 // TODO(twuerthinger): Introduce marker operands to indicate that this value | 311 // TODO(twuerthinger): Introduce marker operands to indicate that this value |
| 269 // is not present and must be reconstructed from the deoptimizer. Currently | 312 // is not present and must be reconstructed from the deoptimizer. Currently |
| 270 // this is only used for the arguments object. | 313 // this is only used for the arguments object. |
| 271 translation->StoreArgumentsObject(); | 314 translation->StoreArgumentsObject(); |
| 272 } else if (op->IsStackSlot()) { | 315 } else if (op->IsStackSlot()) { |
| 273 if (is_tagged) { | 316 if (is_tagged) { |
| (...skipping 104 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 378 // Layout of the translation: | 421 // Layout of the translation: |
| 379 // 0 ........................................................ size - 1 + 4 | 422 // 0 ........................................................ size - 1 + 4 |
| 380 // [expression stack including arguments] [locals] [4 words] [parameters] | 423 // [expression stack including arguments] [locals] [4 words] [parameters] |
| 381 // |>------------ translation_size ------------<| | 424 // |>------------ translation_size ------------<| |
| 382 | 425 |
| 383 int frame_count = 0; | 426 int frame_count = 0; |
| 384 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { | 427 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { |
| 385 ++frame_count; | 428 ++frame_count; |
| 386 } | 429 } |
| 387 Translation translation(&translations_, frame_count); | 430 Translation translation(&translations_, frame_count); |
| 388 environment->WriteTranslation(this, &translation); | 431 WriteTranslation(environment, &translation); |
| 389 int deoptimization_index = deoptimizations_.length(); | 432 int deoptimization_index = deoptimizations_.length(); |
| 390 environment->Register(deoptimization_index, translation.index()); | 433 environment->Register(deoptimization_index, translation.index()); |
| 391 deoptimizations_.Add(environment); | 434 deoptimizations_.Add(environment); |
| 392 } | 435 } |
| 393 } | 436 } |
| 394 | 437 |
| 395 | 438 |
| 396 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { | 439 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { |
| 397 RegisterEnvironmentForDeoptimization(environment); | 440 RegisterEnvironmentForDeoptimization(environment); |
| 398 ASSERT(environment->HasBeenRegistered()); | 441 ASSERT(environment->HasBeenRegistered()); |
| (...skipping 158 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 557 | 600 |
| 558 | 601 |
| 559 void LCodeGen::DoParallelMove(LParallelMove* move) { | 602 void LCodeGen::DoParallelMove(LParallelMove* move) { |
| 560 // xmm0 must always be a scratch register. | 603 // xmm0 must always be a scratch register. |
| 561 XMMRegister xmm_scratch = xmm0; | 604 XMMRegister xmm_scratch = xmm0; |
| 562 LUnallocated marker_operand(LUnallocated::NONE); | 605 LUnallocated marker_operand(LUnallocated::NONE); |
| 563 | 606 |
| 564 Register cpu_scratch = esi; | 607 Register cpu_scratch = esi; |
| 565 bool destroys_cpu_scratch = false; | 608 bool destroys_cpu_scratch = false; |
| 566 | 609 |
| 567 LGapResolver resolver(move->move_operands(), &marker_operand); | 610 const ZoneList<LMoveOperands>* moves = |
| 568 const ZoneList<LMoveOperands>* moves = resolver.ResolveInReverseOrder(); | 611 resolver_.Resolve(move->move_operands(), &marker_operand); |
| 569 for (int i = moves->length() - 1; i >= 0; --i) { | 612 for (int i = moves->length() - 1; i >= 0; --i) { |
| 570 LMoveOperands move = moves->at(i); | 613 LMoveOperands move = moves->at(i); |
| 571 LOperand* from = move.from(); | 614 LOperand* from = move.from(); |
| 572 LOperand* to = move.to(); | 615 LOperand* to = move.to(); |
| 573 ASSERT(!from->IsDoubleRegister() || | 616 ASSERT(!from->IsDoubleRegister() || |
| 574 !ToDoubleRegister(from).is(xmm_scratch)); | 617 !ToDoubleRegister(from).is(xmm_scratch)); |
| 575 ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(xmm_scratch)); | 618 ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(xmm_scratch)); |
| 576 ASSERT(!from->IsRegister() || !ToRegister(from).is(cpu_scratch)); | 619 ASSERT(!from->IsRegister() || !ToRegister(from).is(cpu_scratch)); |
| 577 ASSERT(!to->IsRegister() || !ToRegister(to).is(cpu_scratch)); | 620 ASSERT(!to->IsRegister() || !ToRegister(to).is(cpu_scratch)); |
| 578 if (from->IsConstantOperand()) { | 621 if (from->IsConstantOperand()) { |
| (...skipping 354 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 933 __ sub(ToRegister(left), ToOperand(right)); | 976 __ sub(ToRegister(left), ToOperand(right)); |
| 934 } | 977 } |
| 935 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 978 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 936 DeoptimizeIf(overflow, instr->environment()); | 979 DeoptimizeIf(overflow, instr->environment()); |
| 937 } | 980 } |
| 938 } | 981 } |
| 939 | 982 |
| 940 | 983 |
| 941 void LCodeGen::DoConstantI(LConstantI* instr) { | 984 void LCodeGen::DoConstantI(LConstantI* instr) { |
| 942 ASSERT(instr->result()->IsRegister()); | 985 ASSERT(instr->result()->IsRegister()); |
| 943 __ mov(ToRegister(instr->result()), instr->value()); | 986 __ Set(ToRegister(instr->result()), Immediate(instr->value())); |
| 944 } | 987 } |
| 945 | 988 |
| 946 | 989 |
| 947 void LCodeGen::DoConstantD(LConstantD* instr) { | 990 void LCodeGen::DoConstantD(LConstantD* instr) { |
| 948 ASSERT(instr->result()->IsDoubleRegister()); | 991 ASSERT(instr->result()->IsDoubleRegister()); |
| 949 XMMRegister res = ToDoubleRegister(instr->result()); | 992 XMMRegister res = ToDoubleRegister(instr->result()); |
| 950 double v = instr->value(); | 993 double v = instr->value(); |
| 951 // Use xor to produce +0.0 in a fast and compact way, but avoid to | 994 // Use xor to produce +0.0 in a fast and compact way, but avoid to |
| 952 // do so if the constant is -0.0. | 995 // do so if the constant is -0.0. |
| 953 if (BitCast<uint64_t, double>(v) == 0) { | 996 if (BitCast<uint64_t, double>(v) == 0) { |
| (...skipping 12 matching lines...) Expand all Loading... |
| 966 __ push_imm32(lower); | 1009 __ push_imm32(lower); |
| 967 __ movdbl(res, Operand(esp, 0)); | 1010 __ movdbl(res, Operand(esp, 0)); |
| 968 __ add(Operand(esp), Immediate(2 * kPointerSize)); | 1011 __ add(Operand(esp), Immediate(2 * kPointerSize)); |
| 969 } | 1012 } |
| 970 } | 1013 } |
| 971 } | 1014 } |
| 972 | 1015 |
| 973 | 1016 |
| 974 void LCodeGen::DoConstantT(LConstantT* instr) { | 1017 void LCodeGen::DoConstantT(LConstantT* instr) { |
| 975 ASSERT(instr->result()->IsRegister()); | 1018 ASSERT(instr->result()->IsRegister()); |
| 976 __ mov(ToRegister(instr->result()), Immediate(instr->value())); | 1019 __ Set(ToRegister(instr->result()), Immediate(instr->value())); |
| 977 } | 1020 } |
| 978 | 1021 |
| 979 | 1022 |
| 980 void LCodeGen::DoArrayLength(LArrayLength* instr) { | 1023 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) { |
| 981 Register result = ToRegister(instr->result()); | 1024 Register result = ToRegister(instr->result()); |
| 982 | 1025 Register array = ToRegister(instr->input()); |
| 983 if (instr->hydrogen()->value()->IsLoadElements()) { | 1026 __ mov(result, FieldOperand(array, JSArray::kLengthOffset)); |
| 984 // We load the length directly from the elements array. | |
| 985 Register elements = ToRegister(instr->input()); | |
| 986 __ mov(result, FieldOperand(elements, FixedArray::kLengthOffset)); | |
| 987 } else { | |
| 988 // Check that the receiver really is an array. | |
| 989 Register array = ToRegister(instr->input()); | |
| 990 Register temporary = ToRegister(instr->temporary()); | |
| 991 __ CmpObjectType(array, JS_ARRAY_TYPE, temporary); | |
| 992 DeoptimizeIf(not_equal, instr->environment()); | |
| 993 | |
| 994 // Load length directly from the array. | |
| 995 __ mov(result, FieldOperand(array, JSArray::kLengthOffset)); | |
| 996 } | |
| 997 } | 1027 } |
| 998 | 1028 |
| 999 | 1029 |
| 1030 void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) { |
| 1031 Register result = ToRegister(instr->result()); |
| 1032 Register array = ToRegister(instr->input()); |
| 1033 __ mov(result, FieldOperand(array, FixedArray::kLengthOffset)); |
| 1034 } |
| 1035 |
| 1036 |
| 1000 void LCodeGen::DoValueOf(LValueOf* instr) { | 1037 void LCodeGen::DoValueOf(LValueOf* instr) { |
| 1001 Register input = ToRegister(instr->input()); | 1038 Register input = ToRegister(instr->input()); |
| 1002 Register result = ToRegister(instr->result()); | 1039 Register result = ToRegister(instr->result()); |
| 1003 Register map = ToRegister(instr->temporary()); | 1040 Register map = ToRegister(instr->temporary()); |
| 1004 ASSERT(input.is(result)); | 1041 ASSERT(input.is(result)); |
| 1005 NearLabel done; | 1042 NearLabel done; |
| 1006 // If the object is a smi return the object. | 1043 // If the object is a smi return the object. |
| 1007 __ test(input, Immediate(kSmiTagMask)); | 1044 __ test(input, Immediate(kSmiTagMask)); |
| 1008 __ j(zero, &done); | 1045 __ j(zero, &done); |
| 1009 | 1046 |
| (...skipping 683 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1693 Register reg = ToRegister(instr->input()); | 1730 Register reg = ToRegister(instr->input()); |
| 1694 int true_block = instr->true_block_id(); | 1731 int true_block = instr->true_block_id(); |
| 1695 int false_block = instr->false_block_id(); | 1732 int false_block = instr->false_block_id(); |
| 1696 | 1733 |
| 1697 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); | 1734 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); |
| 1698 EmitBranch(true_block, false_block, equal); | 1735 EmitBranch(true_block, false_block, equal); |
| 1699 } | 1736 } |
| 1700 | 1737 |
| 1701 | 1738 |
| 1702 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 1739 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
| 1703 // Object and function are in fixed registers eax and edx. | 1740 // Object and function are in fixed registers defined by the stub. |
| 1704 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 1741 InstanceofStub stub(InstanceofStub::kArgsInRegisters); |
| 1705 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1742 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 1706 | 1743 |
| 1707 NearLabel true_value, done; | 1744 NearLabel true_value, done; |
| 1708 __ test(eax, Operand(eax)); | 1745 __ test(eax, Operand(eax)); |
| 1709 __ j(zero, &true_value); | 1746 __ j(zero, &true_value); |
| 1710 __ mov(ToRegister(instr->result()), FACTORY->false_value()); | 1747 __ mov(ToRegister(instr->result()), FACTORY->false_value()); |
| 1711 __ jmp(&done); | 1748 __ jmp(&done); |
| 1712 __ bind(&true_value); | 1749 __ bind(&true_value); |
| 1713 __ mov(ToRegister(instr->result()), FACTORY->true_value()); | 1750 __ mov(ToRegister(instr->result()), FACTORY->true_value()); |
| 1714 __ bind(&done); | 1751 __ bind(&done); |
| 1715 } | 1752 } |
| 1716 | 1753 |
| 1717 | 1754 |
| 1718 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { | 1755 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { |
| 1719 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1756 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1720 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1757 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1721 | 1758 |
| 1722 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 1759 InstanceofStub stub(InstanceofStub::kArgsInRegisters); |
| 1723 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1760 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 1724 __ test(eax, Operand(eax)); | 1761 __ test(eax, Operand(eax)); |
| 1725 EmitBranch(true_block, false_block, zero); | 1762 EmitBranch(true_block, false_block, zero); |
| 1726 } | 1763 } |
| 1727 | 1764 |
| 1728 | 1765 |
| 1766 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
| 1767 class DeferredInstanceOfKnownGlobal: public LDeferredCode { |
| 1768 public: |
| 1769 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, |
| 1770 LInstanceOfKnownGlobal* instr) |
| 1771 : LDeferredCode(codegen), instr_(instr) { } |
| 1772 virtual void Generate() { |
| 1773 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); |
| 1774 } |
| 1775 |
| 1776 Label* map_check() { return &map_check_; } |
| 1777 |
| 1778 private: |
| 1779 LInstanceOfKnownGlobal* instr_; |
| 1780 Label map_check_; |
| 1781 }; |
| 1782 |
| 1783 DeferredInstanceOfKnownGlobal* deferred; |
| 1784 deferred = new DeferredInstanceOfKnownGlobal(this, instr); |
| 1785 |
| 1786 Label done, false_result; |
| 1787 Register object = ToRegister(instr->input()); |
| 1788 Register temp = ToRegister(instr->temp()); |
| 1789 |
| 1790 // A Smi is not instance of anything. |
| 1791 __ test(object, Immediate(kSmiTagMask)); |
| 1792 __ j(zero, &false_result, not_taken); |
| 1793 |
| 1794 // This is the inlined call site instanceof cache. The two occurrences of the |
| 1795 // hole value will be patched to the last map/result pair generated by the |
| 1796 // instanceof stub. |
| 1797 NearLabel cache_miss; |
| 1798 Register map = ToRegister(instr->temp()); |
| 1799 __ mov(map, FieldOperand(object, HeapObject::kMapOffset)); |
| 1800 __ bind(deferred->map_check()); // Label for calculating code patching. |
| 1801 __ cmp(map, FACTORY->the_hole_value()); // Patched to cached map. |
| 1802 __ j(not_equal, &cache_miss, not_taken); |
| 1803 __ mov(eax, FACTORY->the_hole_value()); // Patched to either true or false. |
| 1804 __ jmp(&done); |
| 1805 |
| 1806 // The inlined call site cache did not match. Check null and string before |
| 1807 // calling the deferred code. |
| 1808 __ bind(&cache_miss); |
| 1809 // Null is not instance of anything. |
| 1810 __ cmp(object, FACTORY->null_value()); |
| 1811 __ j(equal, &false_result); |
| 1812 |
| 1813 // String values are not instances of anything. |
| 1814 Condition is_string = masm_->IsObjectStringType(object, temp, temp); |
| 1815 __ j(is_string, &false_result); |
| 1816 |
| 1817 // Go to the deferred code. |
| 1818 __ jmp(deferred->entry()); |
| 1819 |
| 1820 __ bind(&false_result); |
| 1821 __ mov(ToRegister(instr->result()), FACTORY->false_value()); |
| 1822 |
| 1823 // Here result has either true or false. Deferred code also produces true or |
| 1824 // false object. |
| 1825 __ bind(deferred->exit()); |
| 1826 __ bind(&done); |
| 1827 } |
| 1828 |
| 1829 |
| 1830 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
| 1831 Label* map_check) { |
| 1832 __ PushSafepointRegisters(); |
| 1833 |
| 1834 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; |
| 1835 flags = static_cast<InstanceofStub::Flags>( |
| 1836 flags | InstanceofStub::kArgsInRegisters); |
| 1837 flags = static_cast<InstanceofStub::Flags>( |
| 1838 flags | InstanceofStub::kCallSiteInlineCheck); |
| 1839 flags = static_cast<InstanceofStub::Flags>( |
| 1840 flags | InstanceofStub::kReturnTrueFalseObject); |
| 1841 InstanceofStub stub(flags); |
| 1842 |
| 1843 // Get the temp register reserved by the instruction. This needs to be edi as |
| 1844 // its slot in the pushed safepoint register area is used to communicate the |
| 1845 // offset to the location of the map check. |
| 1846 Register temp = ToRegister(instr->temp()); |
| 1847 ASSERT(temp.is(edi)); |
| 1848 __ mov(InstanceofStub::right(), Immediate(instr->function())); |
| 1849 static const int kAdditionalDelta = 13; |
| 1850 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; |
| 1851 Label before_push_delta; |
| 1852 __ bind(&before_push_delta); |
| 1853 __ mov(temp, Immediate(delta)); |
| 1854 __ mov(Operand(esp, EspIndexForPushAll(temp) * kPointerSize), temp); |
| 1855 __ call(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 1856 ASSERT_EQ(kAdditionalDelta, |
| 1857 masm_->SizeOfCodeGeneratedSince(&before_push_delta)); |
| 1858 RecordSafepointWithRegisters( |
| 1859 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); |
| 1860 // Put the result value into the eax slot and restore all registers. |
| 1861 __ mov(Operand(esp, EspIndexForPushAll(eax) * kPointerSize), eax); |
| 1862 |
| 1863 __ PopSafepointRegisters(); |
| 1864 } |
| 1865 |
| 1866 |
| 1729 static Condition ComputeCompareCondition(Token::Value op) { | 1867 static Condition ComputeCompareCondition(Token::Value op) { |
| 1730 switch (op) { | 1868 switch (op) { |
| 1731 case Token::EQ_STRICT: | 1869 case Token::EQ_STRICT: |
| 1732 case Token::EQ: | 1870 case Token::EQ: |
| 1733 return equal; | 1871 return equal; |
| 1734 case Token::LT: | 1872 case Token::LT: |
| 1735 return less; | 1873 return less; |
| 1736 case Token::GT: | 1874 case Token::GT: |
| 1737 return greater; | 1875 return greater; |
| 1738 case Token::LTE: | 1876 case Token::LTE: |
| (...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1831 ASSERT(ToRegister(instr->object()).is(eax)); | 1969 ASSERT(ToRegister(instr->object()).is(eax)); |
| 1832 ASSERT(ToRegister(instr->result()).is(eax)); | 1970 ASSERT(ToRegister(instr->result()).is(eax)); |
| 1833 | 1971 |
| 1834 __ mov(ecx, instr->name()); | 1972 __ mov(ecx, instr->name()); |
| 1835 Handle<Code> ic(Isolate::Current()->builtins()->builtin( | 1973 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 1836 Builtins::LoadIC_Initialize)); | 1974 Builtins::LoadIC_Initialize)); |
| 1837 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 1975 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 1838 } | 1976 } |
| 1839 | 1977 |
| 1840 | 1978 |
| 1979 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { |
| 1980 Register function = ToRegister(instr->function()); |
| 1981 Register temp = ToRegister(instr->temporary()); |
| 1982 Register result = ToRegister(instr->result()); |
| 1983 |
| 1984 // Check that the function really is a function. |
| 1985 __ CmpObjectType(function, JS_FUNCTION_TYPE, result); |
| 1986 DeoptimizeIf(not_equal, instr->environment()); |
| 1987 |
| 1988 // Check whether the function has an instance prototype. |
| 1989 NearLabel non_instance; |
| 1990 __ test_b(FieldOperand(result, Map::kBitFieldOffset), |
| 1991 1 << Map::kHasNonInstancePrototype); |
| 1992 __ j(not_zero, &non_instance); |
| 1993 |
| 1994 // Get the prototype or initial map from the function. |
| 1995 __ mov(result, |
| 1996 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
| 1997 |
| 1998 // Check that the function has a prototype or an initial map. |
| 1999 __ cmp(Operand(result), Immediate(FACTORY->the_hole_value())); |
| 2000 DeoptimizeIf(equal, instr->environment()); |
| 2001 |
| 2002 // If the function does not have an initial map, we're done. |
| 2003 NearLabel done; |
| 2004 __ CmpObjectType(result, MAP_TYPE, temp); |
| 2005 __ j(not_equal, &done); |
| 2006 |
| 2007 // Get the prototype from the initial map. |
| 2008 __ mov(result, FieldOperand(result, Map::kPrototypeOffset)); |
| 2009 __ jmp(&done); |
| 2010 |
| 2011 // Non-instance prototype: Fetch prototype from constructor field |
| 2012 // in the function's map. |
| 2013 __ bind(&non_instance); |
| 2014 __ mov(result, FieldOperand(result, Map::kConstructorOffset)); |
| 2015 |
| 2016 // All done. |
| 2017 __ bind(&done); |
| 2018 } |
| 2019 |
| 2020 |
| 1841 void LCodeGen::DoLoadElements(LLoadElements* instr) { | 2021 void LCodeGen::DoLoadElements(LLoadElements* instr) { |
| 1842 ASSERT(instr->result()->Equals(instr->input())); | 2022 ASSERT(instr->result()->Equals(instr->input())); |
| 1843 Register reg = ToRegister(instr->input()); | 2023 Register reg = ToRegister(instr->input()); |
| 1844 __ mov(reg, FieldOperand(reg, JSObject::kElementsOffset)); | 2024 __ mov(reg, FieldOperand(reg, JSObject::kElementsOffset)); |
| 1845 if (FLAG_debug_code) { | 2025 if (FLAG_debug_code) { |
| 1846 NearLabel done; | 2026 NearLabel done; |
| 1847 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), | 2027 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), |
| 1848 Immediate(FACTORY->fixed_array_map())); | 2028 Immediate(FACTORY->fixed_array_map())); |
| 1849 __ j(equal, &done); | 2029 __ j(equal, &done); |
| 1850 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), | 2030 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), |
| 1851 Immediate(FACTORY->fixed_cow_array_map())); | 2031 Immediate(FACTORY->fixed_cow_array_map())); |
| 1852 __ Check(equal, "Check for fast elements failed."); | 2032 __ Check(equal, "Check for fast elements failed."); |
| 1853 __ bind(&done); | 2033 __ bind(&done); |
| 1854 } | 2034 } |
| 1855 } | 2035 } |
| 1856 | 2036 |
| 1857 | 2037 |
| 1858 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { | 2038 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { |
| 1859 Register arguments = ToRegister(instr->arguments()); | 2039 Register arguments = ToRegister(instr->arguments()); |
| 1860 Register length = ToRegister(instr->length()); | 2040 Register length = ToRegister(instr->length()); |
| 1861 Operand index = ToOperand(instr->index()); | 2041 Operand index = ToOperand(instr->index()); |
| 1862 Register result = ToRegister(instr->result()); | 2042 Register result = ToRegister(instr->result()); |
| 1863 | 2043 |
| 1864 __ sub(length, index); | 2044 __ sub(length, index); |
| 1865 DeoptimizeIf(below_equal, instr->environment()); | 2045 DeoptimizeIf(below_equal, instr->environment()); |
| 1866 | 2046 |
| 2047 // There are two words between the frame pointer and the last argument. |
| 2048 // Subtracting from length accounts for one of them; add one more. |
| 1867 __ mov(result, Operand(arguments, length, times_4, kPointerSize)); | 2049 __ mov(result, Operand(arguments, length, times_4, kPointerSize)); |
| 1868 } | 2050 } |
| 1869 | 2051 |
| 1870 | 2052 |
| 1871 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) { | 2053 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) { |
| 1872 Register elements = ToRegister(instr->elements()); | 2054 Register elements = ToRegister(instr->elements()); |
| 1873 Register key = ToRegister(instr->key()); | 2055 Register key = ToRegister(instr->key()); |
| 1874 Register result; | 2056 Register result = ToRegister(instr->result()); |
| 1875 if (instr->load_result() != NULL) { | 2057 ASSERT(result.is(elements)); |
| 1876 result = ToRegister(instr->load_result()); | |
| 1877 } else { | |
| 1878 result = ToRegister(instr->result()); | |
| 1879 ASSERT(result.is(elements)); | |
| 1880 } | |
| 1881 | 2058 |
| 1882 // Load the result. | 2059 // Load the result. |
| 1883 __ mov(result, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize)); | 2060 __ mov(result, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize)); |
| 1884 | 2061 |
| 1885 Representation r = instr->hydrogen()->representation(); | 2062 // Check for the hole value. |
| 1886 if (r.IsInteger32()) { | 2063 __ cmp(result, FACTORY->the_hole_value()); |
| 1887 // Untag and check for smi. | 2064 DeoptimizeIf(equal, instr->environment()); |
| 1888 __ SmiUntag(result); | |
| 1889 DeoptimizeIf(carry, instr->environment()); | |
| 1890 } else if (r.IsDouble()) { | |
| 1891 EmitNumberUntagD(result, | |
| 1892 ToDoubleRegister(instr->result()), | |
| 1893 instr->environment()); | |
| 1894 } else { | |
| 1895 // Check for the hole value. | |
| 1896 ASSERT(r.IsTagged()); | |
| 1897 __ cmp(result, FACTORY->the_hole_value()); | |
| 1898 DeoptimizeIf(equal, instr->environment()); | |
| 1899 } | |
| 1900 } | 2065 } |
| 1901 | 2066 |
| 1902 | 2067 |
| 1903 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { | 2068 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { |
| 1904 ASSERT(ToRegister(instr->object()).is(edx)); | 2069 ASSERT(ToRegister(instr->object()).is(edx)); |
| 1905 ASSERT(ToRegister(instr->key()).is(eax)); | 2070 ASSERT(ToRegister(instr->key()).is(eax)); |
| 1906 | 2071 |
| 1907 Handle<Code> ic(Isolate::Current()->builtins()->builtin( | 2072 Handle<Code> ic(Isolate::Current()->builtins()->builtin( |
| 1908 Builtins::KeyedLoadIC_Initialize)); | 2073 Builtins::KeyedLoadIC_Initialize)); |
| 1909 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2074 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 1910 } | 2075 } |
| 1911 | 2076 |
| 1912 | 2077 |
| 1913 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 2078 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
| 1914 Register result = ToRegister(instr->result()); | 2079 Register result = ToRegister(instr->result()); |
| 1915 | 2080 |
| 1916 // Check for arguments adapter frame. | 2081 // Check for arguments adapter frame. |
| 1917 Label done, adapted; | 2082 NearLabel done, adapted; |
| 1918 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); | 2083 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); |
| 1919 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset)); | 2084 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset)); |
| 1920 __ cmp(Operand(result), | 2085 __ cmp(Operand(result), |
| 1921 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 2086 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 1922 __ j(equal, &adapted); | 2087 __ j(equal, &adapted); |
| 1923 | 2088 |
| 1924 // No arguments adaptor frame. | 2089 // No arguments adaptor frame. |
| 1925 __ mov(result, Operand(ebp)); | 2090 __ mov(result, Operand(ebp)); |
| 1926 __ jmp(&done); | 2091 __ jmp(&done); |
| 1927 | 2092 |
| 1928 // Arguments adaptor frame present. | 2093 // Arguments adaptor frame present. |
| 1929 __ bind(&adapted); | 2094 __ bind(&adapted); |
| 1930 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); | 2095 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); |
| 1931 | 2096 |
| 1932 // Done. Pointer to topmost argument is in result. | 2097 // Result is the frame pointer for the frame if not adapted and for the real |
| 2098 // frame below the adaptor frame if adapted. |
| 1933 __ bind(&done); | 2099 __ bind(&done); |
| 1934 } | 2100 } |
| 1935 | 2101 |
| 1936 | 2102 |
| 1937 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { | 2103 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { |
| 1938 Operand elem = ToOperand(instr->input()); | 2104 Operand elem = ToOperand(instr->input()); |
| 1939 Register result = ToRegister(instr->result()); | 2105 Register result = ToRegister(instr->result()); |
| 1940 | 2106 |
| 1941 Label done; | 2107 NearLabel done; |
| 1942 | 2108 |
| 1943 // No arguments adaptor frame. Number of arguments is fixed. | 2109 // If no arguments adaptor frame the number of arguments is fixed. |
| 1944 __ cmp(ebp, elem); | 2110 __ cmp(ebp, elem); |
| 1945 __ mov(result, Immediate(scope()->num_parameters())); | 2111 __ mov(result, Immediate(scope()->num_parameters())); |
| 1946 __ j(equal, &done); | 2112 __ j(equal, &done); |
| 1947 | 2113 |
| 1948 // Arguments adaptor frame present. Get argument length from there. | 2114 // Arguments adaptor frame present. Get argument length from there. |
| 1949 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); | 2115 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); |
| 1950 __ mov(result, Operand(result, | 2116 __ mov(result, Operand(result, |
| 1951 ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2117 ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 1952 __ SmiUntag(result); | 2118 __ SmiUntag(result); |
| 1953 | 2119 |
| 1954 // Done. Argument length is in result register. | 2120 // Argument length is in result register. |
| 1955 __ bind(&done); | 2121 __ bind(&done); |
| 1956 } | 2122 } |
| 1957 | 2123 |
| 1958 | 2124 |
| 1959 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 2125 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
| 1960 Register receiver = ToRegister(instr->receiver()); | 2126 Register receiver = ToRegister(instr->receiver()); |
| 1961 ASSERT(ToRegister(instr->function()).is(edi)); | 2127 ASSERT(ToRegister(instr->function()).is(edi)); |
| 1962 ASSERT(ToRegister(instr->result()).is(eax)); | 2128 ASSERT(ToRegister(instr->result()).is(eax)); |
| 1963 | 2129 |
| 1964 // If the receiver is null or undefined, we have to pass the | 2130 // If the receiver is null or undefined, we have to pass the |
| (...skipping 533 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2498 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); | 2664 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); |
| 2499 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); | 2665 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); |
| 2500 int offset = | 2666 int offset = |
| 2501 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize; | 2667 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize; |
| 2502 __ mov(FieldOperand(elements, offset), value); | 2668 __ mov(FieldOperand(elements, offset), value); |
| 2503 } else { | 2669 } else { |
| 2504 __ mov(FieldOperand(elements, key, times_4, FixedArray::kHeaderSize), | 2670 __ mov(FieldOperand(elements, key, times_4, FixedArray::kHeaderSize), |
| 2505 value); | 2671 value); |
| 2506 } | 2672 } |
| 2507 | 2673 |
| 2508 // Update the write barrier unless we're certain that we're storing a smi. | |
| 2509 if (instr->hydrogen()->NeedsWriteBarrier()) { | 2674 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 2510 // Compute address of modified element and store it into key register. | 2675 // Compute address of modified element and store it into key register. |
| 2511 __ lea(key, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize)); | 2676 __ lea(key, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize)); |
| 2512 __ RecordWrite(elements, key, value); | 2677 __ RecordWrite(elements, key, value); |
| 2513 } | 2678 } |
| 2514 } | 2679 } |
| 2515 | 2680 |
| 2516 | 2681 |
| 2517 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { | 2682 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { |
| 2518 ASSERT(ToRegister(instr->object()).is(edx)); | 2683 ASSERT(ToRegister(instr->object()).is(edx)); |
| (...skipping 331 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2850 __ add(Operand(esp), Immediate(kDoubleSize)); | 3015 __ add(Operand(esp), Immediate(kDoubleSize)); |
| 2851 DeoptimizeIf(no_condition, instr->environment()); | 3016 DeoptimizeIf(no_condition, instr->environment()); |
| 2852 __ bind(&convert); | 3017 __ bind(&convert); |
| 2853 // Do conversion, which cannot fail because we checked the exponent. | 3018 // Do conversion, which cannot fail because we checked the exponent. |
| 2854 __ fld_d(Operand(esp, 0)); | 3019 __ fld_d(Operand(esp, 0)); |
| 2855 __ fisttp_d(Operand(esp, 0)); | 3020 __ fisttp_d(Operand(esp, 0)); |
| 2856 __ mov(result_reg, Operand(esp, 0)); // Low word of answer is the result. | 3021 __ mov(result_reg, Operand(esp, 0)); // Low word of answer is the result. |
| 2857 __ add(Operand(esp), Immediate(kDoubleSize)); | 3022 __ add(Operand(esp), Immediate(kDoubleSize)); |
| 2858 __ bind(&done); | 3023 __ bind(&done); |
| 2859 } else { | 3024 } else { |
| 2860 // This will bail out if the input was not in the int32 range (or, | 3025 NearLabel done; |
| 2861 // unfortunately, if the input was 0x80000000). | 3026 Register temp_reg = ToRegister(instr->temporary()); |
| 2862 DeoptimizeIf(equal, instr->environment()); | 3027 XMMRegister xmm_scratch = xmm0; |
| 3028 |
| 3029 // If cvttsd2si succeeded, we're done. Otherwise, we attempt |
| 3030 // manual conversion. |
| 3031 __ j(not_equal, &done); |
| 3032 |
| 3033 // Get high 32 bits of the input in result_reg and temp_reg. |
| 3034 __ pshufd(xmm_scratch, input_reg, 1); |
| 3035 __ movd(Operand(temp_reg), xmm_scratch); |
| 3036 __ mov(result_reg, temp_reg); |
| 3037 |
| 3038 // Prepare negation mask in temp_reg. |
| 3039 __ sar(temp_reg, kBitsPerInt - 1); |
| 3040 |
| 3041 // Extract the exponent from result_reg and subtract adjusted |
| 3042 // bias from it. The adjustment is selected in a way such that |
| 3043 // when the difference is zero, the answer is in the low 32 bits |
| 3044 // of the input, otherwise a shift has to be performed. |
| 3045 __ shr(result_reg, HeapNumber::kExponentShift); |
| 3046 __ and_(result_reg, |
| 3047 HeapNumber::kExponentMask >> HeapNumber::kExponentShift); |
| 3048 __ sub(Operand(result_reg), |
| 3049 Immediate(HeapNumber::kExponentBias + |
| 3050 HeapNumber::kExponentBits + |
| 3051 HeapNumber::kMantissaBits)); |
| 3052 // Don't handle big (> kMantissaBits + kExponentBits == 63) or |
| 3053 // special exponents. |
| 3054 DeoptimizeIf(greater, instr->environment()); |
| 3055 |
| 3056 // Zero out the sign and the exponent in the input (by shifting |
| 3057 // it to the left) and restore the implicit mantissa bit, |
| 3058 // i.e. convert the input to unsigned int64 shifted left by |
| 3059 // kExponentBits. |
| 3060 ExternalReference minus_zero = ExternalReference::address_of_minus_zero(); |
| 3061 // Minus zero has the most significant bit set and the other |
| 3062 // bits cleared. |
| 3063 __ movdbl(xmm_scratch, Operand::StaticVariable(minus_zero)); |
| 3064 __ psllq(input_reg, HeapNumber::kExponentBits); |
| 3065 __ por(input_reg, xmm_scratch); |
| 3066 |
| 3067 // Get the amount to shift the input right in xmm_scratch. |
| 3068 __ neg(result_reg); |
| 3069 __ movd(xmm_scratch, Operand(result_reg)); |
| 3070 |
| 3071 // Shift the input right and extract low 32 bits. |
| 3072 __ psrlq(input_reg, xmm_scratch); |
| 3073 __ movd(Operand(result_reg), input_reg); |
| 3074 |
| 3075 // Use the prepared mask in temp_reg to negate the result if necessary. |
| 3076 __ xor_(result_reg, Operand(temp_reg)); |
| 3077 __ sub(result_reg, Operand(temp_reg)); |
| 3078 __ bind(&done); |
| 2863 } | 3079 } |
| 2864 } else { | 3080 } else { |
| 2865 NearLabel done; | 3081 NearLabel done; |
| 2866 __ cvttsd2si(result_reg, Operand(input_reg)); | 3082 __ cvttsd2si(result_reg, Operand(input_reg)); |
| 2867 __ cvtsi2sd(xmm0, Operand(result_reg)); | 3083 __ cvtsi2sd(xmm0, Operand(result_reg)); |
| 2868 __ ucomisd(xmm0, input_reg); | 3084 __ ucomisd(xmm0, input_reg); |
| 2869 DeoptimizeIf(not_equal, instr->environment()); | 3085 DeoptimizeIf(not_equal, instr->environment()); |
| 2870 DeoptimizeIf(parity_even, instr->environment()); // NaN. | 3086 DeoptimizeIf(parity_even, instr->environment()); // NaN. |
| 2871 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3087 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 2872 // The integer converted back is equal to the original. We | 3088 // The integer converted back is equal to the original. We |
| (...skipping 19 matching lines...) Expand all Loading... |
| 2892 DeoptimizeIf(instr->condition(), instr->environment()); | 3108 DeoptimizeIf(instr->condition(), instr->environment()); |
| 2893 } | 3109 } |
| 2894 | 3110 |
| 2895 | 3111 |
| 2896 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 3112 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { |
| 2897 Register input = ToRegister(instr->input()); | 3113 Register input = ToRegister(instr->input()); |
| 2898 Register temp = ToRegister(instr->temp()); | 3114 Register temp = ToRegister(instr->temp()); |
| 2899 InstanceType first = instr->hydrogen()->first(); | 3115 InstanceType first = instr->hydrogen()->first(); |
| 2900 InstanceType last = instr->hydrogen()->last(); | 3116 InstanceType last = instr->hydrogen()->last(); |
| 2901 | 3117 |
| 2902 __ test(input, Immediate(kSmiTagMask)); | |
| 2903 DeoptimizeIf(zero, instr->environment()); | |
| 2904 | |
| 2905 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset)); | 3118 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset)); |
| 2906 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), | 3119 __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), |
| 2907 static_cast<int8_t>(first)); | 3120 static_cast<int8_t>(first)); |
| 2908 | 3121 |
| 2909 // If there is only one type in the interval check for equality. | 3122 // If there is only one type in the interval check for equality. |
| 2910 if (first == last) { | 3123 if (first == last) { |
| 2911 DeoptimizeIf(not_equal, instr->environment()); | 3124 DeoptimizeIf(not_equal, instr->environment()); |
| 2912 } else { | 3125 } else { |
| 2913 DeoptimizeIf(below, instr->environment()); | 3126 DeoptimizeIf(below, instr->environment()); |
| 2914 // Omit check for the last type. | 3127 // Omit check for the last type. |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2947 } else { | 3160 } else { |
| 2948 __ mov(result, prototype); | 3161 __ mov(result, prototype); |
| 2949 } | 3162 } |
| 2950 } | 3163 } |
| 2951 | 3164 |
| 2952 | 3165 |
| 2953 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { | 3166 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { |
| 2954 Register reg = ToRegister(instr->temp()); | 3167 Register reg = ToRegister(instr->temp()); |
| 2955 | 3168 |
| 2956 Handle<JSObject> holder = instr->holder(); | 3169 Handle<JSObject> holder = instr->holder(); |
| 2957 Handle<Map> receiver_map = instr->receiver_map(); | 3170 Handle<JSObject> current_prototype = instr->prototype(); |
| 2958 Handle<JSObject> current_prototype(JSObject::cast(receiver_map->prototype())); | |
| 2959 | 3171 |
| 2960 // Load prototype object. | 3172 // Load prototype object. |
| 2961 LoadPrototype(reg, current_prototype); | 3173 LoadPrototype(reg, current_prototype); |
| 2962 | 3174 |
| 2963 // Check prototype maps up to the holder. | 3175 // Check prototype maps up to the holder. |
| 2964 while (!current_prototype.is_identical_to(holder)) { | 3176 while (!current_prototype.is_identical_to(holder)) { |
| 2965 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), | 3177 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), |
| 2966 Handle<Map>(current_prototype->map())); | 3178 Handle<Map>(current_prototype->map())); |
| 2967 DeoptimizeIf(not_equal, instr->environment()); | 3179 DeoptimizeIf(not_equal, instr->environment()); |
| 2968 current_prototype = | 3180 current_prototype = |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3007 | 3219 |
| 3008 | 3220 |
| 3009 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { | 3221 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { |
| 3010 // Setup the parameters to the stub/runtime call. | 3222 // Setup the parameters to the stub/runtime call. |
| 3011 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); | 3223 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); |
| 3012 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); | 3224 __ push(FieldOperand(eax, JSFunction::kLiteralsOffset)); |
| 3013 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 3225 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
| 3014 __ push(Immediate(instr->hydrogen()->constant_properties())); | 3226 __ push(Immediate(instr->hydrogen()->constant_properties())); |
| 3015 __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0))); | 3227 __ push(Immediate(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0))); |
| 3016 | 3228 |
| 3017 // Pick the right runtime function or stub to call. | 3229 // Pick the right runtime function to call. |
| 3018 if (instr->hydrogen()->depth() > 1) { | 3230 if (instr->hydrogen()->depth() > 1) { |
| 3019 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); | 3231 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); |
| 3020 } else { | 3232 } else { |
| 3021 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); | 3233 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); |
| 3022 } | 3234 } |
| 3023 } | 3235 } |
| 3024 | 3236 |
| 3025 | 3237 |
| 3026 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { | 3238 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { |
| 3027 NearLabel materialized; | 3239 NearLabel materialized; |
| (...skipping 247 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3275 ASSERT(!environment->HasBeenRegistered()); | 3487 ASSERT(!environment->HasBeenRegistered()); |
| 3276 RegisterEnvironmentForDeoptimization(environment); | 3488 RegisterEnvironmentForDeoptimization(environment); |
| 3277 ASSERT(osr_pc_offset_ == -1); | 3489 ASSERT(osr_pc_offset_ == -1); |
| 3278 osr_pc_offset_ = masm()->pc_offset(); | 3490 osr_pc_offset_ = masm()->pc_offset(); |
| 3279 } | 3491 } |
| 3280 | 3492 |
| 3281 | 3493 |
| 3282 #undef __ | 3494 #undef __ |
| 3283 | 3495 |
| 3284 } } // namespace v8::internal | 3496 } } // namespace v8::internal |
| 3497 |
| 3498 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |