| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2706 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2717 } | 2717 } |
| 2718 | 2718 |
| 2719 | 2719 |
| 2720 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 2720 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
| 2721 class DeferredInstanceOfKnownGlobal: public LDeferredCode { | 2721 class DeferredInstanceOfKnownGlobal: public LDeferredCode { |
| 2722 public: | 2722 public: |
| 2723 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, | 2723 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, |
| 2724 LInstanceOfKnownGlobal* instr) | 2724 LInstanceOfKnownGlobal* instr) |
| 2725 : LDeferredCode(codegen), instr_(instr) { } | 2725 : LDeferredCode(codegen), instr_(instr) { } |
| 2726 virtual void Generate() { | 2726 virtual void Generate() { |
| 2727 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); | 2727 codegen()->DoDeferredInstanceOfKnownGlobal(instr_); |
| 2728 } | 2728 } |
| 2729 virtual LInstruction* instr() { return instr_; } | 2729 virtual LInstruction* instr() { return instr_; } |
| 2730 Label* map_check() { return &map_check_; } | |
| 2731 private: | 2730 private: |
| 2732 LInstanceOfKnownGlobal* instr_; | 2731 LInstanceOfKnownGlobal* instr_; |
| 2733 Label map_check_; | |
| 2734 }; | 2732 }; |
| 2735 | 2733 |
| 2736 DeferredInstanceOfKnownGlobal* deferred = | 2734 DeferredInstanceOfKnownGlobal* deferred = |
| 2737 new(zone()) DeferredInstanceOfKnownGlobal(this, instr); | 2735 new(zone()) DeferredInstanceOfKnownGlobal(this, instr); |
| 2738 | 2736 |
| 2739 Label return_false, cache_miss; | 2737 Label map_check, return_false, cache_miss, done; |
| 2740 Register object = ToRegister(instr->value()); | 2738 Register object = ToRegister(instr->value()); |
| 2741 Register result = ToRegister(instr->result()); | 2739 Register result = ToRegister(instr->result()); |
| 2740 // x4 is expected in the associated deferred code and stub. |
| 2741 Register map_check_site = x4; |
| 2742 Register map = x5; |
| 2742 | 2743 |
| 2743 // This instruction is marked as call. We can clobber any register. | 2744 // This instruction is marked as call. We can clobber any register. |
| 2744 ASSERT(instr->IsMarkedAsCall()); | 2745 ASSERT(instr->IsMarkedAsCall()); |
| 2745 | 2746 |
| 2746 // We must take into account that object is in x11. | 2747 // We must take into account that object is in x11. |
| 2747 ASSERT(object.Is(x11)); | 2748 ASSERT(object.Is(x11)); |
| 2748 Register scratch = x10; | 2749 Register scratch = x10; |
| 2749 | 2750 |
| 2750 // A Smi is not instance of anything. | 2751 // A Smi is not instance of anything. |
| 2751 __ JumpIfSmi(object, &return_false); | 2752 __ JumpIfSmi(object, &return_false); |
| 2752 | 2753 |
| 2753 TODO_UNIMPLEMENTED("patchable inline check"); | 2754 // This is the inlined call site instanceof cache. The two occurrences of the |
| 2755 // hole value will be patched to the last map/result pair generated by the |
| 2756 // instanceof stub. |
| 2757 __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 2758 { |
| 2759 // Below we use Factory::the_hole_value() on purpose instead of loading from |
| 2760 // the root array to force relocation and later be able to patch with a |
| 2761 // custom value. |
| 2762 InstructionAccurateScope scope(masm(), 5); |
| 2763 __ bind(&map_check); |
| 2764 // Will be patched with the cached map. |
| 2765 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); |
| 2766 __ LoadRelocated(scratch, Operand(Handle<Object>(cell))); |
| 2767 __ ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); |
| 2768 __ cmp(map, Operand(scratch)); |
| 2769 __ b(&cache_miss, ne); |
| 2770 // The address of this instruction is computed relative to the map check |
| 2771 // above, so check the size of the code generated. |
| 2772 ASSERT(masm()->InstructionsGeneratedSince(&map_check) == 4); |
| 2773 // Will be patched with the cached result. |
| 2774 __ LoadRelocated(result, Operand(factory()->the_hole_value())); |
| 2775 } |
| 2776 __ B(&done); |
| 2754 | 2777 |
| 2755 // The inlined call site cache did not match. | 2778 // The inlined call site cache did not match. |
| 2756 // Check null and string before calling the deferred code. | 2779 // Check null and string before calling the deferred code. |
| 2757 __ Bind(&cache_miss); | 2780 __ Bind(&cache_miss); |
| 2781 // Compute the address of the map check. It must not be clobbered until the |
| 2782 // InstanceOfStub has used it. |
| 2783 __ Adr(map_check_site, &map_check); |
| 2758 // Null is not instance of anything. | 2784 // Null is not instance of anything. |
| 2759 __ JumpIfRoot(object, Heap::kNullValueRootIndex, &return_false); | 2785 __ JumpIfRoot(object, Heap::kNullValueRootIndex, &return_false); |
| 2760 | 2786 |
| 2761 // String values are not instances of anything. | 2787 // String values are not instances of anything. |
| 2762 // Return false if the object is a string. Otherwise, jump to the deferred | 2788 // Return false if the object is a string. Otherwise, jump to the deferred |
| 2763 // code. | 2789 // code. |
| 2764 // Note that we can't jump directly to deferred code from | 2790 // Note that we can't jump directly to deferred code from |
| 2765 // IsObjectJSStringType, because it uses tbz for the jump and the deferred | 2791 // IsObjectJSStringType, because it uses tbz for the jump and the deferred |
| 2766 // code can be out of range. | 2792 // code can be out of range. |
| 2767 __ IsObjectJSStringType(object, scratch, NULL, &return_false); | 2793 __ IsObjectJSStringType(object, scratch, NULL, &return_false); |
| 2768 __ B(deferred->entry()); | 2794 __ B(deferred->entry()); |
| 2769 | 2795 |
| 2770 __ Bind(&return_false); | 2796 __ Bind(&return_false); |
| 2771 __ LoadRoot(result, Heap::kFalseValueRootIndex); | 2797 __ LoadRoot(result, Heap::kFalseValueRootIndex); |
| 2772 | 2798 |
| 2773 // Here result is either true or false. | 2799 // Here result is either true or false. |
| 2774 __ Bind(deferred->exit()); | 2800 __ Bind(deferred->exit()); |
| 2801 __ Bind(&done); |
| 2775 } | 2802 } |
| 2776 | 2803 |
| 2777 | 2804 |
| 2778 void LCodeGen::DoInstanceSize(LInstanceSize* instr) { | 2805 void LCodeGen::DoInstanceSize(LInstanceSize* instr) { |
| 2779 Register object = ToRegister(instr->object()); | 2806 Register object = ToRegister(instr->object()); |
| 2780 Register result = ToRegister(instr->result()); | 2807 Register result = ToRegister(instr->result()); |
| 2781 __ Ldr(result, FieldMemOperand(object, HeapObject::kMapOffset)); | 2808 __ Ldr(result, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 2782 __ Ldrb(result, FieldMemOperand(result, Map::kInstanceSizeOffset)); | 2809 __ Ldrb(result, FieldMemOperand(result, Map::kInstanceSizeOffset)); |
| 2783 } | 2810 } |
| 2784 | 2811 |
| 2785 | 2812 |
| 2786 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | 2813 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
| 2787 Label* map_check) { | |
| 2788 Register result = ToRegister(instr->result()); | 2814 Register result = ToRegister(instr->result()); |
| 2789 ASSERT(result.Is(x0)); // InstanceofStub returns its result in x0. | 2815 ASSERT(result.Is(x0)); // InstanceofStub returns its result in x0. |
| 2790 InstanceofStub::Flags flags = InstanceofStub::kArgsInRegisters; | 2816 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; |
| 2817 flags = static_cast<InstanceofStub::Flags>( |
| 2818 flags | InstanceofStub::kArgsInRegisters); |
| 2819 flags = static_cast<InstanceofStub::Flags>( |
| 2820 flags | InstanceofStub::kReturnTrueFalseObject); |
| 2821 flags = static_cast<InstanceofStub::Flags>( |
| 2822 flags | InstanceofStub::kCallSiteInlineCheck); |
| 2791 | 2823 |
| 2792 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 2824 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 2793 | 2825 |
| 2794 // Prepare InstanceofStub arguments. | 2826 // Prepare InstanceofStub arguments. |
| 2795 ASSERT(ToRegister(instr->value()).Is(InstanceofStub::left())); | 2827 ASSERT(ToRegister(instr->value()).Is(InstanceofStub::left())); |
| 2796 __ LoadHeapObject(InstanceofStub::right(), instr->function()); | 2828 __ LoadHeapObject(InstanceofStub::right(), instr->function()); |
| 2797 | 2829 |
| 2798 InstanceofStub stub(flags); | 2830 InstanceofStub stub(flags); |
| 2799 CallCodeGeneric(stub.GetCode(isolate()), | 2831 CallCodeGeneric(stub.GetCode(isolate()), |
| 2800 RelocInfo::CODE_TARGET, | 2832 RelocInfo::CODE_TARGET, |
| 2801 instr, | 2833 instr, |
| 2802 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 2834 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
| 2803 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); | 2835 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); |
| 2804 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 2836 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 2805 | 2837 |
| 2806 // TODO(all): This could be integrated into InstanceofStub. | |
| 2807 __ LoadTrueFalseRoots(x1, x2); | |
| 2808 ASSERT(Smi::FromInt(0) == 0); | |
| 2809 __ Cmp(result, 0); | |
| 2810 __ Csel(result, x1, x2, eq); | |
| 2811 | |
| 2812 // Put the result value into the result register slot. | 2838 // Put the result value into the result register slot. |
| 2813 __ StoreToSafepointRegisterSlot(result, result); | 2839 __ StoreToSafepointRegisterSlot(result, result); |
| 2814 } | 2840 } |
| 2815 | 2841 |
| 2816 | 2842 |
| 2817 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { | 2843 void LCodeGen::DoInstructionGap(LInstructionGap* instr) { |
| 2818 DoGap(instr); | 2844 DoGap(instr); |
| 2819 } | 2845 } |
| 2820 | 2846 |
| 2821 | 2847 |
| (...skipping 2675 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5497 __ Bind(&out_of_object); | 5523 __ Bind(&out_of_object); |
| 5498 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 5524 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 5499 // Index is equal to negated out of object property index plus 1. | 5525 // Index is equal to negated out of object property index plus 1. |
| 5500 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 5526 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
| 5501 __ Ldr(result, FieldMemOperand(result, | 5527 __ Ldr(result, FieldMemOperand(result, |
| 5502 FixedArray::kHeaderSize - kPointerSize)); | 5528 FixedArray::kHeaderSize - kPointerSize)); |
| 5503 __ Bind(&done); | 5529 __ Bind(&done); |
| 5504 } | 5530 } |
| 5505 | 5531 |
| 5506 } } // namespace v8::internal | 5532 } } // namespace v8::internal |
| OLD | NEW |