| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 161 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 172 __ Prologue(info()->IsStub() ? BUILD_STUB_FRAME : BUILD_FUNCTION_FRAME); | 172 __ Prologue(info()->IsStub() ? BUILD_STUB_FRAME : BUILD_FUNCTION_FRAME); |
| 173 frame_is_built_ = true; | 173 frame_is_built_ = true; |
| 174 info_->AddNoFrameRange(0, masm_->pc_offset()); | 174 info_->AddNoFrameRange(0, masm_->pc_offset()); |
| 175 } | 175 } |
| 176 | 176 |
| 177 // Reserve space for the stack slots needed by the code. | 177 // Reserve space for the stack slots needed by the code. |
| 178 int slots = GetStackSlotCount(); | 178 int slots = GetStackSlotCount(); |
| 179 if (slots > 0) { | 179 if (slots > 0) { |
| 180 if (FLAG_debug_code) { | 180 if (FLAG_debug_code) { |
| 181 __ Subu(sp, sp, Operand(slots * kPointerSize)); | 181 __ Subu(sp, sp, Operand(slots * kPointerSize)); |
| 182 __ push(a0); | 182 __ Push(a0, a1); |
| 183 __ push(a1); | |
| 184 __ Addu(a0, sp, Operand(slots * kPointerSize)); | 183 __ Addu(a0, sp, Operand(slots * kPointerSize)); |
| 185 __ li(a1, Operand(kSlotsZapValue)); | 184 __ li(a1, Operand(kSlotsZapValue)); |
| 186 Label loop; | 185 Label loop; |
| 187 __ bind(&loop); | 186 __ bind(&loop); |
| 188 __ Subu(a0, a0, Operand(kPointerSize)); | 187 __ Subu(a0, a0, Operand(kPointerSize)); |
| 189 __ sw(a1, MemOperand(a0, 2 * kPointerSize)); | 188 __ sw(a1, MemOperand(a0, 2 * kPointerSize)); |
| 190 __ Branch(&loop, ne, a0, Operand(sp)); | 189 __ Branch(&loop, ne, a0, Operand(sp)); |
| 191 __ pop(a1); | 190 __ Pop(a0, a1); |
| 192 __ pop(a0); | |
| 193 } else { | 191 } else { |
| 194 __ Subu(sp, sp, Operand(slots * kPointerSize)); | 192 __ Subu(sp, sp, Operand(slots * kPointerSize)); |
| 195 } | 193 } |
| 196 } | 194 } |
| 197 | 195 |
| 198 if (info()->saves_caller_doubles()) { | 196 if (info()->saves_caller_doubles()) { |
| 199 SaveCallerDoubles(); | 197 SaveCallerDoubles(); |
| 200 } | 198 } |
| 201 | 199 |
| 202 // Possibly allocate a local context. | 200 // Possibly allocate a local context. |
| (...skipping 2405 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2608 __ lw(map, FieldMemOperand(object, HeapObject::kMapOffset)); | 2606 __ lw(map, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 2609 | 2607 |
| 2610 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 2608 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 2611 __ bind(deferred->map_check()); // Label for calculating code patching. | 2609 __ bind(deferred->map_check()); // Label for calculating code patching. |
| 2612 // We use Factory::the_hole_value() on purpose instead of loading from the | 2610 // We use Factory::the_hole_value() on purpose instead of loading from the |
| 2613 // root array to force relocation to be able to later patch with | 2611 // root array to force relocation to be able to later patch with |
| 2614 // the cached map. | 2612 // the cached map. |
| 2615 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); | 2613 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); |
| 2616 __ li(at, Operand(Handle<Object>(cell))); | 2614 __ li(at, Operand(Handle<Object>(cell))); |
| 2617 __ lw(at, FieldMemOperand(at, PropertyCell::kValueOffset)); | 2615 __ lw(at, FieldMemOperand(at, PropertyCell::kValueOffset)); |
| 2618 __ Branch(&cache_miss, ne, map, Operand(at)); | 2616 __ BranchShort(&cache_miss, ne, map, Operand(at)); |
| 2619 // We use Factory::the_hole_value() on purpose instead of loading from the | 2617 // We use Factory::the_hole_value() on purpose instead of loading from the |
| 2620 // root array to force relocation to be able to later patch | 2618 // root array to force relocation to be able to later patch |
| 2621 // with true or false. | 2619 // with true or false. The distance from map check has to be constant. |
| 2622 __ li(result, Operand(factory()->the_hole_value()), CONSTANT_SIZE); | 2620 __ li(result, Operand(factory()->the_hole_value()), CONSTANT_SIZE); |
| 2623 __ Branch(&done); | 2621 __ Branch(&done); |
| 2624 | 2622 |
| 2625 // The inlined call site cache did not match. Check null and string before | 2623 // The inlined call site cache did not match. Check null and string before |
| 2626 // calling the deferred code. | 2624 // calling the deferred code. |
| 2627 __ bind(&cache_miss); | 2625 __ bind(&cache_miss); |
| 2628 // Null is not instance of anything. | 2626 // Null is not instance of anything. |
| 2629 __ LoadRoot(temp, Heap::kNullValueRootIndex); | 2627 __ LoadRoot(temp, Heap::kNullValueRootIndex); |
| 2630 __ Branch(&false_result, eq, object, Operand(temp)); | 2628 __ Branch(&false_result, eq, object, Operand(temp)); |
| 2631 | 2629 |
| (...skipping 1118 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3750 ASSERT(ToDoubleRegister(instr->left()).is(f2)); | 3748 ASSERT(ToDoubleRegister(instr->left()).is(f2)); |
| 3751 ASSERT(ToDoubleRegister(instr->result()).is(f0)); | 3749 ASSERT(ToDoubleRegister(instr->result()).is(f0)); |
| 3752 | 3750 |
| 3753 if (exponent_type.IsSmi()) { | 3751 if (exponent_type.IsSmi()) { |
| 3754 MathPowStub stub(MathPowStub::TAGGED); | 3752 MathPowStub stub(MathPowStub::TAGGED); |
| 3755 __ CallStub(&stub); | 3753 __ CallStub(&stub); |
| 3756 } else if (exponent_type.IsTagged()) { | 3754 } else if (exponent_type.IsTagged()) { |
| 3757 Label no_deopt; | 3755 Label no_deopt; |
| 3758 __ JumpIfSmi(a2, &no_deopt); | 3756 __ JumpIfSmi(a2, &no_deopt); |
| 3759 __ lw(t3, FieldMemOperand(a2, HeapObject::kMapOffset)); | 3757 __ lw(t3, FieldMemOperand(a2, HeapObject::kMapOffset)); |
| 3758 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
| 3760 DeoptimizeIf(ne, instr->environment(), t3, Operand(at)); | 3759 DeoptimizeIf(ne, instr->environment(), t3, Operand(at)); |
| 3761 __ bind(&no_deopt); | 3760 __ bind(&no_deopt); |
| 3762 MathPowStub stub(MathPowStub::TAGGED); | 3761 MathPowStub stub(MathPowStub::TAGGED); |
| 3763 __ CallStub(&stub); | 3762 __ CallStub(&stub); |
| 3764 } else if (exponent_type.IsInteger32()) { | 3763 } else if (exponent_type.IsInteger32()) { |
| 3765 MathPowStub stub(MathPowStub::INTEGER); | 3764 MathPowStub stub(MathPowStub::INTEGER); |
| 3766 __ CallStub(&stub); | 3765 __ CallStub(&stub); |
| 3767 } else { | 3766 } else { |
| 3768 ASSERT(exponent_type.IsDouble()); | 3767 ASSERT(exponent_type.IsDouble()); |
| 3769 MathPowStub stub(MathPowStub::DOUBLE); | 3768 MathPowStub stub(MathPowStub::DOUBLE); |
| (...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3856 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); | 3855 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
| 3857 } | 3856 } |
| 3858 | 3857 |
| 3859 | 3858 |
| 3860 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 3859 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
| 3861 ASSERT(ToRegister(instr->context()).is(cp)); | 3860 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3862 ASSERT(ToRegister(instr->function()).is(a1)); | 3861 ASSERT(ToRegister(instr->function()).is(a1)); |
| 3863 ASSERT(ToRegister(instr->result()).is(v0)); | 3862 ASSERT(ToRegister(instr->result()).is(v0)); |
| 3864 | 3863 |
| 3865 int arity = instr->arity(); | 3864 int arity = instr->arity(); |
| 3866 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS); | 3865 CallFunctionStub stub(arity, instr->hydrogen()->function_flags()); |
| 3867 if (instr->hydrogen()->IsTailCall()) { | 3866 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3868 if (NeedsEagerFrame()) __ mov(sp, fp); | |
| 3869 __ Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET); | |
| 3870 } else { | |
| 3871 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | |
| 3872 } | |
| 3873 } | 3867 } |
| 3874 | 3868 |
| 3875 | 3869 |
| 3876 void LCodeGen::DoCallNew(LCallNew* instr) { | 3870 void LCodeGen::DoCallNew(LCallNew* instr) { |
| 3877 ASSERT(ToRegister(instr->context()).is(cp)); | 3871 ASSERT(ToRegister(instr->context()).is(cp)); |
| 3878 ASSERT(ToRegister(instr->constructor()).is(a1)); | 3872 ASSERT(ToRegister(instr->constructor()).is(a1)); |
| 3879 ASSERT(ToRegister(instr->result()).is(v0)); | 3873 ASSERT(ToRegister(instr->result()).is(v0)); |
| 3880 | 3874 |
| 3881 __ li(a0, Operand(instr->arity())); | 3875 __ li(a0, Operand(instr->arity())); |
| 3882 // No cell in a2 for construct type feedback in optimized code | 3876 // No cell in a2 for construct type feedback in optimized code |
| (...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4001 temp, | 3995 temp, |
| 4002 GetRAState(), | 3996 GetRAState(), |
| 4003 kSaveFPRegs, | 3997 kSaveFPRegs, |
| 4004 OMIT_REMEMBERED_SET, | 3998 OMIT_REMEMBERED_SET, |
| 4005 OMIT_SMI_CHECK); | 3999 OMIT_SMI_CHECK); |
| 4006 } | 4000 } |
| 4007 } | 4001 } |
| 4008 | 4002 |
| 4009 // Do the store. | 4003 // Do the store. |
| 4010 Register value = ToRegister(instr->value()); | 4004 Register value = ToRegister(instr->value()); |
| 4011 ASSERT(!object.is(value)); | |
| 4012 SmiCheck check_needed = | 4005 SmiCheck check_needed = |
| 4013 instr->hydrogen()->value()->IsHeapObject() | 4006 instr->hydrogen()->value()->IsHeapObject() |
| 4014 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 4007 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 4015 if (access.IsInobject()) { | 4008 if (access.IsInobject()) { |
| 4016 MemOperand operand = FieldMemOperand(object, offset); | 4009 MemOperand operand = FieldMemOperand(object, offset); |
| 4017 __ Store(value, operand, representation); | 4010 __ Store(value, operand, representation); |
| 4018 if (instr->hydrogen()->NeedsWriteBarrier()) { | 4011 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 4019 // Update the write barrier for the object for in-object properties. | 4012 // Update the write barrier for the object for in-object properties. |
| 4020 __ RecordWriteField(object, | 4013 __ RecordWriteField(object, |
| 4021 offset, | 4014 offset, |
| (...skipping 200 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4222 } | 4215 } |
| 4223 | 4216 |
| 4224 if (instr->NeedsCanonicalization()) { | 4217 if (instr->NeedsCanonicalization()) { |
| 4225 Label is_nan; | 4218 Label is_nan; |
| 4226 // Check for NaN. All NaNs must be canonicalized. | 4219 // Check for NaN. All NaNs must be canonicalized. |
| 4227 __ BranchF(NULL, &is_nan, eq, value, value); | 4220 __ BranchF(NULL, &is_nan, eq, value, value); |
| 4228 __ Branch(¬_nan); | 4221 __ Branch(¬_nan); |
| 4229 | 4222 |
| 4230 // Only load canonical NaN if the comparison above set the overflow. | 4223 // Only load canonical NaN if the comparison above set the overflow. |
| 4231 __ bind(&is_nan); | 4224 __ bind(&is_nan); |
| 4232 __ Move(double_scratch, | 4225 __ LoadRoot(at, Heap::kNanValueRootIndex); |
| 4233 FixedDoubleArray::canonical_not_the_hole_nan_as_double()); | 4226 __ ldc1(double_scratch, FieldMemOperand(at, HeapNumber::kValueOffset)); |
| 4234 __ sdc1(double_scratch, MemOperand(scratch, instr->additional_index() << | 4227 __ sdc1(double_scratch, MemOperand(scratch, instr->additional_index() << |
| 4235 element_size_shift)); | 4228 element_size_shift)); |
| 4236 __ Branch(&done); | 4229 __ Branch(&done); |
| 4237 } | 4230 } |
| 4238 | 4231 |
| 4239 __ bind(¬_nan); | 4232 __ bind(¬_nan); |
| 4240 __ sdc1(value, MemOperand(scratch, instr->additional_index() << | 4233 __ sdc1(value, MemOperand(scratch, instr->additional_index() << |
| 4241 element_size_shift)); | 4234 element_size_shift)); |
| 4242 __ bind(&done); | 4235 __ bind(&done); |
| 4243 } | 4236 } |
| (...skipping 549 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4793 // Heap number map check. | 4786 // Heap number map check. |
| 4794 __ lw(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset)); | 4787 __ lw(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset)); |
| 4795 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 4788 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
| 4796 // This 'at' value and scratch1 map value are used for tests in both clauses | 4789 // This 'at' value and scratch1 map value are used for tests in both clauses |
| 4797 // of the if. | 4790 // of the if. |
| 4798 | 4791 |
| 4799 if (instr->truncating()) { | 4792 if (instr->truncating()) { |
| 4800 // Performs a truncating conversion of a floating point number as used by | 4793 // Performs a truncating conversion of a floating point number as used by |
| 4801 // the JS bitwise operations. | 4794 // the JS bitwise operations. |
| 4802 Label no_heap_number, check_bools, check_false; | 4795 Label no_heap_number, check_bools, check_false; |
| 4803 __ Branch(&no_heap_number, ne, scratch1, Operand(at)); // HeapNumber map? | 4796 // Check HeapNumber map. |
| 4804 __ mov(scratch2, input_reg); | 4797 __ Branch(USE_DELAY_SLOT, &no_heap_number, ne, scratch1, Operand(at)); |
| 4798 __ mov(scratch2, input_reg); // In delay slot. |
| 4805 __ TruncateHeapNumberToI(input_reg, scratch2); | 4799 __ TruncateHeapNumberToI(input_reg, scratch2); |
| 4806 __ Branch(&done); | 4800 __ Branch(&done); |
| 4807 | 4801 |
| 4808 // Check for Oddballs. Undefined/False is converted to zero and True to one | 4802 // Check for Oddballs. Undefined/False is converted to zero and True to one |
| 4809 // for truncating conversions. | 4803 // for truncating conversions. |
| 4810 __ bind(&no_heap_number); | 4804 __ bind(&no_heap_number); |
| 4811 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 4805 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 4812 __ Branch(&check_bools, ne, input_reg, Operand(at)); | 4806 __ Branch(&check_bools, ne, input_reg, Operand(at)); |
| 4813 ASSERT(ToRegister(instr->result()).is(input_reg)); | 4807 ASSERT(ToRegister(instr->result()).is(input_reg)); |
| 4814 __ Branch(USE_DELAY_SLOT, &done); | 4808 __ Branch(USE_DELAY_SLOT, &done); |
| (...skipping 921 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5736 __ Subu(scratch, result, scratch); | 5730 __ Subu(scratch, result, scratch); |
| 5737 __ lw(result, FieldMemOperand(scratch, | 5731 __ lw(result, FieldMemOperand(scratch, |
| 5738 FixedArray::kHeaderSize - kPointerSize)); | 5732 FixedArray::kHeaderSize - kPointerSize)); |
| 5739 __ bind(&done); | 5733 __ bind(&done); |
| 5740 } | 5734 } |
| 5741 | 5735 |
| 5742 | 5736 |
| 5743 #undef __ | 5737 #undef __ |
| 5744 | 5738 |
| 5745 } } // namespace v8::internal | 5739 } } // namespace v8::internal |
| OLD | NEW |