OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1025 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1036 ASSERT(environment->HasBeenRegistered()); | 1036 ASSERT(environment->HasBeenRegistered()); |
1037 int id = environment->deoptimization_index(); | 1037 int id = environment->deoptimization_index(); |
1038 ASSERT(info()->IsOptimizing() || info()->IsStub()); | 1038 ASSERT(info()->IsOptimizing() || info()->IsStub()); |
1039 Address entry = | 1039 Address entry = |
1040 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 1040 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
1041 if (entry == NULL) { | 1041 if (entry == NULL) { |
1042 Abort(kBailoutWasNotPrepared); | 1042 Abort(kBailoutWasNotPrepared); |
1043 return; | 1043 return; |
1044 } | 1044 } |
1045 | 1045 |
1046 if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) { | 1046 if (DeoptEveryNTimes()) { |
1047 ExternalReference count = ExternalReference::stress_deopt_count(isolate()); | 1047 ExternalReference count = ExternalReference::stress_deopt_count(isolate()); |
1048 Label no_deopt; | 1048 Label no_deopt; |
1049 __ pushfd(); | 1049 __ pushfd(); |
1050 __ push(eax); | 1050 __ push(eax); |
1051 __ mov(eax, Operand::StaticVariable(count)); | 1051 __ mov(eax, Operand::StaticVariable(count)); |
1052 __ sub(eax, Immediate(1)); | 1052 __ sub(eax, Immediate(1)); |
1053 __ j(not_zero, &no_deopt, Label::kNear); | 1053 __ j(not_zero, &no_deopt, Label::kNear); |
1054 if (FLAG_trap_on_deopt) __ int3(); | 1054 if (FLAG_trap_on_deopt) __ int3(); |
1055 __ mov(eax, Immediate(FLAG_deopt_every_n_times)); | 1055 __ mov(eax, Immediate(FLAG_deopt_every_n_times)); |
1056 __ mov(Operand::StaticVariable(count), eax); | 1056 __ mov(Operand::StaticVariable(count), eax); |
(...skipping 983 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2040 if (index->value() == 0) { | 2040 if (index->value() == 0) { |
2041 __ mov(result, FieldOperand(object, JSDate::kValueOffset)); | 2041 __ mov(result, FieldOperand(object, JSDate::kValueOffset)); |
2042 } else { | 2042 } else { |
2043 if (index->value() < JSDate::kFirstUncachedField) { | 2043 if (index->value() < JSDate::kFirstUncachedField) { |
2044 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | 2044 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); |
2045 __ mov(scratch, Operand::StaticVariable(stamp)); | 2045 __ mov(scratch, Operand::StaticVariable(stamp)); |
2046 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset)); | 2046 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset)); |
2047 __ j(not_equal, &runtime, Label::kNear); | 2047 __ j(not_equal, &runtime, Label::kNear); |
2048 __ mov(result, FieldOperand(object, JSDate::kValueOffset + | 2048 __ mov(result, FieldOperand(object, JSDate::kValueOffset + |
2049 kPointerSize * index->value())); | 2049 kPointerSize * index->value())); |
2050 __ jmp(&done); | 2050 __ jmp(&done, Label::kNear); |
2051 } | 2051 } |
2052 __ bind(&runtime); | 2052 __ bind(&runtime); |
2053 __ PrepareCallCFunction(2, scratch); | 2053 __ PrepareCallCFunction(2, scratch); |
2054 __ mov(Operand(esp, 0), object); | 2054 __ mov(Operand(esp, 0), object); |
2055 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index)); | 2055 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index)); |
2056 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 2056 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
2057 __ bind(&done); | 2057 __ bind(&done); |
2058 } | 2058 } |
2059 } | 2059 } |
2060 | 2060 |
(...skipping 579 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2640 __ ucomisd(input_reg, input_reg); | 2640 __ ucomisd(input_reg, input_reg); |
2641 EmitFalseBranch(instr, parity_odd); | 2641 EmitFalseBranch(instr, parity_odd); |
2642 } else { | 2642 } else { |
2643 // Put the value to the top of stack | 2643 // Put the value to the top of stack |
2644 X87Register src = ToX87Register(instr->object()); | 2644 X87Register src = ToX87Register(instr->object()); |
2645 X87LoadForUsage(src); | 2645 X87LoadForUsage(src); |
2646 __ fld(0); | 2646 __ fld(0); |
2647 __ fld(0); | 2647 __ fld(0); |
2648 __ FCmp(); | 2648 __ FCmp(); |
2649 Label ok; | 2649 Label ok; |
2650 __ j(parity_even, &ok); | 2650 __ j(parity_even, &ok, Label::kNear); |
2651 __ fstp(0); | 2651 __ fstp(0); |
2652 EmitFalseBranch(instr, no_condition); | 2652 EmitFalseBranch(instr, no_condition); |
2653 __ bind(&ok); | 2653 __ bind(&ok); |
2654 } | 2654 } |
2655 | 2655 |
2656 | 2656 |
2657 __ sub(esp, Immediate(kDoubleSize)); | 2657 __ sub(esp, Immediate(kDoubleSize)); |
2658 if (use_sse2) { | 2658 if (use_sse2) { |
2659 CpuFeatureScope scope(masm(), SSE2); | 2659 CpuFeatureScope scope(masm(), SSE2); |
2660 XMMRegister input_reg = ToDoubleRegister(instr->object()); | 2660 XMMRegister input_reg = ToDoubleRegister(instr->object()); |
(...skipping 303 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2964 }; | 2964 }; |
2965 | 2965 |
2966 DeferredInstanceOfKnownGlobal* deferred; | 2966 DeferredInstanceOfKnownGlobal* deferred; |
2967 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr, x87_stack_); | 2967 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr, x87_stack_); |
2968 | 2968 |
2969 Label done, false_result; | 2969 Label done, false_result; |
2970 Register object = ToRegister(instr->value()); | 2970 Register object = ToRegister(instr->value()); |
2971 Register temp = ToRegister(instr->temp()); | 2971 Register temp = ToRegister(instr->temp()); |
2972 | 2972 |
2973 // A Smi is not an instance of anything. | 2973 // A Smi is not an instance of anything. |
2974 __ JumpIfSmi(object, &false_result); | 2974 __ JumpIfSmi(object, &false_result, Label::kNear); |
2975 | 2975 |
2976 // This is the inlined call site instanceof cache. The two occurrences of the | 2976 // This is the inlined call site instanceof cache. The two occurrences of the |
2977 // hole value will be patched to the last map/result pair generated by the | 2977 // hole value will be patched to the last map/result pair generated by the |
2978 // instanceof stub. | 2978 // instanceof stub. |
2979 Label cache_miss; | 2979 Label cache_miss; |
2980 Register map = ToRegister(instr->temp()); | 2980 Register map = ToRegister(instr->temp()); |
2981 __ mov(map, FieldOperand(object, HeapObject::kMapOffset)); | 2981 __ mov(map, FieldOperand(object, HeapObject::kMapOffset)); |
2982 __ bind(deferred->map_check()); // Label for calculating code patching. | 2982 __ bind(deferred->map_check()); // Label for calculating code patching. |
2983 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); | 2983 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); |
2984 __ cmp(map, Operand::ForCell(cache_cell)); // Patched to cached map. | 2984 __ cmp(map, Operand::ForCell(cache_cell)); // Patched to cached map. |
2985 __ j(not_equal, &cache_miss, Label::kNear); | 2985 __ j(not_equal, &cache_miss, Label::kNear); |
2986 __ mov(eax, factory()->the_hole_value()); // Patched to either true or false. | 2986 __ mov(eax, factory()->the_hole_value()); // Patched to either true or false. |
2987 __ jmp(&done); | 2987 __ jmp(&done, Label::kNear); |
2988 | 2988 |
2989 // The inlined call site cache did not match. Check for null and string | 2989 // The inlined call site cache did not match. Check for null and string |
2990 // before calling the deferred code. | 2990 // before calling the deferred code. |
2991 __ bind(&cache_miss); | 2991 __ bind(&cache_miss); |
2992 // Null is not an instance of anything. | 2992 // Null is not an instance of anything. |
2993 __ cmp(object, factory()->null_value()); | 2993 __ cmp(object, factory()->null_value()); |
2994 __ j(equal, &false_result); | 2994 __ j(equal, &false_result, Label::kNear); |
2995 | 2995 |
2996 // String values are not instances of anything. | 2996 // String values are not instances of anything. |
2997 Condition is_string = masm_->IsObjectStringType(object, temp, temp); | 2997 Condition is_string = masm_->IsObjectStringType(object, temp, temp); |
2998 __ j(is_string, &false_result); | 2998 __ j(is_string, &false_result, Label::kNear); |
2999 | 2999 |
3000 // Go to the deferred code. | 3000 // Go to the deferred code. |
3001 __ jmp(deferred->entry()); | 3001 __ jmp(deferred->entry()); |
3002 | 3002 |
3003 __ bind(&false_result); | 3003 __ bind(&false_result); |
3004 __ mov(ToRegister(instr->result()), factory()->false_value()); | 3004 __ mov(ToRegister(instr->result()), factory()->false_value()); |
3005 | 3005 |
3006 // Here result has either true or false. Deferred code also produces true or | 3006 // Here result has either true or false. Deferred code also produces true or |
3007 // false object. | 3007 // false object. |
3008 __ bind(deferred->exit()); | 3008 __ bind(deferred->exit()); |
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3133 } | 3133 } |
3134 int no_frame_start = -1; | 3134 int no_frame_start = -1; |
3135 if (NeedsEagerFrame()) { | 3135 if (NeedsEagerFrame()) { |
3136 __ mov(esp, ebp); | 3136 __ mov(esp, ebp); |
3137 __ pop(ebp); | 3137 __ pop(ebp); |
3138 no_frame_start = masm_->pc_offset(); | 3138 no_frame_start = masm_->pc_offset(); |
3139 } | 3139 } |
3140 if (dynamic_frame_alignment_) { | 3140 if (dynamic_frame_alignment_) { |
3141 Label no_padding; | 3141 Label no_padding; |
3142 __ cmp(edx, Immediate(kNoAlignmentPadding)); | 3142 __ cmp(edx, Immediate(kNoAlignmentPadding)); |
3143 __ j(equal, &no_padding); | 3143 __ j(equal, &no_padding, Label::kNear); |
3144 | 3144 |
3145 EmitReturn(instr, true); | 3145 EmitReturn(instr, true); |
3146 __ bind(&no_padding); | 3146 __ bind(&no_padding); |
3147 } | 3147 } |
3148 | 3148 |
3149 EmitReturn(instr, false); | 3149 EmitReturn(instr, false); |
3150 if (no_frame_start != -1) { | 3150 if (no_frame_start != -1) { |
3151 info()->AddNoFrameRange(no_frame_start, masm_->pc_offset()); | 3151 info()->AddNoFrameRange(no_frame_start, masm_->pc_offset()); |
3152 } | 3152 } |
3153 } | 3153 } |
(...skipping 497 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3651 | 3651 |
3652 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) { | 3652 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) { |
3653 Register receiver = ToRegister(instr->receiver()); | 3653 Register receiver = ToRegister(instr->receiver()); |
3654 Register function = ToRegister(instr->function()); | 3654 Register function = ToRegister(instr->function()); |
3655 Register scratch = ToRegister(instr->temp()); | 3655 Register scratch = ToRegister(instr->temp()); |
3656 | 3656 |
3657 // If the receiver is null or undefined, we have to pass the global | 3657 // If the receiver is null or undefined, we have to pass the global |
3658 // object as a receiver to normal functions. Values have to be | 3658 // object as a receiver to normal functions. Values have to be |
3659 // passed unchanged to builtins and strict-mode functions. | 3659 // passed unchanged to builtins and strict-mode functions. |
3660 Label global_object, receiver_ok; | 3660 Label global_object, receiver_ok; |
| 3661 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; |
3661 | 3662 |
3662 // Do not transform the receiver to object for strict mode | 3663 // Do not transform the receiver to object for strict mode |
3663 // functions. | 3664 // functions. |
3664 __ mov(scratch, | 3665 __ mov(scratch, |
3665 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); | 3666 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); |
3666 __ test_b(FieldOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset), | 3667 __ test_b(FieldOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset), |
3667 1 << SharedFunctionInfo::kStrictModeBitWithinByte); | 3668 1 << SharedFunctionInfo::kStrictModeBitWithinByte); |
3668 __ j(not_equal, &receiver_ok); // A near jump is not sufficient here! | 3669 __ j(not_equal, &receiver_ok, dist); |
3669 | 3670 |
3670 // Do not transform the receiver to object for builtins. | 3671 // Do not transform the receiver to object for builtins. |
3671 __ test_b(FieldOperand(scratch, SharedFunctionInfo::kNativeByteOffset), | 3672 __ test_b(FieldOperand(scratch, SharedFunctionInfo::kNativeByteOffset), |
3672 1 << SharedFunctionInfo::kNativeBitWithinByte); | 3673 1 << SharedFunctionInfo::kNativeBitWithinByte); |
3673 __ j(not_equal, &receiver_ok); | 3674 __ j(not_equal, &receiver_ok, dist); |
3674 | 3675 |
3675 // Normal function. Replace undefined or null with global receiver. | 3676 // Normal function. Replace undefined or null with global receiver. |
3676 __ cmp(receiver, factory()->null_value()); | 3677 __ cmp(receiver, factory()->null_value()); |
3677 __ j(equal, &global_object, Label::kNear); | 3678 __ j(equal, &global_object, Label::kNear); |
3678 __ cmp(receiver, factory()->undefined_value()); | 3679 __ cmp(receiver, factory()->undefined_value()); |
3679 __ j(equal, &global_object, Label::kNear); | 3680 __ j(equal, &global_object, Label::kNear); |
3680 | 3681 |
3681 // The receiver should be a JS object. | 3682 // The receiver should be a JS object. |
3682 __ test(receiver, Immediate(kSmiTagMask)); | 3683 __ test(receiver, Immediate(kSmiTagMask)); |
3683 DeoptimizeIf(equal, instr->environment()); | 3684 DeoptimizeIf(equal, instr->environment()); |
(...skipping 188 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3872 | 3873 |
3873 // Preserve the value of all registers. | 3874 // Preserve the value of all registers. |
3874 PushSafepointRegistersScope scope(this); | 3875 PushSafepointRegistersScope scope(this); |
3875 | 3876 |
3876 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 3877 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
3877 // Check the sign of the argument. If the argument is positive, just | 3878 // Check the sign of the argument. If the argument is positive, just |
3878 // return it. We do not need to patch the stack since |input| and | 3879 // return it. We do not need to patch the stack since |input| and |
3879 // |result| are the same register and |input| will be restored | 3880 // |result| are the same register and |input| will be restored |
3880 // unchanged by popping safepoint registers. | 3881 // unchanged by popping safepoint registers. |
3881 __ test(tmp, Immediate(HeapNumber::kSignMask)); | 3882 __ test(tmp, Immediate(HeapNumber::kSignMask)); |
3882 __ j(zero, &done); | 3883 __ j(zero, &done, Label::kNear); |
3883 | 3884 |
3884 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); | 3885 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); |
3885 __ jmp(&allocated, Label::kNear); | 3886 __ jmp(&allocated, Label::kNear); |
3886 | 3887 |
3887 // Slow case: Call the runtime system to do the number allocation. | 3888 // Slow case: Call the runtime system to do the number allocation. |
3888 __ bind(&slow); | 3889 __ bind(&slow); |
3889 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, | 3890 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, |
3890 instr, instr->context()); | 3891 instr, instr->context()); |
3891 // Set the pointer to the new heap number in tmp. | 3892 // Set the pointer to the new heap number in tmp. |
3892 if (!tmp.is(eax)) __ mov(tmp, eax); | 3893 if (!tmp.is(eax)) __ mov(tmp, eax); |
(...skipping 133 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4026 CpuFeatureScope scope(masm(), SSE2); | 4027 CpuFeatureScope scope(masm(), SSE2); |
4027 Register output_reg = ToRegister(instr->result()); | 4028 Register output_reg = ToRegister(instr->result()); |
4028 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 4029 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
4029 XMMRegister xmm_scratch = double_scratch0(); | 4030 XMMRegister xmm_scratch = double_scratch0(); |
4030 XMMRegister input_temp = ToDoubleRegister(instr->temp()); | 4031 XMMRegister input_temp = ToDoubleRegister(instr->temp()); |
4031 ExternalReference one_half = ExternalReference::address_of_one_half(); | 4032 ExternalReference one_half = ExternalReference::address_of_one_half(); |
4032 ExternalReference minus_one_half = | 4033 ExternalReference minus_one_half = |
4033 ExternalReference::address_of_minus_one_half(); | 4034 ExternalReference::address_of_minus_one_half(); |
4034 | 4035 |
4035 Label done, round_to_zero, below_one_half, do_not_compensate; | 4036 Label done, round_to_zero, below_one_half, do_not_compensate; |
| 4037 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; |
| 4038 |
4036 __ movsd(xmm_scratch, Operand::StaticVariable(one_half)); | 4039 __ movsd(xmm_scratch, Operand::StaticVariable(one_half)); |
4037 __ ucomisd(xmm_scratch, input_reg); | 4040 __ ucomisd(xmm_scratch, input_reg); |
4038 __ j(above, &below_one_half); | 4041 __ j(above, &below_one_half, Label::kNear); |
4039 | 4042 |
4040 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x). | 4043 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x). |
4041 __ addsd(xmm_scratch, input_reg); | 4044 __ addsd(xmm_scratch, input_reg); |
4042 __ cvttsd2si(output_reg, Operand(xmm_scratch)); | 4045 __ cvttsd2si(output_reg, Operand(xmm_scratch)); |
4043 // Overflow is signalled with minint. | 4046 // Overflow is signalled with minint. |
4044 __ cmp(output_reg, 0x80000000u); | 4047 __ cmp(output_reg, 0x80000000u); |
4045 __ RecordComment("D2I conversion overflow"); | 4048 __ RecordComment("D2I conversion overflow"); |
4046 DeoptimizeIf(equal, instr->environment()); | 4049 DeoptimizeIf(equal, instr->environment()); |
4047 __ jmp(&done); | 4050 __ jmp(&done, dist); |
4048 | 4051 |
4049 __ bind(&below_one_half); | 4052 __ bind(&below_one_half); |
4050 __ movsd(xmm_scratch, Operand::StaticVariable(minus_one_half)); | 4053 __ movsd(xmm_scratch, Operand::StaticVariable(minus_one_half)); |
4051 __ ucomisd(xmm_scratch, input_reg); | 4054 __ ucomisd(xmm_scratch, input_reg); |
4052 __ j(below_equal, &round_to_zero); | 4055 __ j(below_equal, &round_to_zero, Label::kNear); |
4053 | 4056 |
4054 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then | 4057 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then |
4055 // compare and compensate. | 4058 // compare and compensate. |
4056 __ movaps(input_temp, input_reg); // Do not alter input_reg. | 4059 __ movaps(input_temp, input_reg); // Do not alter input_reg. |
4057 __ subsd(input_temp, xmm_scratch); | 4060 __ subsd(input_temp, xmm_scratch); |
4058 __ cvttsd2si(output_reg, Operand(input_temp)); | 4061 __ cvttsd2si(output_reg, Operand(input_temp)); |
4059 // Catch minint due to overflow, and to prevent overflow when compensating. | 4062 // Catch minint due to overflow, and to prevent overflow when compensating. |
4060 __ cmp(output_reg, 0x80000000u); | 4063 __ cmp(output_reg, 0x80000000u); |
4061 __ RecordComment("D2I conversion overflow"); | 4064 __ RecordComment("D2I conversion overflow"); |
4062 DeoptimizeIf(equal, instr->environment()); | 4065 DeoptimizeIf(equal, instr->environment()); |
4063 | 4066 |
4064 __ Cvtsi2sd(xmm_scratch, output_reg); | 4067 __ Cvtsi2sd(xmm_scratch, output_reg); |
4065 __ ucomisd(xmm_scratch, input_temp); | 4068 __ ucomisd(xmm_scratch, input_temp); |
4066 __ j(equal, &done); | 4069 __ j(equal, &done, dist); |
4067 __ sub(output_reg, Immediate(1)); | 4070 __ sub(output_reg, Immediate(1)); |
4068 // No overflow because we already ruled out minint. | 4071 // No overflow because we already ruled out minint. |
4069 __ jmp(&done); | 4072 __ jmp(&done, dist); |
4070 | 4073 |
4071 __ bind(&round_to_zero); | 4074 __ bind(&round_to_zero); |
4072 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if | 4075 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if |
4073 // we can ignore the difference between a result of -0 and +0. | 4076 // we can ignore the difference between a result of -0 and +0. |
4074 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 4077 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
4075 // If the sign is positive, we return +0. | 4078 // If the sign is positive, we return +0. |
4076 __ movmskpd(output_reg, input_reg); | 4079 __ movmskpd(output_reg, input_reg); |
4077 __ test(output_reg, Immediate(1)); | 4080 __ test(output_reg, Immediate(1)); |
4078 __ RecordComment("Minus zero"); | 4081 __ RecordComment("Minus zero"); |
4079 DeoptimizeIf(not_zero, instr->environment()); | 4082 DeoptimizeIf(not_zero, instr->environment()); |
(...skipping 334 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4414 ArrayNoArgumentConstructorStub stub(kind, context_mode, override_mode); | 4417 ArrayNoArgumentConstructorStub stub(kind, context_mode, override_mode); |
4415 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4418 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
4416 } else if (instr->arity() == 1) { | 4419 } else if (instr->arity() == 1) { |
4417 Label done; | 4420 Label done; |
4418 if (IsFastPackedElementsKind(kind)) { | 4421 if (IsFastPackedElementsKind(kind)) { |
4419 Label packed_case; | 4422 Label packed_case; |
4420 // We might need a change here | 4423 // We might need a change here |
4421 // look at the first argument | 4424 // look at the first argument |
4422 __ mov(ecx, Operand(esp, 0)); | 4425 __ mov(ecx, Operand(esp, 0)); |
4423 __ test(ecx, ecx); | 4426 __ test(ecx, ecx); |
4424 __ j(zero, &packed_case); | 4427 __ j(zero, &packed_case, Label::kNear); |
4425 | 4428 |
4426 ElementsKind holey_kind = GetHoleyElementsKind(kind); | 4429 ElementsKind holey_kind = GetHoleyElementsKind(kind); |
4427 ArraySingleArgumentConstructorStub stub(holey_kind, context_mode, | 4430 ArraySingleArgumentConstructorStub stub(holey_kind, context_mode, |
4428 override_mode); | 4431 override_mode); |
4429 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4432 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
4430 __ jmp(&done); | 4433 __ jmp(&done, Label::kNear); |
4431 __ bind(&packed_case); | 4434 __ bind(&packed_case); |
4432 } | 4435 } |
4433 | 4436 |
4434 ArraySingleArgumentConstructorStub stub(kind, context_mode, override_mode); | 4437 ArraySingleArgumentConstructorStub stub(kind, context_mode, override_mode); |
4435 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4438 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
4436 __ bind(&done); | 4439 __ bind(&done); |
4437 } else { | 4440 } else { |
4438 ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode); | 4441 ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode); |
4439 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4442 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
4440 } | 4443 } |
(...skipping 276 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4717 instr->additional_index()); | 4720 instr->additional_index()); |
4718 | 4721 |
4719 if (CpuFeatures::IsSafeForSnapshot(SSE2)) { | 4722 if (CpuFeatures::IsSafeForSnapshot(SSE2)) { |
4720 CpuFeatureScope scope(masm(), SSE2); | 4723 CpuFeatureScope scope(masm(), SSE2); |
4721 XMMRegister value = ToDoubleRegister(instr->value()); | 4724 XMMRegister value = ToDoubleRegister(instr->value()); |
4722 | 4725 |
4723 if (instr->NeedsCanonicalization()) { | 4726 if (instr->NeedsCanonicalization()) { |
4724 Label have_value; | 4727 Label have_value; |
4725 | 4728 |
4726 __ ucomisd(value, value); | 4729 __ ucomisd(value, value); |
4727 __ j(parity_odd, &have_value); // NaN. | 4730 __ j(parity_odd, &have_value, Label::kNear); // NaN. |
4728 | 4731 |
4729 __ movsd(value, Operand::StaticVariable(canonical_nan_reference)); | 4732 __ movsd(value, Operand::StaticVariable(canonical_nan_reference)); |
4730 __ bind(&have_value); | 4733 __ bind(&have_value); |
4731 } | 4734 } |
4732 | 4735 |
4733 __ movsd(double_store_operand, value); | 4736 __ movsd(double_store_operand, value); |
4734 } else { | 4737 } else { |
4735 // Can't use SSE2 in the serializer | 4738 // Can't use SSE2 in the serializer |
4736 if (instr->hydrogen()->IsConstantHoleStore()) { | 4739 if (instr->hydrogen()->IsConstantHoleStore()) { |
4737 // This means we should store the (double) hole. No floating point | 4740 // This means we should store the (double) hole. No floating point |
(...skipping 15 matching lines...) Expand all Loading... |
4753 } else { | 4756 } else { |
4754 Label no_special_nan_handling; | 4757 Label no_special_nan_handling; |
4755 X87Register value = ToX87Register(instr->value()); | 4758 X87Register value = ToX87Register(instr->value()); |
4756 X87Fxch(value); | 4759 X87Fxch(value); |
4757 | 4760 |
4758 if (instr->NeedsCanonicalization()) { | 4761 if (instr->NeedsCanonicalization()) { |
4759 __ fld(0); | 4762 __ fld(0); |
4760 __ fld(0); | 4763 __ fld(0); |
4761 __ FCmp(); | 4764 __ FCmp(); |
4762 | 4765 |
4763 __ j(parity_odd, &no_special_nan_handling); | 4766 __ j(parity_odd, &no_special_nan_handling, Label::kNear); |
4764 __ sub(esp, Immediate(kDoubleSize)); | 4767 __ sub(esp, Immediate(kDoubleSize)); |
4765 __ fst_d(MemOperand(esp, 0)); | 4768 __ fst_d(MemOperand(esp, 0)); |
4766 __ cmp(MemOperand(esp, sizeof(kHoleNanLower32)), | 4769 __ cmp(MemOperand(esp, sizeof(kHoleNanLower32)), |
4767 Immediate(kHoleNanUpper32)); | 4770 Immediate(kHoleNanUpper32)); |
4768 __ add(esp, Immediate(kDoubleSize)); | 4771 __ add(esp, Immediate(kDoubleSize)); |
4769 Label canonicalize; | 4772 Label canonicalize; |
4770 __ j(not_equal, &canonicalize); | 4773 __ j(not_equal, &canonicalize, Label::kNear); |
4771 __ jmp(&no_special_nan_handling); | 4774 __ jmp(&no_special_nan_handling, Label::kNear); |
4772 __ bind(&canonicalize); | 4775 __ bind(&canonicalize); |
4773 __ fstp(0); | 4776 __ fstp(0); |
4774 __ fld_d(Operand::StaticVariable(canonical_nan_reference)); | 4777 __ fld_d(Operand::StaticVariable(canonical_nan_reference)); |
4775 } | 4778 } |
4776 | 4779 |
4777 __ bind(&no_special_nan_handling); | 4780 __ bind(&no_special_nan_handling); |
4778 __ fst_d(double_store_operand); | 4781 __ fst_d(double_store_operand); |
4779 } | 4782 } |
4780 } | 4783 } |
4781 } | 4784 } |
(...skipping 959 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5741 DeferredCheckMaps* deferred = NULL; | 5744 DeferredCheckMaps* deferred = NULL; |
5742 if (instr->hydrogen()->has_migration_target()) { | 5745 if (instr->hydrogen()->has_migration_target()) { |
5743 deferred = new(zone()) DeferredCheckMaps(this, instr, reg, x87_stack_); | 5746 deferred = new(zone()) DeferredCheckMaps(this, instr, reg, x87_stack_); |
5744 __ bind(deferred->check_maps()); | 5747 __ bind(deferred->check_maps()); |
5745 } | 5748 } |
5746 | 5749 |
5747 UniqueSet<Map> map_set = instr->hydrogen()->map_set(); | 5750 UniqueSet<Map> map_set = instr->hydrogen()->map_set(); |
5748 Label success; | 5751 Label success; |
5749 for (int i = 0; i < map_set.size() - 1; i++) { | 5752 for (int i = 0; i < map_set.size() - 1; i++) { |
5750 Handle<Map> map = map_set.at(i).handle(); | 5753 Handle<Map> map = map_set.at(i).handle(); |
5751 __ CompareMap(reg, map, &success); | 5754 __ CompareMap(reg, map); |
5752 __ j(equal, &success); | 5755 __ j(equal, &success, Label::kNear); |
5753 } | 5756 } |
5754 | 5757 |
5755 Handle<Map> map = map_set.at(map_set.size() - 1).handle(); | 5758 Handle<Map> map = map_set.at(map_set.size() - 1).handle(); |
5756 __ CompareMap(reg, map, &success); | 5759 __ CompareMap(reg, map); |
5757 if (instr->hydrogen()->has_migration_target()) { | 5760 if (instr->hydrogen()->has_migration_target()) { |
5758 __ j(not_equal, deferred->entry()); | 5761 __ j(not_equal, deferred->entry()); |
5759 } else { | 5762 } else { |
5760 DeoptimizeIf(not_equal, instr->environment()); | 5763 DeoptimizeIf(not_equal, instr->environment()); |
5761 } | 5764 } |
5762 | 5765 |
5763 __ bind(&success); | 5766 __ bind(&success); |
5764 } | 5767 } |
5765 | 5768 |
5766 | 5769 |
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5824 Register scratch2 = ToRegister(instr->scratch2()); | 5827 Register scratch2 = ToRegister(instr->scratch2()); |
5825 Register scratch3 = ToRegister(instr->scratch3()); | 5828 Register scratch3 = ToRegister(instr->scratch3()); |
5826 Label is_smi, done, heap_number, valid_exponent, | 5829 Label is_smi, done, heap_number, valid_exponent, |
5827 largest_value, zero_result, maybe_nan_or_infinity; | 5830 largest_value, zero_result, maybe_nan_or_infinity; |
5828 | 5831 |
5829 __ JumpIfSmi(input_reg, &is_smi); | 5832 __ JumpIfSmi(input_reg, &is_smi); |
5830 | 5833 |
5831 // Check for heap number | 5834 // Check for heap number |
5832 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 5835 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
5833 factory()->heap_number_map()); | 5836 factory()->heap_number_map()); |
5834 __ j(equal, &heap_number, Label::kFar); | 5837 __ j(equal, &heap_number, Label::kNear); |
5835 | 5838 |
5836 // Check for undefined. Undefined is converted to zero for clamping | 5839 // Check for undefined. Undefined is converted to zero for clamping |
5837 // conversions. | 5840 // conversions. |
5838 __ cmp(input_reg, factory()->undefined_value()); | 5841 __ cmp(input_reg, factory()->undefined_value()); |
5839 DeoptimizeIf(not_equal, instr->environment()); | 5842 DeoptimizeIf(not_equal, instr->environment()); |
5840 __ jmp(&zero_result); | 5843 __ jmp(&zero_result, Label::kNear); |
5841 | 5844 |
5842 // Heap number | 5845 // Heap number |
5843 __ bind(&heap_number); | 5846 __ bind(&heap_number); |
5844 | 5847 |
5845 // Surprisingly, all of the hand-crafted bit-manipulations below are much | 5848 // Surprisingly, all of the hand-crafted bit-manipulations below are much |
5846 // faster than the x86 FPU built-in instruction, especially since "banker's | 5849 // faster than the x86 FPU built-in instruction, especially since "banker's |
5847 // rounding" would be additionally very expensive | 5850 // rounding" would be additionally very expensive |
5848 | 5851 |
5849 // Get exponent word. | 5852 // Get exponent word. |
5850 __ mov(scratch, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 5853 __ mov(scratch, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
5851 __ mov(scratch3, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); | 5854 __ mov(scratch3, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); |
5852 | 5855 |
5853 // Test for negative values --> clamp to zero | 5856 // Test for negative values --> clamp to zero |
5854 __ test(scratch, scratch); | 5857 __ test(scratch, scratch); |
5855 __ j(negative, &zero_result); | 5858 __ j(negative, &zero_result, Label::kNear); |
5856 | 5859 |
5857 // Get exponent alone in scratch2. | 5860 // Get exponent alone in scratch2. |
5858 __ mov(scratch2, scratch); | 5861 __ mov(scratch2, scratch); |
5859 __ and_(scratch2, HeapNumber::kExponentMask); | 5862 __ and_(scratch2, HeapNumber::kExponentMask); |
5860 __ shr(scratch2, HeapNumber::kExponentShift); | 5863 __ shr(scratch2, HeapNumber::kExponentShift); |
5861 __ j(zero, &zero_result); | 5864 __ j(zero, &zero_result, Label::kNear); |
5862 __ sub(scratch2, Immediate(HeapNumber::kExponentBias - 1)); | 5865 __ sub(scratch2, Immediate(HeapNumber::kExponentBias - 1)); |
5863 __ j(negative, &zero_result); | 5866 __ j(negative, &zero_result, Label::kNear); |
5864 | 5867 |
5865 const uint32_t non_int8_exponent = 7; | 5868 const uint32_t non_int8_exponent = 7; |
5866 __ cmp(scratch2, Immediate(non_int8_exponent + 1)); | 5869 __ cmp(scratch2, Immediate(non_int8_exponent + 1)); |
5867 // If the exponent is too big, check for special values. | 5870 // If the exponent is too big, check for special values. |
5868 __ j(greater, &maybe_nan_or_infinity, Label::kNear); | 5871 __ j(greater, &maybe_nan_or_infinity, Label::kNear); |
5869 | 5872 |
5870 __ bind(&valid_exponent); | 5873 __ bind(&valid_exponent); |
5871 // Exponent word in scratch, exponent in scratch2. We know that 0 <= exponent | 5874 // Exponent word in scratch, exponent in scratch2. We know that 0 <= exponent |
5872 // < 7. The shift bias is the number of bits to shift the mantissa such that | 5875 // < 7. The shift bias is the number of bits to shift the mantissa such that |
5873 // with an exponent of 7 the top-most one is in bit 30, allowing | 5876 // with an exponent of 7 the top-most one is in bit 30, allowing |
(...skipping 10 matching lines...) Expand all Loading... |
5884 __ shl_cl(scratch); | 5887 __ shl_cl(scratch); |
5885 // Use "banker's rounding" to spec: If fractional part of number is 0.5, then | 5888 // Use "banker's rounding" to spec: If fractional part of number is 0.5, then |
5886 // use the bit in the "ones" place and add it to the "halves" place, which has | 5889 // use the bit in the "ones" place and add it to the "halves" place, which has |
5887 // the effect of rounding to even. | 5890 // the effect of rounding to even. |
5888 __ mov(scratch2, scratch); | 5891 __ mov(scratch2, scratch); |
5889 const uint32_t one_half_bit_shift = 30 - sizeof(uint8_t) * 8; | 5892 const uint32_t one_half_bit_shift = 30 - sizeof(uint8_t) * 8; |
5890 const uint32_t one_bit_shift = one_half_bit_shift + 1; | 5893 const uint32_t one_bit_shift = one_half_bit_shift + 1; |
5891 __ and_(scratch2, Immediate((1 << one_bit_shift) - 1)); | 5894 __ and_(scratch2, Immediate((1 << one_bit_shift) - 1)); |
5892 __ cmp(scratch2, Immediate(1 << one_half_bit_shift)); | 5895 __ cmp(scratch2, Immediate(1 << one_half_bit_shift)); |
5893 Label no_round; | 5896 Label no_round; |
5894 __ j(less, &no_round); | 5897 __ j(less, &no_round, Label::kNear); |
5895 Label round_up; | 5898 Label round_up; |
5896 __ mov(scratch2, Immediate(1 << one_half_bit_shift)); | 5899 __ mov(scratch2, Immediate(1 << one_half_bit_shift)); |
5897 __ j(greater, &round_up); | 5900 __ j(greater, &round_up, Label::kNear); |
5898 __ test(scratch3, scratch3); | 5901 __ test(scratch3, scratch3); |
5899 __ j(not_zero, &round_up); | 5902 __ j(not_zero, &round_up, Label::kNear); |
5900 __ mov(scratch2, scratch); | 5903 __ mov(scratch2, scratch); |
5901 __ and_(scratch2, Immediate(1 << one_bit_shift)); | 5904 __ and_(scratch2, Immediate(1 << one_bit_shift)); |
5902 __ shr(scratch2, 1); | 5905 __ shr(scratch2, 1); |
5903 __ bind(&round_up); | 5906 __ bind(&round_up); |
5904 __ add(scratch, scratch2); | 5907 __ add(scratch, scratch2); |
5905 __ j(overflow, &largest_value); | 5908 __ j(overflow, &largest_value, Label::kNear); |
5906 __ bind(&no_round); | 5909 __ bind(&no_round); |
5907 __ shr(scratch, 23); | 5910 __ shr(scratch, 23); |
5908 __ mov(result_reg, scratch); | 5911 __ mov(result_reg, scratch); |
5909 __ jmp(&done, Label::kNear); | 5912 __ jmp(&done, Label::kNear); |
5910 | 5913 |
5911 __ bind(&maybe_nan_or_infinity); | 5914 __ bind(&maybe_nan_or_infinity); |
5912 // Check for NaN/Infinity, all other values map to 255 | 5915 // Check for NaN/Infinity, all other values map to 255 |
5913 __ cmp(scratch2, Immediate(HeapNumber::kInfinityOrNanExponent + 1)); | 5916 __ cmp(scratch2, Immediate(HeapNumber::kInfinityOrNanExponent + 1)); |
5914 __ j(not_equal, &largest_value, Label::kNear); | 5917 __ j(not_equal, &largest_value, Label::kNear); |
5915 | 5918 |
5916 // Check for NaN, which differs from Infinity in that at least one mantissa | 5919 // Check for NaN, which differs from Infinity in that at least one mantissa |
5917 // bit is set. | 5920 // bit is set. |
5918 __ and_(scratch, HeapNumber::kMantissaMask); | 5921 __ and_(scratch, HeapNumber::kMantissaMask); |
5919 __ or_(scratch, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); | 5922 __ or_(scratch, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); |
5920 __ j(not_zero, &zero_result); // M!=0 --> NaN | 5923 __ j(not_zero, &zero_result, Label::kNear); // M!=0 --> NaN |
5921 // Infinity -> Fall through to map to 255. | 5924 // Infinity -> Fall through to map to 255. |
5922 | 5925 |
5923 __ bind(&largest_value); | 5926 __ bind(&largest_value); |
5924 __ mov(result_reg, Immediate(255)); | 5927 __ mov(result_reg, Immediate(255)); |
5925 __ jmp(&done, Label::kNear); | 5928 __ jmp(&done, Label::kNear); |
5926 | 5929 |
5927 __ bind(&zero_result); | 5930 __ bind(&zero_result); |
5928 __ xor_(result_reg, result_reg); | 5931 __ xor_(result_reg, result_reg); |
5929 __ jmp(&done); | 5932 __ jmp(&done, Label::kNear); |
5930 | 5933 |
5931 // smi | 5934 // smi |
5932 __ bind(&is_smi); | 5935 __ bind(&is_smi); |
5933 if (!input_reg.is(result_reg)) { | 5936 if (!input_reg.is(result_reg)) { |
5934 __ mov(result_reg, input_reg); | 5937 __ mov(result_reg, input_reg); |
5935 } | 5938 } |
5936 __ SmiUntag(result_reg); | 5939 __ SmiUntag(result_reg); |
5937 __ ClampUint8(result_reg); | 5940 __ ClampUint8(result_reg); |
5938 __ bind(&done); | 5941 __ bind(&done); |
5939 } | 5942 } |
(...skipping 127 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6067 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 6070 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
6068 __ push(Immediate(instr->hydrogen()->pattern())); | 6071 __ push(Immediate(instr->hydrogen()->pattern())); |
6069 __ push(Immediate(instr->hydrogen()->flags())); | 6072 __ push(Immediate(instr->hydrogen()->flags())); |
6070 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); | 6073 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); |
6071 __ mov(ebx, eax); | 6074 __ mov(ebx, eax); |
6072 | 6075 |
6073 __ bind(&materialized); | 6076 __ bind(&materialized); |
6074 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 6077 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
6075 Label allocated, runtime_allocate; | 6078 Label allocated, runtime_allocate; |
6076 __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); | 6079 __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); |
6077 __ jmp(&allocated); | 6080 __ jmp(&allocated, Label::kNear); |
6078 | 6081 |
6079 __ bind(&runtime_allocate); | 6082 __ bind(&runtime_allocate); |
6080 __ push(ebx); | 6083 __ push(ebx); |
6081 __ push(Immediate(Smi::FromInt(size))); | 6084 __ push(Immediate(Smi::FromInt(size))); |
6082 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 6085 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); |
6083 __ pop(ebx); | 6086 __ pop(ebx); |
6084 | 6087 |
6085 __ bind(&allocated); | 6088 __ bind(&allocated); |
6086 // Copy the content into the newly allocated memory. | 6089 // Copy the content into the newly allocated memory. |
6087 // (Unroll copy loop once for better throughput). | 6090 // (Unroll copy loop once for better throughput). |
(...skipping 305 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6393 __ bind(&use_cache); | 6396 __ bind(&use_cache); |
6394 } | 6397 } |
6395 | 6398 |
6396 | 6399 |
// Loads the enum cache for a for-in loop: given the map of the object being
// enumerated, produces the FixedArray of enumerable property names.
// Deoptimizes if the cache entry is absent (result is zero/null).
void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
  Register map = ToRegister(instr->map());
  Register result = ToRegister(instr->result());
  Label load_cache, done;
  // EnumLength(map) == Smi(0) means the map has no cached enumerable
  // properties; hand back the canonical empty FixedArray instead.
  __ EnumLength(result, map);
  __ cmp(result, Immediate(Smi::FromInt(0)));
  __ j(not_equal, &load_cache, Label::kNear);
  __ mov(result, isolate()->factory()->empty_fixed_array());
  __ jmp(&done, Label::kNear);

  // Non-empty: walk map -> instance descriptors -> enum cache -> the
  // FixedArray slot selected by instr->idx().
  __ bind(&load_cache);
  __ LoadInstanceDescriptors(map, result);
  __ mov(result,
         FieldOperand(result, DescriptorArray::kEnumCacheOffset));
  __ mov(result,
         FieldOperand(result, FixedArray::SizeFor(instr->idx())));
  __ bind(&done);
  // A zero result means there is no usable cache array; bail out to the
  // full (deoptimized) for-in path.
  __ test(result, result);
  DeoptimizeIf(equal, instr->environment());
}
6417 | 6420 |
6418 | 6421 |
// Guards a for-in loop body: deoptimizes if the object's current map is not
// the map that was recorded when enumeration started (i.e. the object was
// mutated during iteration).
void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
  Register object = ToRegister(instr->value());
  __ cmp(ToRegister(instr->map()),
         FieldOperand(object, HeapObject::kMapOffset));
  DeoptimizeIf(not_equal, instr->environment());
}
6425 | 6428 |
6426 | 6429 |
// Loads a JSObject property by its field index (used by for-in fast path).
// A non-negative index addresses an in-object field; a negative index
// addresses the out-of-object properties backing store. The result is
// written over the |object| register; |index| is clobbered (negated) on the
// out-of-object path. Index is a smi, hence times_half_pointer_size scaling.
void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
  Register object = ToRegister(instr->object());
  Register index = ToRegister(instr->index());

  Label out_of_object, done;
  __ cmp(index, Immediate(0));
  __ j(less, &out_of_object, Label::kNear);
  // In-object field: load directly from the object's header-relative slot.
  __ mov(object, FieldOperand(object,
                              index,
                              times_half_pointer_size,
                              JSObject::kHeaderSize));
  __ jmp(&done, Label::kNear);

  __ bind(&out_of_object);
  // Out-of-object field: indirect through the properties FixedArray.
  __ mov(object, FieldOperand(object, JSObject::kPropertiesOffset));
  __ neg(index);
  // Index is now equal to out of object property index plus 1, hence the
  // -kPointerSize adjustment on the base offset below.
  __ mov(object, FieldOperand(object,
                              index,
                              times_half_pointer_size,
                              FixedArray::kHeaderSize - kPointerSize));
  __ bind(&done);
}
6450 | 6453 |
6451 | 6454 |
6452 #undef __ | 6455 #undef __ |
6453 | 6456 |
6454 } } // namespace v8::internal | 6457 } } // namespace v8::internal |
6455 | 6458 |
6456 #endif // V8_TARGET_ARCH_IA32 | 6459 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |