OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2029 matching lines...) | |
2040 if (index->value() == 0) { | 2040 if (index->value() == 0) { |
2041 __ mov(result, FieldOperand(object, JSDate::kValueOffset)); | 2041 __ mov(result, FieldOperand(object, JSDate::kValueOffset)); |
2042 } else { | 2042 } else { |
2043 if (index->value() < JSDate::kFirstUncachedField) { | 2043 if (index->value() < JSDate::kFirstUncachedField) { |
2044 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | 2044 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); |
2045 __ mov(scratch, Operand::StaticVariable(stamp)); | 2045 __ mov(scratch, Operand::StaticVariable(stamp)); |
2046 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset)); | 2046 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset)); |
2047 __ j(not_equal, &runtime, Label::kNear); | 2047 __ j(not_equal, &runtime, Label::kNear); |
2048 __ mov(result, FieldOperand(object, JSDate::kValueOffset + | 2048 __ mov(result, FieldOperand(object, JSDate::kValueOffset + |
2049 kPointerSize * index->value())); | 2049 kPointerSize * index->value())); |
2050 __ jmp(&done); | 2050 __ jmp(&done, Label::kNear); |
2051 } | 2051 } |
2052 __ bind(&runtime); | 2052 __ bind(&runtime); |
2053 __ PrepareCallCFunction(2, scratch); | 2053 __ PrepareCallCFunction(2, scratch); |
2054 __ mov(Operand(esp, 0), object); | 2054 __ mov(Operand(esp, 0), object); |
2055 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index)); | 2055 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index)); |
2056 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 2056 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
2057 __ bind(&done); | 2057 __ bind(&done); |
2058 } | 2058 } |
2059 } | 2059 } |
2060 | 2060 |
(...skipping 521 matching lines...) | |
2582 __ ucomisd(input_reg, input_reg); | 2582 __ ucomisd(input_reg, input_reg); |
2583 EmitFalseBranch(instr, parity_odd); | 2583 EmitFalseBranch(instr, parity_odd); |
2584 } else { | 2584 } else { |
2585 // Put the value on top of the stack | 2585 // Put the value on top of the stack |
2586 X87Register src = ToX87Register(instr->object()); | 2586 X87Register src = ToX87Register(instr->object()); |
2587 X87LoadForUsage(src); | 2587 X87LoadForUsage(src); |
2588 __ fld(0); | 2588 __ fld(0); |
2589 __ fld(0); | 2589 __ fld(0); |
2590 __ FCmp(); | 2590 __ FCmp(); |
2591 Label ok; | 2591 Label ok; |
2592 __ j(parity_even, &ok); | 2592 __ j(parity_even, &ok, Label::kNear); |
2593 __ fstp(0); | 2593 __ fstp(0); |
2594 EmitFalseBranch(instr, no_condition); | 2594 EmitFalseBranch(instr, no_condition); |
2595 __ bind(&ok); | 2595 __ bind(&ok); |
2596 } | 2596 } |
2597 | 2597 |
2598 | 2598 |
2599 __ sub(esp, Immediate(kDoubleSize)); | 2599 __ sub(esp, Immediate(kDoubleSize)); |
2600 if (use_sse2) { | 2600 if (use_sse2) { |
2601 CpuFeatureScope scope(masm(), SSE2); | 2601 CpuFeatureScope scope(masm(), SSE2); |
2602 XMMRegister input_reg = ToDoubleRegister(instr->object()); | 2602 XMMRegister input_reg = ToDoubleRegister(instr->object()); |
(...skipping 303 matching lines...) | |
2906 }; | 2906 }; |
2907 | 2907 |
2908 DeferredInstanceOfKnownGlobal* deferred; | 2908 DeferredInstanceOfKnownGlobal* deferred; |
2909 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr, x87_stack_); | 2909 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr, x87_stack_); |
2910 | 2910 |
2911 Label done, false_result; | 2911 Label done, false_result; |
2912 Register object = ToRegister(instr->value()); | 2912 Register object = ToRegister(instr->value()); |
2913 Register temp = ToRegister(instr->temp()); | 2913 Register temp = ToRegister(instr->temp()); |
2914 | 2914 |
2915 // A Smi is not an instance of anything. | 2915 // A Smi is not an instance of anything. |
2916 __ JumpIfSmi(object, &false_result); | 2916 __ JumpIfSmi(object, &false_result, Label::kNear); |
2917 | 2917 |
2918 // This is the inlined call site instanceof cache. The two occurrences of the | 2918 // This is the inlined call site instanceof cache. The two occurrences of the |
2919 // hole value will be patched to the last map/result pair generated by the | 2919 // hole value will be patched to the last map/result pair generated by the |
2920 // instanceof stub. | 2920 // instanceof stub. |
2921 Label cache_miss; | 2921 Label cache_miss; |
2922 Register map = ToRegister(instr->temp()); | 2922 Register map = ToRegister(instr->temp()); |
2923 __ mov(map, FieldOperand(object, HeapObject::kMapOffset)); | 2923 __ mov(map, FieldOperand(object, HeapObject::kMapOffset)); |
2924 __ bind(deferred->map_check()); // Label for calculating code patching. | 2924 __ bind(deferred->map_check()); // Label for calculating code patching. |
2925 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); | 2925 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); |
2926 __ cmp(map, Operand::ForCell(cache_cell)); // Patched to cached map. | 2926 __ cmp(map, Operand::ForCell(cache_cell)); // Patched to cached map. |
2927 __ j(not_equal, &cache_miss, Label::kNear); | 2927 __ j(not_equal, &cache_miss, Label::kNear); |
2928 __ mov(eax, factory()->the_hole_value()); // Patched to either true or false. | 2928 __ mov(eax, factory()->the_hole_value()); // Patched to either true or false. |
2929 __ jmp(&done); | 2929 __ jmp(&done, Label::kNear); |
2930 | 2930 |
2931 // The inlined call site cache did not match. Check for null and string | 2931 // The inlined call site cache did not match. Check for null and string |
2932 // before calling the deferred code. | 2932 // before calling the deferred code. |
2933 __ bind(&cache_miss); | 2933 __ bind(&cache_miss); |
2934 // Null is not an instance of anything. | 2934 // Null is not an instance of anything. |
2935 __ cmp(object, factory()->null_value()); | 2935 __ cmp(object, factory()->null_value()); |
2936 __ j(equal, &false_result); | 2936 __ j(equal, &false_result, Label::kNear); |
2937 | 2937 |
2938 // String values are not instances of anything. | 2938 // String values are not instances of anything. |
2939 Condition is_string = masm_->IsObjectStringType(object, temp, temp); | 2939 Condition is_string = masm_->IsObjectStringType(object, temp, temp); |
2940 __ j(is_string, &false_result); | 2940 __ j(is_string, &false_result, Label::kNear); |
2941 | 2941 |
2942 // Go to the deferred code. | 2942 // Go to the deferred code. |
2943 __ jmp(deferred->entry()); | 2943 __ jmp(deferred->entry()); |
2944 | 2944 |
2945 __ bind(&false_result); | 2945 __ bind(&false_result); |
2946 __ mov(ToRegister(instr->result()), factory()->false_value()); | 2946 __ mov(ToRegister(instr->result()), factory()->false_value()); |
2947 | 2947 |
2948 // Here result has either true or false. Deferred code also produces true or | 2948 // Here result has either true or false. Deferred code also produces true or |
2949 // false object. | 2949 // false object. |
2950 __ bind(deferred->exit()); | 2950 __ bind(deferred->exit()); |
(...skipping 124 matching lines...) | |
3075 } | 3075 } |
3076 int no_frame_start = -1; | 3076 int no_frame_start = -1; |
3077 if (NeedsEagerFrame()) { | 3077 if (NeedsEagerFrame()) { |
3078 __ mov(esp, ebp); | 3078 __ mov(esp, ebp); |
3079 __ pop(ebp); | 3079 __ pop(ebp); |
3080 no_frame_start = masm_->pc_offset(); | 3080 no_frame_start = masm_->pc_offset(); |
3081 } | 3081 } |
3082 if (dynamic_frame_alignment_) { | 3082 if (dynamic_frame_alignment_) { |
3083 Label no_padding; | 3083 Label no_padding; |
3084 __ cmp(edx, Immediate(kNoAlignmentPadding)); | 3084 __ cmp(edx, Immediate(kNoAlignmentPadding)); |
3085 __ j(equal, &no_padding); | 3085 __ j(equal, &no_padding, Label::kNear); |
3086 | 3086 |
3087 EmitReturn(instr, true); | 3087 EmitReturn(instr, true); |
3088 __ bind(&no_padding); | 3088 __ bind(&no_padding); |
3089 } | 3089 } |
3090 | 3090 |
3091 EmitReturn(instr, false); | 3091 EmitReturn(instr, false); |
3092 if (no_frame_start != -1) { | 3092 if (no_frame_start != -1) { |
3093 info()->AddNoFrameRange(no_frame_start, masm_->pc_offset()); | 3093 info()->AddNoFrameRange(no_frame_start, masm_->pc_offset()); |
3094 } | 3094 } |
3095 } | 3095 } |
(...skipping 504 matching lines...) | |
3600 // object as a receiver to normal functions. Values have to be | 3600 // object as a receiver to normal functions. Values have to be |
3601 // passed unchanged to builtins and strict-mode functions. | 3601 // passed unchanged to builtins and strict-mode functions. |
3602 Label global_object, receiver_ok; | 3602 Label global_object, receiver_ok; |
3603 | 3603 |
3604 // Do not transform the receiver to object for strict mode | 3604 // Do not transform the receiver to object for strict mode |
3605 // functions. | 3605 // functions. |
3606 __ mov(scratch, | 3606 __ mov(scratch, |
3607 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); | 3607 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); |
3608 __ test_b(FieldOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset), | 3608 __ test_b(FieldOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset), |
3609 1 << SharedFunctionInfo::kStrictModeBitWithinByte); | 3609 1 << SharedFunctionInfo::kStrictModeBitWithinByte); |
3610 __ j(not_equal, &receiver_ok); // A near jump is not sufficient here! | 3610 __ j(not_equal, &receiver_ok, Label::kNear); |
Jakob Kummerow (2013/10/28 10:37:19): Why do you think this comment is not true anymore?
Weiliang (2013/10/28 14:31:49): Yes, thanks for pointing it out. near jump is not
3611 | 3611 |
3612 // Do not transform the receiver to object for builtins. | 3612 // Do not transform the receiver to object for builtins. |
3613 __ test_b(FieldOperand(scratch, SharedFunctionInfo::kNativeByteOffset), | 3613 __ test_b(FieldOperand(scratch, SharedFunctionInfo::kNativeByteOffset), |
3614 1 << SharedFunctionInfo::kNativeBitWithinByte); | 3614 1 << SharedFunctionInfo::kNativeBitWithinByte); |
3615 __ j(not_equal, &receiver_ok); | 3615 __ j(not_equal, &receiver_ok, Label::kNear); |
3616 | 3616 |
3617 // Normal function. Replace undefined or null with global receiver. | 3617 // Normal function. Replace undefined or null with global receiver. |
3618 __ cmp(receiver, factory()->null_value()); | 3618 __ cmp(receiver, factory()->null_value()); |
3619 __ j(equal, &global_object, Label::kNear); | 3619 __ j(equal, &global_object, Label::kNear); |
3620 __ cmp(receiver, factory()->undefined_value()); | 3620 __ cmp(receiver, factory()->undefined_value()); |
3621 __ j(equal, &global_object, Label::kNear); | 3621 __ j(equal, &global_object, Label::kNear); |
3622 | 3622 |
3623 // The receiver should be a JS object. | 3623 // The receiver should be a JS object. |
3624 __ test(receiver, Immediate(kSmiTagMask)); | 3624 __ test(receiver, Immediate(kSmiTagMask)); |
3625 DeoptimizeIf(equal, instr->environment()); | 3625 DeoptimizeIf(equal, instr->environment()); |
(...skipping 188 matching lines...) | |
3814 | 3814 |
3815 // Preserve the value of all registers. | 3815 // Preserve the value of all registers. |
3816 PushSafepointRegistersScope scope(this); | 3816 PushSafepointRegistersScope scope(this); |
3817 | 3817 |
3818 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 3818 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
3819 // Check the sign of the argument. If the argument is positive, just | 3819 // Check the sign of the argument. If the argument is positive, just |
3820 // return it. We do not need to patch the stack since |input| and | 3820 // return it. We do not need to patch the stack since |input| and |
3821 // |result| are the same register and |input| will be restored | 3821 // |result| are the same register and |input| will be restored |
3822 // unchanged by popping safepoint registers. | 3822 // unchanged by popping safepoint registers. |
3823 __ test(tmp, Immediate(HeapNumber::kSignMask)); | 3823 __ test(tmp, Immediate(HeapNumber::kSignMask)); |
3824 __ j(zero, &done); | 3824 __ j(zero, &done, Label::kNear); |
3825 | 3825 |
3826 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); | 3826 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow); |
3827 __ jmp(&allocated, Label::kNear); | 3827 __ jmp(&allocated, Label::kNear); |
3828 | 3828 |
3829 // Slow case: Call the runtime system to do the number allocation. | 3829 // Slow case: Call the runtime system to do the number allocation. |
3830 __ bind(&slow); | 3830 __ bind(&slow); |
3831 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, | 3831 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, |
3832 instr, instr->context()); | 3832 instr, instr->context()); |
3833 // Set the pointer to the new heap number in tmp. | 3833 // Set the pointer to the new heap number in tmp. |
3834 if (!tmp.is(eax)) __ mov(tmp, eax); | 3834 if (!tmp.is(eax)) __ mov(tmp, eax); |
(...skipping 135 matching lines...) | |
3970 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3970 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
3971 XMMRegister xmm_scratch = double_scratch0(); | 3971 XMMRegister xmm_scratch = double_scratch0(); |
3972 XMMRegister input_temp = ToDoubleRegister(instr->temp()); | 3972 XMMRegister input_temp = ToDoubleRegister(instr->temp()); |
3973 ExternalReference one_half = ExternalReference::address_of_one_half(); | 3973 ExternalReference one_half = ExternalReference::address_of_one_half(); |
3974 ExternalReference minus_one_half = | 3974 ExternalReference minus_one_half = |
3975 ExternalReference::address_of_minus_one_half(); | 3975 ExternalReference::address_of_minus_one_half(); |
3976 | 3976 |
3977 Label done, round_to_zero, below_one_half, do_not_compensate; | 3977 Label done, round_to_zero, below_one_half, do_not_compensate; |
3978 __ movsd(xmm_scratch, Operand::StaticVariable(one_half)); | 3978 __ movsd(xmm_scratch, Operand::StaticVariable(one_half)); |
3979 __ ucomisd(xmm_scratch, input_reg); | 3979 __ ucomisd(xmm_scratch, input_reg); |
3980 __ j(above, &below_one_half); | 3980 __ j(above, &below_one_half, Label::kNear); |
3981 | 3981 |
3982 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x). | 3982 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x). |
3983 __ addsd(xmm_scratch, input_reg); | 3983 __ addsd(xmm_scratch, input_reg); |
3984 __ cvttsd2si(output_reg, Operand(xmm_scratch)); | 3984 __ cvttsd2si(output_reg, Operand(xmm_scratch)); |
3985 // Overflow is signalled with minint. | 3985 // Overflow is signalled with minint. |
3986 __ cmp(output_reg, 0x80000000u); | 3986 __ cmp(output_reg, 0x80000000u); |
3987 __ RecordComment("D2I conversion overflow"); | 3987 __ RecordComment("D2I conversion overflow"); |
3988 DeoptimizeIf(equal, instr->environment()); | 3988 DeoptimizeIf(equal, instr->environment()); |
3989 __ jmp(&done); | 3989 __ jmp(&done, Label::kNear); |
3990 | 3990 |
3991 __ bind(&below_one_half); | 3991 __ bind(&below_one_half); |
3992 __ movsd(xmm_scratch, Operand::StaticVariable(minus_one_half)); | 3992 __ movsd(xmm_scratch, Operand::StaticVariable(minus_one_half)); |
3993 __ ucomisd(xmm_scratch, input_reg); | 3993 __ ucomisd(xmm_scratch, input_reg); |
3994 __ j(below_equal, &round_to_zero); | 3994 __ j(below_equal, &round_to_zero, Label::kNear); |
3995 | 3995 |
3996 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then | 3996 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then |
3997 // compare and compensate. | 3997 // compare and compensate. |
3998 __ movsd(input_temp, input_reg); // Do not alter input_reg. | 3998 __ movsd(input_temp, input_reg); // Do not alter input_reg. |
3999 __ subsd(input_temp, xmm_scratch); | 3999 __ subsd(input_temp, xmm_scratch); |
4000 __ cvttsd2si(output_reg, Operand(input_temp)); | 4000 __ cvttsd2si(output_reg, Operand(input_temp)); |
4001 // Catch minint due to overflow, and to prevent overflow when compensating. | 4001 // Catch minint due to overflow, and to prevent overflow when compensating. |
4002 __ cmp(output_reg, 0x80000000u); | 4002 __ cmp(output_reg, 0x80000000u); |
4003 __ RecordComment("D2I conversion overflow"); | 4003 __ RecordComment("D2I conversion overflow"); |
4004 DeoptimizeIf(equal, instr->environment()); | 4004 DeoptimizeIf(equal, instr->environment()); |
4005 | 4005 |
4006 __ Cvtsi2sd(xmm_scratch, output_reg); | 4006 __ Cvtsi2sd(xmm_scratch, output_reg); |
4007 __ ucomisd(xmm_scratch, input_temp); | 4007 __ ucomisd(xmm_scratch, input_temp); |
4008 __ j(equal, &done); | 4008 __ j(equal, &done, Label::kNear); |
4009 __ sub(output_reg, Immediate(1)); | 4009 __ sub(output_reg, Immediate(1)); |
4010 // No overflow because we already ruled out minint. | 4010 // No overflow because we already ruled out minint. |
4011 __ jmp(&done); | 4011 __ jmp(&done, Label::kNear); |
4012 | 4012 |
4013 __ bind(&round_to_zero); | 4013 __ bind(&round_to_zero); |
4014 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if | 4014 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if |
4015 // we can ignore the difference between a result of -0 and +0. | 4015 // we can ignore the difference between a result of -0 and +0. |
4016 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 4016 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
4017 // If the sign is positive, we return +0. | 4017 // If the sign is positive, we return +0. |
4018 __ movmskpd(output_reg, input_reg); | 4018 __ movmskpd(output_reg, input_reg); |
4019 __ test(output_reg, Immediate(1)); | 4019 __ test(output_reg, Immediate(1)); |
4020 __ RecordComment("Minus zero"); | 4020 __ RecordComment("Minus zero"); |
4021 DeoptimizeIf(not_zero, instr->environment()); | 4021 DeoptimizeIf(not_zero, instr->environment()); |
(...skipping 334 matching lines...) | |
4356 ArrayNoArgumentConstructorStub stub(kind, context_mode, override_mode); | 4356 ArrayNoArgumentConstructorStub stub(kind, context_mode, override_mode); |
4357 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4357 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
4358 } else if (instr->arity() == 1) { | 4358 } else if (instr->arity() == 1) { |
4359 Label done; | 4359 Label done; |
4360 if (IsFastPackedElementsKind(kind)) { | 4360 if (IsFastPackedElementsKind(kind)) { |
4361 Label packed_case; | 4361 Label packed_case; |
4362 // We might need a change here | 4362 // We might need a change here |
4363 // look at the first argument | 4363 // look at the first argument |
4364 __ mov(ecx, Operand(esp, 0)); | 4364 __ mov(ecx, Operand(esp, 0)); |
4365 __ test(ecx, ecx); | 4365 __ test(ecx, ecx); |
4366 __ j(zero, &packed_case); | 4366 __ j(zero, &packed_case, Label::kNear); |
4367 | 4367 |
4368 ElementsKind holey_kind = GetHoleyElementsKind(kind); | 4368 ElementsKind holey_kind = GetHoleyElementsKind(kind); |
4369 ArraySingleArgumentConstructorStub stub(holey_kind, context_mode, | 4369 ArraySingleArgumentConstructorStub stub(holey_kind, context_mode, |
4370 override_mode); | 4370 override_mode); |
4371 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4371 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
4372 __ jmp(&done); | 4372 __ jmp(&done, Label::kNear); |
4373 __ bind(&packed_case); | 4373 __ bind(&packed_case); |
4374 } | 4374 } |
4375 | 4375 |
4376 ArraySingleArgumentConstructorStub stub(kind, context_mode, override_mode); | 4376 ArraySingleArgumentConstructorStub stub(kind, context_mode, override_mode); |
4377 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4377 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
4378 __ bind(&done); | 4378 __ bind(&done); |
4379 } else { | 4379 } else { |
4380 ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode); | 4380 ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode); |
4381 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 4381 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
4382 } | 4382 } |
(...skipping 276 matching lines...) | |
4659 instr->additional_index()); | 4659 instr->additional_index()); |
4660 | 4660 |
4661 if (CpuFeatures::IsSafeForSnapshot(SSE2)) { | 4661 if (CpuFeatures::IsSafeForSnapshot(SSE2)) { |
4662 CpuFeatureScope scope(masm(), SSE2); | 4662 CpuFeatureScope scope(masm(), SSE2); |
4663 XMMRegister value = ToDoubleRegister(instr->value()); | 4663 XMMRegister value = ToDoubleRegister(instr->value()); |
4664 | 4664 |
4665 if (instr->NeedsCanonicalization()) { | 4665 if (instr->NeedsCanonicalization()) { |
4666 Label have_value; | 4666 Label have_value; |
4667 | 4667 |
4668 __ ucomisd(value, value); | 4668 __ ucomisd(value, value); |
4669 __ j(parity_odd, &have_value); // NaN. | 4669 __ j(parity_odd, &have_value, Label::kNear); // NaN. |
4670 | 4670 |
4671 __ movsd(value, Operand::StaticVariable(canonical_nan_reference)); | 4671 __ movsd(value, Operand::StaticVariable(canonical_nan_reference)); |
4672 __ bind(&have_value); | 4672 __ bind(&have_value); |
4673 } | 4673 } |
4674 | 4674 |
4675 __ movsd(double_store_operand, value); | 4675 __ movsd(double_store_operand, value); |
4676 } else { | 4676 } else { |
4677 // Can't use SSE2 in the serializer | 4677 // Can't use SSE2 in the serializer |
4678 if (instr->hydrogen()->IsConstantHoleStore()) { | 4678 if (instr->hydrogen()->IsConstantHoleStore()) { |
4679 // This means we should store the (double) hole. No floating point | 4679 // This means we should store the (double) hole. No floating point |
(...skipping 15 matching lines...) | |
4695 } else { | 4695 } else { |
4696 Label no_special_nan_handling; | 4696 Label no_special_nan_handling; |
4697 X87Register value = ToX87Register(instr->value()); | 4697 X87Register value = ToX87Register(instr->value()); |
4698 X87Fxch(value); | 4698 X87Fxch(value); |
4699 | 4699 |
4700 if (instr->NeedsCanonicalization()) { | 4700 if (instr->NeedsCanonicalization()) { |
4701 __ fld(0); | 4701 __ fld(0); |
4702 __ fld(0); | 4702 __ fld(0); |
4703 __ FCmp(); | 4703 __ FCmp(); |
4704 | 4704 |
4705 __ j(parity_odd, &no_special_nan_handling); | 4705 __ j(parity_odd, &no_special_nan_handling, Label::kNear); |
4706 __ sub(esp, Immediate(kDoubleSize)); | 4706 __ sub(esp, Immediate(kDoubleSize)); |
4707 __ fst_d(MemOperand(esp, 0)); | 4707 __ fst_d(MemOperand(esp, 0)); |
4708 __ cmp(MemOperand(esp, sizeof(kHoleNanLower32)), | 4708 __ cmp(MemOperand(esp, sizeof(kHoleNanLower32)), |
4709 Immediate(kHoleNanUpper32)); | 4709 Immediate(kHoleNanUpper32)); |
4710 __ add(esp, Immediate(kDoubleSize)); | 4710 __ add(esp, Immediate(kDoubleSize)); |
4711 Label canonicalize; | 4711 Label canonicalize; |
4712 __ j(not_equal, &canonicalize); | 4712 __ j(not_equal, &canonicalize, Label::kNear); |
4713 __ jmp(&no_special_nan_handling); | 4713 __ jmp(&no_special_nan_handling, Label::kNear); |
4714 __ bind(&canonicalize); | 4714 __ bind(&canonicalize); |
4715 __ fstp(0); | 4715 __ fstp(0); |
4716 __ fld_d(Operand::StaticVariable(canonical_nan_reference)); | 4716 __ fld_d(Operand::StaticVariable(canonical_nan_reference)); |
4717 } | 4717 } |
4718 | 4718 |
4719 __ bind(&no_special_nan_handling); | 4719 __ bind(&no_special_nan_handling); |
4720 __ fst_d(double_store_operand); | 4720 __ fst_d(double_store_operand); |
4721 } | 4721 } |
4722 } | 4722 } |
4723 } | 4723 } |
(...skipping 948 matching lines...) | |
5672 DeferredCheckMaps* deferred = NULL; | 5672 DeferredCheckMaps* deferred = NULL; |
5673 if (instr->hydrogen()->has_migration_target()) { | 5673 if (instr->hydrogen()->has_migration_target()) { |
5674 deferred = new(zone()) DeferredCheckMaps(this, instr, reg, x87_stack_); | 5674 deferred = new(zone()) DeferredCheckMaps(this, instr, reg, x87_stack_); |
5675 __ bind(deferred->check_maps()); | 5675 __ bind(deferred->check_maps()); |
5676 } | 5676 } |
5677 | 5677 |
5678 UniqueSet<Map> map_set = instr->hydrogen()->map_set(); | 5678 UniqueSet<Map> map_set = instr->hydrogen()->map_set(); |
5679 Label success; | 5679 Label success; |
5680 for (int i = 0; i < map_set.size() - 1; i++) { | 5680 for (int i = 0; i < map_set.size() - 1; i++) { |
5681 Handle<Map> map = map_set.at(i).handle(); | 5681 Handle<Map> map = map_set.at(i).handle(); |
5682 __ CompareMap(reg, map, &success); | 5682 __ CompareMap(reg, map); |
5683 __ j(equal, &success); | 5683 __ j(equal, &success, Label::kNear); |
5684 } | 5684 } |
5685 | 5685 |
5686 Handle<Map> map = map_set.at(map_set.size() - 1).handle(); | 5686 Handle<Map> map = map_set.at(map_set.size() - 1).handle(); |
5687 __ CompareMap(reg, map, &success); | 5687 __ CompareMap(reg, map); |
5688 if (instr->hydrogen()->has_migration_target()) { | 5688 if (instr->hydrogen()->has_migration_target()) { |
5689 __ j(not_equal, deferred->entry()); | 5689 __ j(not_equal, deferred->entry()); |
5690 } else { | 5690 } else { |
5691 DeoptimizeIf(not_equal, instr->environment()); | 5691 DeoptimizeIf(not_equal, instr->environment()); |
5692 } | 5692 } |
5693 | 5693 |
5694 __ bind(&success); | 5694 __ bind(&success); |
5695 } | 5695 } |
5696 | 5696 |
5697 | 5697 |
(...skipping 57 matching lines...) | |
5755 Register scratch2 = ToRegister(instr->scratch2()); | 5755 Register scratch2 = ToRegister(instr->scratch2()); |
5756 Register scratch3 = ToRegister(instr->scratch3()); | 5756 Register scratch3 = ToRegister(instr->scratch3()); |
5757 Label is_smi, done, heap_number, valid_exponent, | 5757 Label is_smi, done, heap_number, valid_exponent, |
5758 largest_value, zero_result, maybe_nan_or_infinity; | 5758 largest_value, zero_result, maybe_nan_or_infinity; |
5759 | 5759 |
5760 __ JumpIfSmi(input_reg, &is_smi); | 5760 __ JumpIfSmi(input_reg, &is_smi); |
5761 | 5761 |
5762 // Check for heap number | 5762 // Check for heap number |
5763 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 5763 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
5764 factory()->heap_number_map()); | 5764 factory()->heap_number_map()); |
5765 __ j(equal, &heap_number, Label::kFar); | 5765 __ j(equal, &heap_number, Label::kNear); |
5766 | 5766 |
5767 // Check for undefined. Undefined is converted to zero for clamping | 5767 // Check for undefined. Undefined is converted to zero for clamping |
5768 // conversions. | 5768 // conversions. |
5769 __ cmp(input_reg, factory()->undefined_value()); | 5769 __ cmp(input_reg, factory()->undefined_value()); |
5770 DeoptimizeIf(not_equal, instr->environment()); | 5770 DeoptimizeIf(not_equal, instr->environment()); |
5771 __ jmp(&zero_result); | 5771 __ jmp(&zero_result, Label::kNear); |
5772 | 5772 |
5773 // Heap number | 5773 // Heap number |
5774 __ bind(&heap_number); | 5774 __ bind(&heap_number); |
5775 | 5775 |
5776 // Surprisingly, all of the hand-crafted bit-manipulations below are much | 5776 // Surprisingly, all of the hand-crafted bit-manipulations below are much |
5777 // faster than the x86 FPU built-in instruction, especially since "banker's | 5777 // faster than the x86 FPU built-in instruction, especially since "banker's |
5778 // rounding" would be additionally very expensive | 5778 // rounding" would be additionally very expensive |
5779 | 5779 |
5780 // Get exponent word. | 5780 // Get exponent word. |
5781 __ mov(scratch, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 5781 __ mov(scratch, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
5782 __ mov(scratch3, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); | 5782 __ mov(scratch3, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); |
5783 | 5783 |
5784 // Test for negative values --> clamp to zero | 5784 // Test for negative values --> clamp to zero |
5785 __ test(scratch, scratch); | 5785 __ test(scratch, scratch); |
5786 __ j(negative, &zero_result); | 5786 __ j(negative, &zero_result, Label::kNear); |
5787 | 5787 |
5788 // Get exponent alone in scratch2. | 5788 // Get exponent alone in scratch2. |
5789 __ mov(scratch2, scratch); | 5789 __ mov(scratch2, scratch); |
5790 __ and_(scratch2, HeapNumber::kExponentMask); | 5790 __ and_(scratch2, HeapNumber::kExponentMask); |
5791 __ shr(scratch2, HeapNumber::kExponentShift); | 5791 __ shr(scratch2, HeapNumber::kExponentShift); |
5792 __ j(zero, &zero_result); | 5792 __ j(zero, &zero_result, Label::kNear); |
5793 __ sub(scratch2, Immediate(HeapNumber::kExponentBias - 1)); | 5793 __ sub(scratch2, Immediate(HeapNumber::kExponentBias - 1)); |
5794 __ j(negative, &zero_result); | 5794 __ j(negative, &zero_result, Label::kNear); |
5795 | 5795 |
5796 const uint32_t non_int8_exponent = 7; | 5796 const uint32_t non_int8_exponent = 7; |
5797 __ cmp(scratch2, Immediate(non_int8_exponent + 1)); | 5797 __ cmp(scratch2, Immediate(non_int8_exponent + 1)); |
5798 // If the exponent is too big, check for special values. | 5798 // If the exponent is too big, check for special values. |
5799 __ j(greater, &maybe_nan_or_infinity, Label::kNear); | 5799 __ j(greater, &maybe_nan_or_infinity, Label::kNear); |
5800 | 5800 |
5801 __ bind(&valid_exponent); | 5801 __ bind(&valid_exponent); |
5802 // Exponent word in scratch, exponent in scratch2. We know that 0 <= exponent | 5802 // Exponent word in scratch, exponent in scratch2. We know that 0 <= exponent |
5803 // < 7. The shift bias is the number of bits to shift the mantissa such that | 5803 // < 7. The shift bias is the number of bits to shift the mantissa such that |
5804 // with an exponent of 7 the top-most one is in bit 30, allowing | 5804 // with an exponent of 7 the top-most one is in bit 30, allowing |
(...skipping 10 matching lines...) | |
5815 __ shl_cl(scratch); | 5815 __ shl_cl(scratch); |
5816 // Use "banker's rounding" to spec: If fractional part of number is 0.5, then | 5816 // Use "banker's rounding" to spec: If fractional part of number is 0.5, then |
5817 // use the bit in the "ones" place and add it to the "halves" place, which has | 5817 // use the bit in the "ones" place and add it to the "halves" place, which has |
5818 // the effect of rounding to even. | 5818 // the effect of rounding to even. |
5819 __ mov(scratch2, scratch); | 5819 __ mov(scratch2, scratch); |
5820 const uint32_t one_half_bit_shift = 30 - sizeof(uint8_t) * 8; | 5820 const uint32_t one_half_bit_shift = 30 - sizeof(uint8_t) * 8; |
5821 const uint32_t one_bit_shift = one_half_bit_shift + 1; | 5821 const uint32_t one_bit_shift = one_half_bit_shift + 1; |
5822 __ and_(scratch2, Immediate((1 << one_bit_shift) - 1)); | 5822 __ and_(scratch2, Immediate((1 << one_bit_shift) - 1)); |
5823 __ cmp(scratch2, Immediate(1 << one_half_bit_shift)); | 5823 __ cmp(scratch2, Immediate(1 << one_half_bit_shift)); |
5824 Label no_round; | 5824 Label no_round; |
5825 __ j(less, &no_round); | 5825 __ j(less, &no_round, Label::kNear); |
5826 Label round_up; | 5826 Label round_up; |
5827 __ mov(scratch2, Immediate(1 << one_half_bit_shift)); | 5827 __ mov(scratch2, Immediate(1 << one_half_bit_shift)); |
5828 __ j(greater, &round_up); | 5828 __ j(greater, &round_up, Label::kNear); |
5829 __ test(scratch3, scratch3); | 5829 __ test(scratch3, scratch3); |
5830 __ j(not_zero, &round_up); | 5830 __ j(not_zero, &round_up, Label::kNear); |
5831 __ mov(scratch2, scratch); | 5831 __ mov(scratch2, scratch); |
5832 __ and_(scratch2, Immediate(1 << one_bit_shift)); | 5832 __ and_(scratch2, Immediate(1 << one_bit_shift)); |
5833 __ shr(scratch2, 1); | 5833 __ shr(scratch2, 1); |
5834 __ bind(&round_up); | 5834 __ bind(&round_up); |
5835 __ add(scratch, scratch2); | 5835 __ add(scratch, scratch2); |
5836 __ j(overflow, &largest_value); | 5836 __ j(overflow, &largest_value, Label::kNear); |
5837 __ bind(&no_round); | 5837 __ bind(&no_round); |
5838 __ shr(scratch, 23); | 5838 __ shr(scratch, 23); |
5839 __ mov(result_reg, scratch); | 5839 __ mov(result_reg, scratch); |
5840 __ jmp(&done, Label::kNear); | 5840 __ jmp(&done, Label::kNear); |
5841 | 5841 |
5842 __ bind(&maybe_nan_or_infinity); | 5842 __ bind(&maybe_nan_or_infinity); |
5843 // Check for NaN/Infinity, all other values map to 255 | 5843 // Check for NaN/Infinity, all other values map to 255 |
5844 __ cmp(scratch2, Immediate(HeapNumber::kInfinityOrNanExponent + 1)); | 5844 __ cmp(scratch2, Immediate(HeapNumber::kInfinityOrNanExponent + 1)); |
5845 __ j(not_equal, &largest_value, Label::kNear); | 5845 __ j(not_equal, &largest_value, Label::kNear); |
5846 | 5846 |
5847 // Check for NaN, which differs from Infinity in that at least one mantissa | 5847 // Check for NaN, which differs from Infinity in that at least one mantissa |
5848 // bit is set. | 5848 // bit is set. |
5849 __ and_(scratch, HeapNumber::kMantissaMask); | 5849 __ and_(scratch, HeapNumber::kMantissaMask); |
5850 __ or_(scratch, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); | 5850 __ or_(scratch, FieldOperand(input_reg, HeapNumber::kMantissaOffset)); |
5851 __ j(not_zero, &zero_result); // M!=0 --> NaN | 5851 __ j(not_zero, &zero_result, Label::kNear); // M!=0 --> NaN |
5852 // Infinity -> Fall through to map to 255. | 5852 // Infinity -> Fall through to map to 255. |
5853 | 5853 |
5854 __ bind(&largest_value); | 5854 __ bind(&largest_value); |
5855 __ mov(result_reg, Immediate(255)); | 5855 __ mov(result_reg, Immediate(255)); |
5856 __ jmp(&done, Label::kNear); | 5856 __ jmp(&done, Label::kNear); |
5857 | 5857 |
5858 __ bind(&zero_result); | 5858 __ bind(&zero_result); |
5859 __ xor_(result_reg, result_reg); | 5859 __ xor_(result_reg, result_reg); |
5860 __ jmp(&done); | 5860 __ jmp(&done, Label::kNear); |
5861 | 5861 |
5862 // smi | 5862 // smi |
5863 __ bind(&is_smi); | 5863 __ bind(&is_smi); |
5864 if (!input_reg.is(result_reg)) { | 5864 if (!input_reg.is(result_reg)) { |
5865 __ mov(result_reg, input_reg); | 5865 __ mov(result_reg, input_reg); |
5866 } | 5866 } |
5867 __ SmiUntag(result_reg); | 5867 __ SmiUntag(result_reg); |
5868 __ ClampUint8(result_reg); | 5868 __ ClampUint8(result_reg); |
5869 __ bind(&done); | 5869 __ bind(&done); |
5870 } | 5870 } |
(...skipping 127 matching lines...) | |
5998 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 5998 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
5999 __ push(Immediate(instr->hydrogen()->pattern())); | 5999 __ push(Immediate(instr->hydrogen()->pattern())); |
6000 __ push(Immediate(instr->hydrogen()->flags())); | 6000 __ push(Immediate(instr->hydrogen()->flags())); |
6001 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); | 6001 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); |
6002 __ mov(ebx, eax); | 6002 __ mov(ebx, eax); |
6003 | 6003 |
6004 __ bind(&materialized); | 6004 __ bind(&materialized); |
6005 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 6005 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
6006 Label allocated, runtime_allocate; | 6006 Label allocated, runtime_allocate; |
6007 __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); | 6007 __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); |
6008 __ jmp(&allocated); | 6008 __ jmp(&allocated, Label::kNear); |
6009 | 6009 |
6010 __ bind(&runtime_allocate); | 6010 __ bind(&runtime_allocate); |
6011 __ push(ebx); | 6011 __ push(ebx); |
6012 __ push(Immediate(Smi::FromInt(size))); | 6012 __ push(Immediate(Smi::FromInt(size))); |
6013 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 6013 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); |
6014 __ pop(ebx); | 6014 __ pop(ebx); |
6015 | 6015 |
6016 __ bind(&allocated); | 6016 __ bind(&allocated); |
6017 // Copy the content into the newly allocated memory. | 6017 // Copy the content into the newly allocated memory. |
6018 // (Unroll copy loop once for better throughput). | 6018 // (Unroll copy loop once for better throughput). |
(...skipping 305 matching lines...) | |
6324 __ bind(&use_cache); | 6324 __ bind(&use_cache); |
6325 } | 6325 } |
6326 | 6326 |
6327 | 6327 |
6328 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { | 6328 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { |
6329 Register map = ToRegister(instr->map()); | 6329 Register map = ToRegister(instr->map()); |
6330 Register result = ToRegister(instr->result()); | 6330 Register result = ToRegister(instr->result()); |
6331 Label load_cache, done; | 6331 Label load_cache, done; |
6332 __ EnumLength(result, map); | 6332 __ EnumLength(result, map); |
6333 __ cmp(result, Immediate(Smi::FromInt(0))); | 6333 __ cmp(result, Immediate(Smi::FromInt(0))); |
6334 __ j(not_equal, &load_cache); | 6334 __ j(not_equal, &load_cache, Label::kNear); |
6335 __ mov(result, isolate()->factory()->empty_fixed_array()); | 6335 __ mov(result, isolate()->factory()->empty_fixed_array()); |
6336 __ jmp(&done); | 6336 __ jmp(&done, Label::kNear); |
6337 | 6337 |
6338 __ bind(&load_cache); | 6338 __ bind(&load_cache); |
6339 __ LoadInstanceDescriptors(map, result); | 6339 __ LoadInstanceDescriptors(map, result); |
6340 __ mov(result, | 6340 __ mov(result, |
6341 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); | 6341 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); |
6342 __ mov(result, | 6342 __ mov(result, |
6343 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); | 6343 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); |
6344 __ bind(&done); | 6344 __ bind(&done); |
6345 __ test(result, result); | 6345 __ test(result, result); |
6346 DeoptimizeIf(equal, instr->environment()); | 6346 DeoptimizeIf(equal, instr->environment()); |
6347 } | 6347 } |
6348 | 6348 |
6349 | 6349 |
6350 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 6350 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
6351 Register object = ToRegister(instr->value()); | 6351 Register object = ToRegister(instr->value()); |
6352 __ cmp(ToRegister(instr->map()), | 6352 __ cmp(ToRegister(instr->map()), |
6353 FieldOperand(object, HeapObject::kMapOffset)); | 6353 FieldOperand(object, HeapObject::kMapOffset)); |
6354 DeoptimizeIf(not_equal, instr->environment()); | 6354 DeoptimizeIf(not_equal, instr->environment()); |
6355 } | 6355 } |
6356 | 6356 |
6357 | 6357 |
6358 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { | 6358 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { |
6359 Register object = ToRegister(instr->object()); | 6359 Register object = ToRegister(instr->object()); |
6360 Register index = ToRegister(instr->index()); | 6360 Register index = ToRegister(instr->index()); |
6361 | 6361 |
6362 Label out_of_object, done; | 6362 Label out_of_object, done; |
6363 __ cmp(index, Immediate(0)); | 6363 __ cmp(index, Immediate(0)); |
6364 __ j(less, &out_of_object); | 6364 __ j(less, &out_of_object, Label::kNear); |
6365 __ mov(object, FieldOperand(object, | 6365 __ mov(object, FieldOperand(object, |
6366 index, | 6366 index, |
6367 times_half_pointer_size, | 6367 times_half_pointer_size, |
6368 JSObject::kHeaderSize)); | 6368 JSObject::kHeaderSize)); |
6369 __ jmp(&done, Label::kNear); | 6369 __ jmp(&done, Label::kNear); |
6370 | 6370 |
6371 __ bind(&out_of_object); | 6371 __ bind(&out_of_object); |
6372 __ mov(object, FieldOperand(object, JSObject::kPropertiesOffset)); | 6372 __ mov(object, FieldOperand(object, JSObject::kPropertiesOffset)); |
6373 __ neg(index); | 6373 __ neg(index); |
6374 // Index is now equal to out of object property index plus 1. | 6374 // Index is now equal to out of object property index plus 1. |
6375 __ mov(object, FieldOperand(object, | 6375 __ mov(object, FieldOperand(object, |
6376 index, | 6376 index, |
6377 times_half_pointer_size, | 6377 times_half_pointer_size, |
6378 FixedArray::kHeaderSize - kPointerSize)); | 6378 FixedArray::kHeaderSize - kPointerSize)); |
6379 __ bind(&done); | 6379 __ bind(&done); |
6380 } | 6380 } |
6381 | 6381 |
6382 | 6382 |
6383 #undef __ | 6383 #undef __ |
6384 | 6384 |
6385 } } // namespace v8::internal | 6385 } } // namespace v8::internal |
6386 | 6386 |
6387 #endif // V8_TARGET_ARCH_IA32 | 6387 #endif // V8_TARGET_ARCH_IA32 |
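
Note on the pattern this CL applies (not part of the change itself; a minimal sketch under stated assumptions): the patch adds the Label::kNear distance hint to jumps whose target label is bound a few instructions later in the same generated code stub. On ia32 the near form uses an 8-bit displacement (2 bytes for both jmp and conditional jcc) instead of the default long form (5 bytes for jmp rel32, 6 bytes for jcc rel32), so the hint only shrinks the emitted code. It is only safe when the label ends up within roughly -128..+127 bytes of the jump site, which the assembler checks when the label is bound — hence the reviewer discussion above about a spot where a near jump might not be sufficient. The helper name SignClampSketch below is hypothetical, written against the MacroAssembler calls already visible in this diff, and assumes the usual macro-assembler-ia32.h includes of this file.

#define __ masm->

// Hypothetical illustration only: clamp a negative value in |reg| to zero.
// The forward branch targets a label bound a couple of bytes later, so the
// Label::kNear hint fits in a rel8 displacement and saves 4 bytes over the
// default jcc rel32 encoding.
static void SignClampSketch(MacroAssembler* masm, Register reg) {
  Label done;
  __ test(reg, reg);                    // Sets SF from reg.
  __ j(positive, &done, Label::kNear);  // Short forward branch if reg >= 0.
  __ mov(reg, Immediate(0));            // Negative -> clamp to zero.
  __ bind(&done);                       // Bound close by, so kNear is valid.
}

#undef __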