Chromium Code Reviews

Side by Side Diff: src/ia32/code-stubs-ia32.cc

Issue 6903124: Removed dead code: GenericUnaryOpStub is not used anymore; as a consequence, NegativeZeroHandling... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 9 years, 8 months ago
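For context on the NegativeZeroHandling mentioned in the issue title: the smi fast path in the GenericUnaryOpStub code removed below jumps to the slow case when the operand is zero under kStrictNegativeZero, because the optimistic integer subtraction '0 - value' cannot produce JavaScript's -0. A minimal standalone C++ sketch of that distinction (illustration only, not V8 code):

#include <cmath>
#include <cstdio>

int main() {
  int smi = 0;
  int optimistic = 0 - smi;   // Integer math stays at +0; there is no integer -0.
  double negated = -0.0;      // JavaScript's unary minus on 0 must yield the double -0.
  std::printf("sign bit of integer result: %d\n",
              static_cast<int>(std::signbit(static_cast<double>(optimistic))));  // prints 0
  std::printf("sign bit of double result:  %d\n",
              static_cast<int>(std::signbit(negated)));                          // prints 1
  return 0;
}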
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 2737 matching lines...)
2748 __ bind(&done); 2748 __ bind(&done);
2749 } 2749 }
2750 2750
2751 2751
2752 void FloatingPointHelper::CheckFloatOperandsAreInt32(MacroAssembler* masm, 2752 void FloatingPointHelper::CheckFloatOperandsAreInt32(MacroAssembler* masm,
2753 Label* non_int32) { 2753 Label* non_int32) {
2754 return; 2754 return;
2755 } 2755 }
2756 2756
2757 2757
2758 void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
2759 Label slow, done, undo;
2760
2761 if (op_ == Token::SUB) {
2762 if (include_smi_code_) {
2763 // Check whether the value is a smi.
2764 NearLabel try_float;
2765 __ test(eax, Immediate(kSmiTagMask));
2766 __ j(not_zero, &try_float, not_taken);
2767
2768 if (negative_zero_ == kStrictNegativeZero) {
2769 // Go slow case if the value of the expression is zero
2770 // to make sure that we switch between 0 and -0.
2771 __ test(eax, Operand(eax));
2772 __ j(zero, &slow, not_taken);
2773 }
2774
2775 // The value of the expression is a smi that is not zero. Try
2776 // optimistic subtraction '0 - value'.
2777 __ mov(edx, Operand(eax));
2778 __ Set(eax, Immediate(0));
2779 __ sub(eax, Operand(edx));
2780 __ j(overflow, &undo, not_taken);
2781 __ StubReturn(1);
2782
2783 // Try floating point case.
2784 __ bind(&try_float);
2785 } else if (FLAG_debug_code) {
2786 __ AbortIfSmi(eax);
2787 }
2788
2789 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
2790 __ cmp(edx, masm->isolate()->factory()->heap_number_map());
2791 __ j(not_equal, &slow);
2792 if (overwrite_ == UNARY_OVERWRITE) {
2793 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
2794 __ xor_(edx, HeapNumber::kSignMask); // Flip sign.
2795 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx);
2796 } else {
2797 __ mov(edx, Operand(eax));
2798 // edx: operand
2799 __ AllocateHeapNumber(eax, ebx, ecx, &undo);
2800 // eax: allocated 'empty' number
2801 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
2802 __ xor_(ecx, HeapNumber::kSignMask); // Flip sign.
2803 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
2804 __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
2805 __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
2806 }
2807 } else if (op_ == Token::BIT_NOT) {
2808 if (include_smi_code_) {
2809 Label non_smi;
2810 __ test(eax, Immediate(kSmiTagMask));
2811 __ j(not_zero, &non_smi);
2812 __ not_(eax);
2813 __ and_(eax, ~kSmiTagMask); // Remove inverted smi-tag.
2814 __ ret(0);
2815 __ bind(&non_smi);
2816 } else if (FLAG_debug_code) {
2817 __ AbortIfSmi(eax);
2818 }
2819
2820 // Check if the operand is a heap number.
2821 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
2822 __ cmp(edx, masm->isolate()->factory()->heap_number_map());
2823 __ j(not_equal, &slow, not_taken);
2824
2825 // Convert the heap number in eax to an untagged integer in ecx.
2826 IntegerConvert(masm,
2827 eax,
2828 TypeInfo::Unknown(),
2829 CpuFeatures::IsSupported(SSE3),
2830 &slow);
2831
2832 // Do the bitwise operation and check if the result fits in a smi.
2833 NearLabel try_float;
2834 __ not_(ecx);
2835 __ cmp(ecx, 0xc0000000);
2836 __ j(sign, &try_float, not_taken);
2837
2838 // Tag the result as a smi and we're done.
2839 STATIC_ASSERT(kSmiTagSize == 1);
2840 __ lea(eax, Operand(ecx, times_2, kSmiTag));
2841 __ jmp(&done);
2842
2843 // Try to store the result in a heap number.
2844 __ bind(&try_float);
2845 if (overwrite_ == UNARY_NO_OVERWRITE) {
2846 // Allocate a fresh heap number, but don't overwrite eax until
2847 // we're sure we can do it without going through the slow case
2848 // that needs the value in eax.
2849 __ AllocateHeapNumber(ebx, edx, edi, &slow);
2850 __ mov(eax, Operand(ebx));
2851 }
2852 if (CpuFeatures::IsSupported(SSE2)) {
2853 CpuFeatures::Scope use_sse2(SSE2);
2854 __ cvtsi2sd(xmm0, Operand(ecx));
2855 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
2856 } else {
2857 __ push(ecx);
2858 __ fild_s(Operand(esp, 0));
2859 __ pop(ecx);
2860 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
2861 }
2862 } else {
2863 UNIMPLEMENTED();
2864 }
2865
2866 // Return from the stub.
2867 __ bind(&done);
2868 __ StubReturn(1);
2869
2870 // Restore eax and go slow case.
2871 __ bind(&undo);
2872 __ mov(eax, Operand(edx));
2873
2874 // Handle the slow case by jumping to the JavaScript builtin.
2875 __ bind(&slow);
2876 __ pop(ecx); // pop return address.
2877 __ push(eax);
2878 __ push(ecx); // push return address
2879 switch (op_) {
2880 case Token::SUB:
2881 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
2882 break;
2883 case Token::BIT_NOT:
2884 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
2885 break;
2886 default:
2887 UNREACHABLE();
2888 }
2889 }
2890
2891
2892 void MathPowStub::Generate(MacroAssembler* masm) { 2758 void MathPowStub::Generate(MacroAssembler* masm) {
2893 // Registers are used as follows: 2759 // Registers are used as follows:
2894 // edx = base 2760 // edx = base
2895 // eax = exponent 2761 // eax = exponent
2896 // ecx = temporary, result 2762 // ecx = temporary, result
2897 2763
2898 CpuFeatures::Scope use_sse2(SSE2); 2764 CpuFeatures::Scope use_sse2(SSE2);
2899 Label allocate_return, call_runtime; 2765 Label allocate_return, call_runtime;
2900 2766
2901 // Load input parameters. 2767 // Load input parameters.
(...skipping 3090 matching lines...)
5992 // Do a tail call to the rewritten stub. 5858 // Do a tail call to the rewritten stub.
5993 __ jmp(Operand(edi)); 5859 __ jmp(Operand(edi));
5994 } 5860 }
5995 5861
5996 5862
5997 #undef __ 5863 #undef __
5998 5864
5999 } } // namespace v8::internal 5865 } } // namespace v8::internal
6000 5866
6001 #endif // V8_TARGET_ARCH_IA32 5867 #endif // V8_TARGET_ARCH_IA32
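A note on the heap-number path of the removed Token::SUB case above: it negates by XOR-ing HeapNumber::kSignMask into the high (exponent) word of the heap number, i.e. by flipping the IEEE-754 sign bit rather than doing any floating-point arithmetic. A minimal standalone C++ sketch of the same idea (illustration only, not V8 code; the mask below targets bit 63 of the full 64-bit pattern):

#include <cstdint>
#include <cstdio>
#include <cstring>

double FlipSign(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  bits ^= 0x8000000000000000ULL;   // Flip only the sign bit; exponent and mantissa stay intact.
  std::memcpy(&value, &bits, sizeof(value));
  return value;
}

int main() {
  std::printf("%g -> %g\n", 1.5, FlipSign(1.5));   // 1.5 -> -1.5
  std::printf("%g -> %g\n", 0.0, FlipSign(0.0));   // 0 -> -0
  return 0;
}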