.hidden __hwcap

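# i386 floating point environment routines. The x87 FPU is always used; when
# __hwcap (the ELF AT_HWCAP word) has the XMM/SSE bit set, the SSE mxcsr
# register is handled as well.
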
.global feclearexcept
.type feclearexcept,@function
feclearexcept:
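        # %ecx = excepts & FE_ALL_EXCEPT (0x3f); fnstsw leaves the x87 status
        # word in %ax, whose low 6 bits are the currently raised exceptions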
        mov 4(%esp),%ecx
        and $0x3f,%ecx
        fnstsw %ax
        # consider sse fenv as well if the cpu has XMM capability
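        # PIC-safe load of &__hwcap: call pushes the address of label 1, the
        # constant offset __hwcap-1b turns it into the address of __hwcap,
        # and bit 25 (0x02000000) of that word is the XMM/SSE feature bit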
        call 1f
1:      addl $__hwcap-1b,(%esp)
        pop %edx
        testl $0x02000000,(%edx)
        jz 2f
        # maintain exceptions in the sse mxcsr, clear x87 exceptions
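        # fold any raised x87 flags into mxcsr, clear them from the x87 status
        # word with fnclex, then drop the requested bits from the mxcsr copy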
        test %eax,%ecx
        jz 1f
        fnclex
1:      push %edx
        stmxcsr (%esp)
        pop %edx
        and $0x3f,%eax
        or %eax,%edx
        test %edx,%ecx
        jz 1f
        not %ecx
        and %ecx,%edx
        push %edx
        ldmxcsr (%esp)
        pop %edx
1:      xor %eax,%eax
        ret
        # only do the expensive x87 fenv load/store when needed
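        # no sse: if every raised flag is being cleared, fnclex suffices;
        # otherwise patch the status byte of a stored x87 environment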
2:      test %eax,%ecx
        jz 1b
        not %ecx
        and %ecx,%eax
        test $0x3f,%eax
        jnz 1f
        fnclex
        jmp 1b
1:      sub $32,%esp
        fnstenv (%esp)
        mov %al,4(%esp)
        fldenv (%esp)
        add $32,%esp
        xor %eax,%eax
        ret

.global feraiseexcept
.type feraiseexcept,@function
feraiseexcept:
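        # set the requested flag bits in a stored x87 environment and reload
        # it; any unmasked exception is then signaled at the next fp instruction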
        mov 4(%esp),%eax
        and $0x3f,%eax
        sub $32,%esp
        fnstenv (%esp)
        or %al,4(%esp)
        fldenv (%esp)
        add $32,%esp
        xor %eax,%eax
        ret

.global __fesetround
.type __fesetround,@function
__fesetround:
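        # the rounding mode argument (0, 0x400, 0x800 or 0xc00) occupies bits
        # 10-11 of the x87 control word; only byte 1 of the word is rewritten,
        # using %ch = byte 1 of the argument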
        mov 4(%esp),%ecx
        push %eax
        xor %eax,%eax
        fnstcw (%esp)
        andb $0xf3,1(%esp)
        or %ch,1(%esp)
        fldcw (%esp)
        # consider sse fenv as well if the cpu has XMM capability
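        # mxcsr keeps the rounding mode in bits 13-14, so the control-word
        # value in %ch is shifted left by 3 before being merged in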
        call 1f
1:      addl $__hwcap-1b,(%esp)
        pop %edx
        testl $0x02000000,(%edx)
        jz 1f
        stmxcsr (%esp)
        shl $3,%ch
        andb $0x9f,1(%esp)
        or %ch,1(%esp)
        ldmxcsr (%esp)
1:      pop %ecx
        ret

.global fegetround
.type fegetround,@function
fegetround:
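        # return the rounding mode bits (mask 0xc00) of the x87 control word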
        push %eax
        fnstcw (%esp)
        pop %eax
        and $0xc00,%eax
        ret

.global fegetenv
.type fegetenv,@function
fegetenv:
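        # store the x87 environment to *envp; with sse, also merge the mxcsr
        # exception flags into the saved status word at offset 4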
        mov 4(%esp),%ecx
        xor %eax,%eax
        fnstenv (%ecx)
        # consider sse fenv as well if the cpu has XMM capability
        call 1f
1:      addl $__hwcap-1b,(%esp)
        pop %edx
        testl $0x02000000,(%edx)
        jz 1f
        push %eax
        stmxcsr (%esp)
        pop %edx
        and $0x3f,%edx
        or %edx,4(%ecx)
1:      ret

.global fesetenv
.type fesetenv,@function
fesetenv:
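        # a pointer value of -1 (FE_DFL_ENV) selects the default environment:
        # inc turns it into 0 for the jz test; otherwise load the caller's
        # environment and keep its control word in %ecx for the mxcsr code below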
        mov 4(%esp),%ecx
        xor %eax,%eax
        inc %ecx
        jz 1f
        fldenv -1(%ecx)
        movl -1(%ecx),%ecx
        jmp 2f
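        # build the 28-byte default x87 environment on the stack: control word
        # 0x37f (all exceptions masked, extended precision, round to nearest),
        # status word 0, tag word 0xffff, remaining fields zero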
1:      push %eax
        push %eax
        push %eax
        push %eax
        pushl $0xffff
        push %eax
        pushl $0x37f
        fldenv (%esp)
        add $28,%esp
        # consider sse fenv as well if the cpu has XMM capability
2:      call 1f
1:      addl $__hwcap-1b,(%esp)
        pop %edx
        testl $0x02000000,(%edx)
        jz 1f
        # mxcsr := same rounding mode, cleared exceptions, default mask
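        # %ecx holds the control word (0 for the default env); its rounding
        # bits 10-11 shift up to mxcsr bits 13-14, 0x1f80 masks all exceptions,
        # and the argument slot at 4(%esp) is reused as scratch space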
        and $0xc00,%ecx
        shl $3,%ecx
        or $0x1f80,%ecx
        mov %ecx,4(%esp)
        ldmxcsr 4(%esp)
1:      ret

.global fetestexcept
.type fetestexcept,@function
fetestexcept:
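        # OR together the x87 status flags and, with sse, the mxcsr flags,
        # then mask with the requested exceptions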
        mov 4(%esp),%ecx
        and $0x3f,%ecx
        fnstsw %ax
        # consider sse fenv as well if the cpu has XMM capability
        call 1f
1:      addl $__hwcap-1b,(%esp)
        pop %edx
        testl $0x02000000,(%edx)
        jz 1f
        stmxcsr 4(%esp)
        or 4(%esp),%eax
1:      and %ecx,%eax
        ret