Chromium Code Reviews

Side by Side Diff: src/compiler/code-stub-assembler.cc

Issue 1828253002: [builtins] Provide Math.floor as TurboFan builtin. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: REBASE. Address feedback. Created 4 years, 8 months ago
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-stub-assembler.h"

#include <ostream>

#include "src/code-factory.h"
#include "src/compiler/graph.h"
(...skipping 134 matching lines...)
}

Node* CodeStubAssembler::LoadStackPointer() {
  return raw_assembler_->LoadStackPointer();
}

Node* CodeStubAssembler::SmiShiftBitsConstant() {
  return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
}

Node* CodeStubAssembler::Float64Floor(Node* x) {
  if (raw_assembler_->machine()->Float64RoundDown().IsSupported()) {
    return raw_assembler_->Float64RoundDown(x);
  }

  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64);
  var_x.Bind(x);

  Label return_x(this);

  // Check if {x} is a large positive integer.
  Label if_xlargeposint(this), if_xnotlargeposint(this);
  Branch(Float64GreaterThanOrEqual(x, two_52), &if_xlargeposint,
         &if_xnotlargeposint);

  Bind(&if_xlargeposint);
  {
    // The {x} is already an integer.
    Goto(&return_x);
  }

  Bind(&if_xnotlargeposint);
  {
    // Check if {x} is negative.
    Label if_xnegative(this), if_xpositive(this);
    Branch(Float64LessThan(x, Float64Constant(0.0)), &if_xnegative,
           &if_xpositive);

    Bind(&if_xnegative);
    {
      // Check if {x} is a large negative integer.
      Label if_xlargenegint(this), if_xnotlargenegint(this);
      Branch(Float64LessThanOrEqual(x, minus_two_52), &if_xlargenegint,
             &if_xnotlargenegint);

      Bind(&if_xlargenegint);
      {
        // The {x} is already an integer.
        Goto(&return_x);
      }

      Bind(&if_xnotlargenegint);
      {
        // Round negative {x} towards -Infinity.
        Node* z = Float64Sub(Float64Constant(-0.0), x);
        Node* y = Float64Sub(Float64Add(two_52, z), two_52);

        // Check if we need to adjust {y}.
        Label if_adjust(this), if_notadjust(this);
        Branch(Float64GreaterThan(z, y), &if_adjust, &if_notadjust);

        Bind(&if_adjust);
        {
          var_x.Bind(Float64Sub(Float64Constant(-1.0), y));
          Goto(&return_x);
        }

        Bind(&if_notadjust);
        {
          var_x.Bind(Float64Sub(Float64Constant(-0.0), y));
          Goto(&return_x);
        }
      }
    }

    Bind(&if_xpositive);
    {
      // Check if {x} is zero (either positive or negative).
      Label if_xzero(this), if_xnotzero(this);
      Branch(Float64Equal(x, Float64Constant(0.0)), &if_xzero, &if_xnotzero);

      Bind(&if_xzero);
      {
        // We have to return both 0.0 and -0.0 as is.
        Goto(&return_x);
      }

      Bind(&if_xnotzero);
      {
        // Round positive {x} towards -Infinity.
        Node* y = Float64Sub(Float64Add(two_52, x), two_52);

        // Check if we need to adjust {y}.
        Label if_adjust(this), if_notadjust(this);
        Branch(Float64LessThan(x, y), &if_adjust, &if_notadjust);

        Bind(&if_adjust);
        {
          var_x.Bind(Float64Sub(y, Float64Constant(1.0)));
          Goto(&return_x);
        }

        Bind(&if_notadjust);
        {
          var_x.Bind(y);
          Goto(&return_x);
        }
      }
    }
  }

  Bind(&return_x);
  return var_x.value();
}
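When the target has no Float64RoundDown machine instruction, the fallback above rounds with the classic 2^52 trick: for |x| < 2^52, (2^52 + x) - 2^52 yields x rounded to the nearest integer under the default round-to-nearest mode, and the final compare-and-adjust turns that into rounding towards -Infinity. A minimal scalar sketch of the same algorithm, for illustration only (plain C++, not the assembler graph; FloorViaTwo52 is a hypothetical name):

// Illustrative plain-C++ equivalent of the fallback path (assumes the
// default round-to-nearest FP mode); FloorViaTwo52 is a made-up helper.
#include <cstdio>

static double FloorViaTwo52(double x) {
  const double kTwo52 = 4503599627370496.0;  // 2^52
  if (x >= kTwo52) return x;                 // already an integer
  if (x < 0.0) {
    if (x <= -kTwo52) return x;              // already an integer
    double z = -x;                           // work on the magnitude
    double y = (kTwo52 + z) - kTwo52;        // z rounded to nearest integer
    return (z > y) ? -1.0 - y : -0.0 - y;    // adjust when rounding went down
  }
  if (x == 0.0) return x;                    // keep 0.0 and -0.0 as is
  double y = (kTwo52 + x) - kTwo52;          // x rounded to nearest integer
  return (x < y) ? y - 1.0 : y;              // adjust when rounding went up
}

int main() {
  std::printf("%g %g %g\n", FloorViaTwo52(2.7), FloorViaTwo52(-2.3),
              FloorViaTwo52(-0.0));          // prints: 2 -3 -0
}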

Node* CodeStubAssembler::SmiTag(Node* value) {
  return raw_assembler_->WordShl(value, SmiShiftBitsConstant());
}

Node* CodeStubAssembler::SmiUntag(Node* value) {
  return raw_assembler_->WordSar(value, SmiShiftBitsConstant());
}

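SmiTag and SmiUntag are plain word shifts by kSmiShiftSize + kSmiTagSize. A minimal sketch of the resulting encoding, assuming the usual V8 constants (kSmiTagSize == 1; kSmiShiftSize == 31 on 64-bit targets and 0 on 32-bit targets), so the total shift is 32 or 1 respectively:

// Minimal sketch of the Smi encoding these shifts produce; assumes the
// 64-bit configuration where kSmiShiftSize + kSmiTagSize == 32.
#include <cassert>
#include <cstdint>

int main() {
  const int kSmiShiftBits = 32;                   // kSmiShiftSize + kSmiTagSize
  int64_t tagged = int64_t{42} << kSmiShiftBits;  // SmiTag: WordShl
  int64_t untagged = tagged >> kSmiShiftBits;     // SmiUntag: WordSar
  assert(untagged == 42);
  assert((tagged & 1) == 0);                      // a Smi carries tag bit 0
  (void)tagged;
  (void)untagged;
}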
Node* CodeStubAssembler::SmiToWord32(Node* value) {
(...skipping 338 matching lines...)
  return LoadMapInstanceType(LoadMap(object));
}

Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
                                        uint32_t mask) {
  return raw_assembler_->Word32Shr(
      raw_assembler_->Word32And(word32, raw_assembler_->Int32Constant(mask)),
      raw_assembler_->Int32Constant(shift));
}

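BitFieldDecode masks first and shifts second, so the mask passed in must already be positioned at the field rather than normalized to bit 0. A small worked example with a hypothetical 3-bit field stored at bit 4:

// Worked example: result == (word & mask) >> shift with an in-place mask.
#include <cassert>
#include <cstdint>

int main() {
  const uint32_t kShift = 4;
  const uint32_t kMask = 0x7u << kShift;          // bits 4..6 of the word
  const uint32_t word = 0xABCDu;                  // bits 4..6 hold 0b100
  const uint32_t field = (word & kMask) >> kShift;
  assert(field == 4u);
  (void)field;
}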
Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
  Node* value32 = raw_assembler_->TruncateFloat64ToInt32(
      TruncationMode::kRoundToZero, value);
  Node* value64 = ChangeInt32ToFloat64(value32);

  Label if_valueisint32(this), if_valueisheapnumber(this), if_join(this);

  Label if_valueisequal(this), if_valueisnotequal(this);
  Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal);
  Bind(&if_valueisequal);
  {
    Label if_valueiszero(this), if_valueisnotzero(this);
    Branch(Float64Equal(value, Float64Constant(0.0)), &if_valueiszero,
           &if_valueisnotzero);

    Bind(&if_valueiszero);
    BranchIfInt32LessThan(raw_assembler_->Float64ExtractHighWord32(value),
                          Int32Constant(0), &if_valueisheapnumber,
                          &if_valueisint32);

    Bind(&if_valueisnotzero);
    Goto(&if_valueisint32);
  }
  Bind(&if_valueisnotequal);
  Goto(&if_valueisheapnumber);

  Variable var_result(this, MachineRepresentation::kTagged);
  Bind(&if_valueisint32);
  {
    if (raw_assembler_->machine()->Is64()) {
      Node* result = SmiTag(ChangeInt32ToInt64(value32));
      var_result.Bind(result);
      Goto(&if_join);
    } else {
      Node* pair = Int32AddWithOverflow(value32, value32);
      Node* overflow = Projection(1, pair);
      Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
      Branch(overflow, &if_overflow, &if_notoverflow);
      Bind(&if_overflow);
      Goto(&if_valueisheapnumber);
      Bind(&if_notoverflow);
      {
        Node* result = Projection(0, pair);
        var_result.Bind(result);
        Goto(&if_join);
      }
    }
  }
  Bind(&if_valueisheapnumber);
  {
    Node* result = AllocateHeapNumberWithValue(value);
    var_result.Bind(result);
    Goto(&if_join);
  }
  Bind(&if_join);
  return var_result.value();
}

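ChangeFloat64ToTagged boxes a float64 as a Smi only when it round-trips losslessly through int32 and is not -0.0 (detected via the sign in the high word once the value compares equal to 0.0); everything else becomes a freshly allocated HeapNumber. A scalar sketch of that decision, for illustration only (ClassifyForTagging is a made-up helper; the 31-bit Smi overflow check used on 32-bit targets is only noted in a comment):

// Scalar sketch of the tagging decision (not the assembler code).
#include <cmath>
#include <cstdint>
#include <cstdio>

static const char* ClassifyForTagging(double value) {
  // Out-of-int32-range values (and NaN) can never round-trip; bail out
  // before the cast to avoid undefined behavior in plain C++.
  if (!(value >= -2147483648.0 && value < 2147483648.0)) return "HeapNumber";
  int32_t value32 = static_cast<int32_t>(value);       // truncate toward zero
  if (static_cast<double>(value32) != value) return "HeapNumber";
  if (value == 0.0 && std::signbit(value)) return "HeapNumber";  // -0.0
  // On 32-bit targets the Smi payload is only 31 bits, so the builtin also
  // performs the Int32AddWithOverflow(value32, value32) check; omitted here.
  return "Smi";
}

int main() {
  std::printf("%s %s %s\n", ClassifyForTagging(7.0), ClassifyForTagging(-0.0),
              ClassifyForTagging(0.5));  // prints: Smi HeapNumber HeapNumber
}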
Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) {
  if (raw_assembler_->machine()->Is64()) {
    return SmiTag(ChangeInt32ToInt64(value));
  }
  Variable var_result(this, MachineRepresentation::kTagged);
  Node* pair = Int32AddWithOverflow(value, value);
  Node* overflow = Projection(1, pair);
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
      if_join(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
(...skipping 527 matching lines...)
      }
    }
  }

  bound_ = true;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8
