OLD | NEW |
---|---|
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stub-assembler.h" | 5 #include "src/code-stub-assembler.h" |
6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
7 #include "src/frames-inl.h" | 7 #include "src/frames-inl.h" |
8 #include "src/frames.h" | 8 #include "src/frames.h" |
9 #include "src/ic/stub-cache.h" | 9 #include "src/ic/stub-cache.h" |
10 | 10 |
(...skipping 35 matching lines...) | |
46 } | 46 } |
47 | 47 |
48 Node* CodeStubAssembler::HeapNumberMapConstant() { | 48 Node* CodeStubAssembler::HeapNumberMapConstant() { |
49 return HeapConstant(isolate()->factory()->heap_number_map()); | 49 return HeapConstant(isolate()->factory()->heap_number_map()); |
50 } | 50 } |
51 | 51 |
52 Node* CodeStubAssembler::NoContextConstant() { | 52 Node* CodeStubAssembler::NoContextConstant() { |
53 return SmiConstant(Smi::FromInt(0)); | 53 return SmiConstant(Smi::FromInt(0)); |
54 } | 54 } |
55 | 55 |
56 Node* CodeStubAssembler::MinusZeroConstant() { | |
57 return LoadRoot(Heap::kMinusZeroValueRootIndex); | |
58 } | |
59 | |
60 Node* CodeStubAssembler::NanConstant() { | |
61 return LoadRoot(Heap::kNanValueRootIndex); | |
62 } | |
63 | |
56 Node* CodeStubAssembler::NullConstant() { | 64 Node* CodeStubAssembler::NullConstant() { |
57 return LoadRoot(Heap::kNullValueRootIndex); | 65 return LoadRoot(Heap::kNullValueRootIndex); |
58 } | 66 } |
59 | 67 |
60 Node* CodeStubAssembler::UndefinedConstant() { | 68 Node* CodeStubAssembler::UndefinedConstant() { |
61 return LoadRoot(Heap::kUndefinedValueRootIndex); | 69 return LoadRoot(Heap::kUndefinedValueRootIndex); |
62 } | 70 } |
63 | 71 |
64 Node* CodeStubAssembler::TheHoleConstant() { | 72 Node* CodeStubAssembler::TheHoleConstant() { |
65 return LoadRoot(Heap::kTheHoleValueRootIndex); | 73 return LoadRoot(Heap::kTheHoleValueRootIndex); |
(...skipping 257 matching lines...) | |
323 Bind(&if_a); | 331 Bind(&if_a); |
324 min.Bind(a); | 332 min.Bind(a); |
325 Goto(&join); | 333 Goto(&join); |
326 Bind(&if_b); | 334 Bind(&if_b); |
327 min.Bind(b); | 335 min.Bind(b); |
328 Goto(&join); | 336 Goto(&join); |
329 Bind(&join); | 337 Bind(&join); |
330 return min.value(); | 338 return min.value(); |
331 } | 339 } |
332 | 340 |
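Side note on the Bind/Goto diamond that closes above: the Branch itself falls in the elided lines, but the visible part simply selects one of two values into the Variable `min` and merges at `join`. A hedged plain-C++ sketch of the same shape (the `a < b` condition and the min-selection are assumptions, not taken from the patch):

// Illustrative only: the CSA code builds a graph, it does not run like this.
static int SelectMinSketch(int a, int b) {
  int min;            // plays the role of the CSA Variable `min`
  if (a < b) {        // Branch(<condition>, &if_a, &if_b) -- assumed condition
    min = a;          // Bind(&if_a); min.Bind(a); Goto(&join);
  } else {
    min = b;          // Bind(&if_b); min.Bind(b); Goto(&join);
  }
  return min;         // Bind(&join); return min.value();
}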
341 Node* CodeStubAssembler::SmiMod(Node* a, Node* b) { | |
342 Variable var_result(this, MachineRepresentation::kTagged); | |
343 Label return_result(this, &var_result), | |
344 return_minuszero(this, Label::kDeferred), | |
345 return_nan(this, Label::kDeferred); | |
346 | |
347 // Untag {a} and {b}. | |
348 a = SmiToWord32(a); | |
349 b = SmiToWord32(b); | |
350 | |
351 // Return NaN if {b} is zero. | |
352 GotoIf(Word32Equal(b, Int32Constant(0)), &return_nan); | |
353 | |
354 // Check if {a} is positive (or zero). | |
Jarin 2016/07/11 04:47:20: We call this non-negative.
Benedikt Meurer 2016/07/11 05:03:22: Done.
355 Label if_aispositive(this), if_aisnotpositive(this, Label::kDeferred); | |
356 Branch(Int32LessThanOrEqual(Int32Constant(0), a), &if_aispositive, | |
357 &if_aisnotpositive); | |
358 | |
359 Bind(&if_aispositive); | |
360 { | |
361 // Fast case, don't need to check any other edge cases. | |
362 Node* r = Int32Mod(a, b); | |
363 var_result.Bind(SmiFromWord32(r)); | |
364 Goto(&return_result); | |
365 } | |
366 | |
367 Bind(&if_aisnotpositive); | |
Jarin 2016/07/11 04:47:20: notpositive -> negative
Benedikt Meurer 2016/07/11 05:03:22: Done.
368 { | |
369 if (SmiValuesAre32Bits()) { | |
370 // Check if {a} is kMinInt and {b} is -1 (only relevant if the | |
371 // kMinInt is actually representable as a Smi). | |
372 Label join(this); | |
373 GotoUnless(Word32Equal(a, Int32Constant(kMinInt)), &join); | |
374 GotoIf(Word32Equal(b, Int32Constant(-1)), &return_minuszero); | |
375 Goto(&join); | |
376 Bind(&join); | |
377 } | |
378 | |
379 // Perform the integer modulus operation. | |
380 Node* r = Int32Mod(a, b); | |
381 | |
382 // Check if {r} is zero, and if so return -0, because we have to | |
383 // take the sign of the left hand side {a}, which is negative. | |
384 GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero); | |
385 | |
386 // The remainder {r} can be outside the valid Smi range on 32bit | |
387 // architectures, so we cannot just say SmiFromWord32(r) here. | |
388 var_result.Bind(ChangeInt32ToTagged(r)); | |
389 Goto(&return_result); | |
390 } | |
391 | |
392 Bind(&return_minuszero); | |
393 var_result.Bind(MinusZeroConstant()); | |
394 Goto(&return_result); | |
395 | |
396 Bind(&return_nan); | |
397 var_result.Bind(NanConstant()); | |
398 Goto(&return_result); | |
399 | |
400 Bind(&return_result); | |
401 return var_result.value(); | |
402 } | |
403 | |
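For context on the edge cases the new SmiMod guards (division by zero yields NaN, kMinInt % -1 and a zero remainder with a negative dividend yield -0, and the result otherwise takes the sign of the dividend), here is a hedged standalone C++ sketch of the same semantics on untagged values. The function name is made up for illustration; this is not the CSA code path, and it folds the SmiValuesAre32Bits() distinction away by always checking kMinInt:

#include <cmath>
#include <cstdint>
#include <limits>

// Illustrative only: mirrors the special cases the CSA SmiMod above handles,
// using a double result so that NaN and -0 are representable.
static double JsStyleModSketch(int32_t a, int32_t b) {
  if (b == 0) {
    return std::numeric_limits<double>::quiet_NaN();  // return_nan path
  }
  if (a == std::numeric_limits<int32_t>::min() && b == -1) {
    // Int32Mod(kMinInt, -1) would overflow; the result is -0 because the
    // dividend is negative.
    return -0.0;
  }
  int32_t r = a % b;  // C++ % already takes the sign of the dividend
  if (r == 0 && a < 0) {
    return -0.0;  // keep the sign of the negative left-hand side
  }
  return static_cast<double>(r);
}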
333 Node* CodeStubAssembler::WordIsSmi(Node* a) { | 404 Node* CodeStubAssembler::WordIsSmi(Node* a) { |
334 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask)), IntPtrConstant(0)); | 405 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask)), IntPtrConstant(0)); |
335 } | 406 } |
336 | 407 |
337 Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) { | 408 Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) { |
338 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)), | 409 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)), |
339 IntPtrConstant(0)); | 410 IntPtrConstant(0)); |
340 } | 411 } |
341 | 412 |
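The two predicates above are pure tag-bit tests on a raw word. A hedged sketch of the same bit tests in plain C++, assuming the usual V8 Smi scheme (kSmiTag is 0, kSmiTagMask selects the low tag bit, and kSmiSignMask is the top bit of the word); the constants here are spelled out locally for illustration, not taken from the V8 headers:

#include <cstdint>

// Illustrative bit tests mirroring WordIsSmi / WordIsPositiveSmi above.
static bool WordIsSmiSketch(uintptr_t word) {
  const uintptr_t kSmiTagMask = 1;           // low bit is 0 for a Smi
  return (word & kSmiTagMask) == 0;
}

static bool WordIsPositiveSmiSketch(uintptr_t word) {
  const uintptr_t kSmiTagMask = 1;
  const uintptr_t kSmiSignMask =
      uintptr_t{1} << (sizeof(uintptr_t) * 8 - 1);  // sign bit of the word
  // A "positive" (non-negative) Smi has both the tag bit and the sign bit clear.
  return (word & (kSmiTagMask | kSmiSignMask)) == 0;
}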
342 Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes, | 413 Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes, |
(...skipping 2511 matching lines...) | |
2854 } | 2925 } |
2855 Bind(&miss); | 2926 Bind(&miss); |
2856 { | 2927 { |
2857 TailCallRuntime(Runtime::kLoadGlobalIC_Miss, p->context, p->slot, | 2928 TailCallRuntime(Runtime::kLoadGlobalIC_Miss, p->context, p->slot, |
2858 p->vector); | 2929 p->vector); |
2859 } | 2930 } |
2860 } | 2931 } |
2861 | 2932 |
2862 } // namespace internal | 2933 } // namespace internal |
2863 } // namespace v8 | 2934 } // namespace v8 |