OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 506 matching lines...)
517 case Token::BIT_NOT: | 517 case Token::BIT_NOT: |
518 GenerateHeapNumberStubBitNot(masm); | 518 GenerateHeapNumberStubBitNot(masm); |
519 break; | 519 break; |
520 default: | 520 default: |
521 UNREACHABLE(); | 521 UNREACHABLE(); |
522 } | 522 } |
523 } | 523 } |
524 | 524 |
525 | 525 |
526 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { | 526 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { |
527 Label non_smi, slow; | 527 Label non_smi, slow, call_builtin; |
528 GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear); | 528 GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear); |
529 __ bind(&non_smi); | 529 __ bind(&non_smi); |
530 GenerateHeapNumberCodeSub(masm, &slow); | 530 GenerateHeapNumberCodeSub(masm, &slow); |
531 __ bind(&slow); | 531 __ bind(&slow); |
532 GenerateTypeTransition(masm); | 532 GenerateTypeTransition(masm); |
| 533 __ bind(&call_builtin); |
| 534 GenerateGenericCodeFallback(masm); |
533 } | 535 } |
534 | 536 |
535 | 537 |
536 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( | 538 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( |
537 MacroAssembler* masm) { | 539 MacroAssembler* masm) { |
538 Label non_smi, slow; | 540 Label non_smi, slow; |
539 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); | 541 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear); |
540 __ bind(&non_smi); | 542 __ bind(&non_smi); |
541 GenerateHeapNumberCodeBitNot(masm, &slow); | 543 GenerateHeapNumberCodeBitNot(masm, &slow); |
542 __ bind(&slow); | 544 __ bind(&slow); |
(...skipping 4579 matching lines...)
5122 __ Drop(1); | 5124 __ Drop(1); |
5123 __ ret(2 * kPointerSize); | 5125 __ ret(2 * kPointerSize); |
5124 } | 5126 } |
5125 | 5127 |
5126 | 5128 |
5127 #undef __ | 5129 #undef __ |
5128 | 5130 |
5129 } } // namespace v8::internal | 5131 } } // namespace v8::internal |
5130 | 5132 |
5131 #endif // V8_TARGET_ARCH_X64 | 5133 #endif // V8_TARGET_ARCH_X64 |
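
Note on the change in GenerateHeapNumberStubSub above (the new call_builtin label and the GenerateGenericCodeFallback call): the sketch below is a minimal plain-C++ model of the control flow the revised stub emits. The helper names (TrySmiSub, TryHeapNumberSub, GenericSubBuiltin, TypeTransitionAndRetry) are hypothetical stand-ins for the MacroAssembler code generators, not real V8 API.

#include <cstdio>
#include <optional>

struct Value {
  bool is_smi;    // tagged small integer vs. heap number
  double number;  // numeric payload
};

// Stand-in for GenerateSmiCodeSub: negation stays a smi except in edge
// cases (e.g. 0, whose negation is -0).  Simplified failure test.
std::optional<double> TrySmiSub(const Value& v) {
  if (!v.is_smi || v.number == 0) return std::nullopt;
  return -v.number;
}

// Stand-in for GenerateHeapNumberCodeSub: handles non-smi numeric inputs.
std::optional<double> TryHeapNumberSub(const Value& v) {
  if (v.is_smi) return std::nullopt;
  return -v.number;
}

// Stand-ins for the generic builtin and for GenerateTypeTransition.
double GenericSubBuiltin(const Value& v) { return -v.number; }
double TypeTransitionAndRetry(const Value& v) { return -v.number; }

double HeapNumberStubSub(const Value& v) {
  if (v.is_smi) {
    if (auto r = TrySmiSub(v)) return *r;   // smi fast path succeeded
    // Label call_builtin: the operand is a smi but the result is not a
    // representable smi, so the new code calls the generic fallback directly
    // instead of jumping to slow (the type transition) as the old code did.
    return GenericSubBuiltin(v);
  }
  if (auto r = TryHeapNumberSub(v)) return *r;  // label non_smi: heap-number path
  return TypeTransitionAndRetry(v);             // label slow: re-record type feedback
}

int main() {
  std::printf("%g\n", HeapNumberStubSub({true, 5.0}));   // smi fast path: -5
  std::printf("%g\n", HeapNumberStubSub({true, 0.0}));   // generic fallback: -0
  std::printf("%g\n", HeapNumberStubSub({false, 2.5}));  // heap-number path: -2.5
}

The only behavioral difference visible in the hunk is the target of the smi slow path: it was &slow (type transition) before and is &call_builtin (generic fallback) after; the heap-number slow path still falls through to GenerateTypeTransition.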