OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4914 matching lines...)
4925 if (!initialized) { | 4925 if (!initialized) { |
4926 PatchingAssembler patcher(old, length); | 4926 PatchingAssembler patcher(old, length); |
4927 MacroAssembler::EmitCodeAgeSequence(&patcher, NULL); | 4927 MacroAssembler::EmitCodeAgeSequence(&patcher, NULL); |
4928 initialized = true; | 4928 initialized = true; |
4929 } | 4929 } |
4930 return memcmp(sequence, old, kCodeAgeStubEntryOffset) == 0; | 4930 return memcmp(sequence, old, kCodeAgeStubEntryOffset) == 0; |
4931 } | 4931 } |
4932 #endif | 4932 #endif |
4933 | 4933 |
4934 | 4934 |
| 4935 void MacroAssembler::FlooringDiv(Register result, |
| 4936 Register dividend, |
| 4937 int32_t divisor) { |
| 4938 Register tmp = WTmp0(); |
| 4939 ASSERT(!AreAliased(result, dividend, tmp)); |
| 4940 ASSERT(result.Is32Bits() && dividend.Is32Bits()); |
| 4941 MultiplierAndShift ms(divisor); |
| 4942 Mov(tmp, Operand(ms.multiplier())); |
| 4943 Smull(result.X(), dividend, tmp); |
| 4944 Asr(result.X(), result.X(), 32); |
| 4945 if (divisor > 0 && ms.multiplier() < 0) Add(result, result, dividend); |
| 4946 if (divisor < 0 && ms.multiplier() > 0) Sub(result, result, dividend); |
| 4947 if (ms.shift() > 0) Asr(result, result, ms.shift()); |
| 4948 } |
| 4949 |
| 4950 |
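[Reviewer note, not part of the CL: a minimal standalone sketch of the multiply-and-shift idea the new FlooringDiv sequence emits, assuming MultiplierAndShift(divisor) produces a pair (multiplier, shift) such that the floored quotient falls out of the high 32 bits of dividend * multiplier, mirroring the Smull/Asr/Add/Sub/Asr steps above. The FlooringDivByConstant helper and the magic pair for divisor 7 are illustrative only, not V8 API.]

    #include <cstdint>
    #include <cstdio>

    // Scalar equivalent of the emitted instruction sequence. Relies on
    // arithmetic right shift of negative values, as Asr does on A64
    // (implementation-defined in C++ before C++20, but arithmetic on
    // common compilers).
    int32_t FlooringDivByConstant(int32_t dividend, int32_t divisor,
                                  int32_t multiplier, int32_t shift) {
      // Smull + Asr #32: high 32 bits of the 64-bit product.
      int32_t result = static_cast<int32_t>(
          (static_cast<int64_t>(dividend) * multiplier) >> 32);
      // Correction terms mirror the conditional Add/Sub in FlooringDiv.
      if (divisor > 0 && multiplier < 0) result += dividend;
      if (divisor < 0 && multiplier > 0) result -= dividend;
      if (shift > 0) result >>= shift;  // Asr(result, result, ms.shift())
      return result;
    }

    int main() {
      // Illustrative magic pair for divisor 7 (per Hacker's Delight):
      // multiplier 0x92492493, shift 2.
      int32_t m = static_cast<int32_t>(0x92492493);
      printf("%d\n", FlooringDivByConstant(100, 7, m, 2));   // 14
      printf("%d\n", FlooringDivByConstant(-100, 7, m, 2));  // -15 (floored)
      return 0;
    }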
4935 #undef __ | 4951 #undef __ |
4936 #define __ masm-> | 4952 #define __ masm-> |
4937 | 4953 |
4938 | 4954 |
4939 void InlineSmiCheckInfo::Emit(MacroAssembler* masm, const Register& reg, | 4955 void InlineSmiCheckInfo::Emit(MacroAssembler* masm, const Register& reg, |
4940 const Label* smi_check) { | 4956 const Label* smi_check) { |
4941 Assembler::BlockPoolsScope scope(masm); | 4957 Assembler::BlockPoolsScope scope(masm); |
4942 if (reg.IsValid()) { | 4958 if (reg.IsValid()) { |
4943 ASSERT(smi_check->is_bound()); | 4959 ASSERT(smi_check->is_bound()); |
4944 ASSERT(reg.Is64Bits()); | 4960 ASSERT(reg.Is64Bits()); |
(...skipping 32 matching lines...)
4977 } | 4993 } |
4978 } | 4994 } |
4979 | 4995 |
4980 | 4996 |
4981 #undef __ | 4997 #undef __ |
4982 | 4998 |
4983 | 4999 |
4984 } } // namespace v8::internal | 5000 } } // namespace v8::internal |
4985 | 5001 |
4986 #endif // V8_TARGET_ARCH_A64 | 5002 #endif // V8_TARGET_ARCH_A64 |