| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 4914 matching lines...) |
| 4925 if (!initialized) { | 4925 if (!initialized) { |
| 4926 PatchingAssembler patcher(old, length); | 4926 PatchingAssembler patcher(old, length); |
| 4927 MacroAssembler::EmitCodeAgeSequence(&patcher, NULL); | 4927 MacroAssembler::EmitCodeAgeSequence(&patcher, NULL); |
| 4928 initialized = true; | 4928 initialized = true; |
| 4929 } | 4929 } |
| 4930 return memcmp(sequence, old, kCodeAgeStubEntryOffset) == 0; | 4930 return memcmp(sequence, old, kCodeAgeStubEntryOffset) == 0; |
| 4931 } | 4931 } |
| 4932 #endif | 4932 #endif |
| 4933 | 4933 |
| 4934 | 4934 |
| 4935 void MacroAssembler::FlooringDiv(Register result, | |
| 4936 Register dividend, | |
| 4937 int32_t divisor) { | |
| 4938 Register tmp = WTmp0(); | |
| 4939 ASSERT(!AreAliased(result, dividend, tmp)); | |
| 4940 ASSERT(result.Is32Bits() && dividend.Is32Bits()); | |
| 4941 MultiplierAndShift ms(divisor); | |
| 4942 Mov(tmp, Operand(ms.multiplier())); | |
| 4943 Smull(result.X(), dividend, tmp); | |
| 4944 Asr(result.X(), result.X(), 32); | |
| 4945 if (divisor > 0 && ms.multiplier() < 0) Add(result, result, dividend); | |
| 4946 if (divisor < 0 && ms.multiplier() > 0) Sub(result, result, dividend); | |
| 4947 if (ms.shift() > 0) Asr(result, result, ms.shift()); | |
| 4948 Add(result, result, Operand(dividend, LSR, 31)); | |
| 4949 } | |
| 4950 | |
| 4951 | |
| 4952 #undef __ | 4935 #undef __ |
| 4953 #define __ masm-> | 4936 #define __ masm-> |
| 4954 | 4937 |
| 4955 | 4938 |
| 4956 void InlineSmiCheckInfo::Emit(MacroAssembler* masm, const Register& reg, | 4939 void InlineSmiCheckInfo::Emit(MacroAssembler* masm, const Register& reg, |
| 4957 const Label* smi_check) { | 4940 const Label* smi_check) { |
| 4958 Assembler::BlockPoolsScope scope(masm); | 4941 Assembler::BlockPoolsScope scope(masm); |
| 4959 if (reg.IsValid()) { | 4942 if (reg.IsValid()) { |
| 4960 ASSERT(smi_check->is_bound()); | 4943 ASSERT(smi_check->is_bound()); |
| 4961 ASSERT(reg.Is64Bits()); | 4944 ASSERT(reg.Is64Bits()); |
| (...skipping 32 matching lines...) |
| 4994 } | 4977 } |
| 4995 } | 4978 } |
| 4996 | 4979 |
| 4997 | 4980 |
| 4998 #undef __ | 4981 #undef __ |
| 4999 | 4982 |
| 5000 | 4983 |
| 5001 } } // namespace v8::internal | 4984 } } // namespace v8::internal |
| 5002 | 4985 |
| 5003 #endif // V8_TARGET_ARCH_A64 | 4986 #endif // V8_TARGET_ARCH_A64 |
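For reference, the removed FlooringDiv above emits the standard multiplier-and-shift division by a constant: a 64-bit multiply whose high 32 bits are taken with Asr #32, optional add/sub corrections depending on the signs of the divisor and multiplier, an extra arithmetic shift, and finally the dividend's sign bit folded in via LSR #31. Below is a minimal stand-alone C++ sketch of that arithmetic, not V8 code: the divisor is fixed at 3, whose multiplier 0x55555556 with shift 0 is assumed, so the correction add/sub and the extra shift drop out; the DivideBy3 name and the main() check are illustrative only. With this constant the result matches C++'s truncating '/'.

// Stand-alone sketch of multiplier-and-shift division by 3 (illustrative, not V8).
#include <cassert>
#include <cstdint>

int32_t DivideBy3(int32_t dividend) {
  const int64_t kMultiplier = 0x55555556;  // assumed multiplier for divisor 3, shift 0
  // High 32 bits of the 64-bit product (the Smull + Asr #32 pair above).
  // Right-shifting a negative value is arithmetic on the platforms this targets
  // (and guaranteed since C++20).
  int32_t quotient = static_cast<int32_t>((kMultiplier * dividend) >> 32);
  // Fold in the dividend's sign bit (the final Add with LSR #31).
  quotient += static_cast<int32_t>(static_cast<uint32_t>(dividend) >> 31);
  return quotient;
}

int main() {
  for (int32_t n = -100000; n <= 100000; ++n) {
    assert(DivideBy3(n) == n / 3);  // agrees with C++ truncating division
  }
  return 0;
}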