| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 467 matching lines...) |
| 478 | 478 |
| 479 void MacroAssembler::PopSafepointRegistersAndDoubles() { | 479 void MacroAssembler::PopSafepointRegistersAndDoubles() { |
| 480 for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; i++) { | 480 for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; i++) { |
| 481 vldr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); | 481 vldr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); |
| 482 } | 482 } |
| 483 add(sp, sp, Operand(DwVfpRegister::kNumAllocatableRegisters * | 483 add(sp, sp, Operand(DwVfpRegister::kNumAllocatableRegisters * |
| 484 kDoubleSize)); | 484 kDoubleSize)); |
| 485 PopSafepointRegisters(); | 485 PopSafepointRegisters(); |
| 486 } | 486 } |
| 487 | 487 |
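The paired push/pop helpers above keep the VFP doubles below the general-purpose safepoint registers on the stack, so the pop side reloads the doubles first, skips past them, and only then restores the core registers. A rough sketch of the frame sizes involved, with the register counts taken as assumptions rather than read from this revision:

    #include <cstdio>

    int main() {
      const int kPointerSize = 4;             // assumption: 32-bit ARM target
      const int kDoubleSize = 8;
      const int kNumSafepointRegisters = 16;  // assumption: full r0..r15 set
      const int kNumAllocatableDoubles = 15;  // assumption: d0..d14 allocatable
      int doubles_bytes = kNumAllocatableDoubles * kDoubleSize;
      int core_bytes = kNumSafepointRegisters * kPointerSize;
      // The doubles sit closest to sp, so the pop reloads them first, then
      // advances sp past doubles_bytes before popping the core registers.
      std::printf("doubles: %d bytes, core: %d bytes, frame: %d bytes\n",
                  doubles_bytes, core_bytes, doubles_bytes + core_bytes);
      return 0;
    }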
| 488 void MacroAssembler::StoreToSafepointRegisterSlot(Register reg) { |
| 489 str(reg, SafepointRegisterSlot(reg)); |
| 490 } |
| 491 |
| 492 |
| 488 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) { | 493 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) { |
| 489 // The registers are pushed starting with the highest encoding, | 494 // The registers are pushed starting with the highest encoding, |
| 490 // which means that lowest encodings are closest to the stack pointer. | 495 // which means that lowest encodings are closest to the stack pointer. |
| 491 ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters); | 496 ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters); |
| 492 return reg_code; | 497 return reg_code; |
| 493 } | 498 } |
| 494 | 499 |
| 495 | 500 |
| 501 MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) { |
| 502 return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize); |
| 503 } |
| 504 |
| 505 |
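The identity mapping in SafepointRegisterStackIndex holds because the safepoint registers are pushed as a block with the highest encoding at the highest address, leaving register code i exactly i slots above sp; SafepointRegisterSlot then just scales that index by the pointer size. A minimal sketch of the resulting addresses, assuming a 32-bit pointer and a full 16-register safepoint set:

    #include <cstdio>

    int main() {
      const int kPointerSize = 4;             // assumption: 32-bit ARM target
      const int kNumSafepointRegisters = 16;  // assumption: r0..r15 all pushed
      for (int code = 0; code < kNumSafepointRegisters; ++code) {
        // Highest encoding is pushed first, so the lowest code lands at [sp].
        std::printf("r%d -> [sp + #%d]\n", code, code * kPointerSize);
      }
      return 0;
    }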
| 496 void MacroAssembler::Ldrd(Register dst1, Register dst2, | 506 void MacroAssembler::Ldrd(Register dst1, Register dst2, |
| 497 const MemOperand& src, Condition cond) { | 507 const MemOperand& src, Condition cond) { |
| 498 ASSERT(src.rm().is(no_reg)); | 508 ASSERT(src.rm().is(no_reg)); |
| 499 ASSERT(!dst1.is(lr)); // r14. | 509 ASSERT(!dst1.is(lr)); // r14. |
| 500 ASSERT_EQ(0, dst1.code() % 2); | 510 ASSERT_EQ(0, dst1.code() % 2); |
| 501 ASSERT_EQ(dst1.code() + 1, dst2.code()); | 511 ASSERT_EQ(dst1.code() + 1, dst2.code()); |
| 502 | 512 |
| 503 // Generate two ldr instructions if ldrd is not available. | 513 // Generate two ldr instructions if ldrd is not available. |
| 504 if (CpuFeatures::IsSupported(ARMv7)) { | 514 if (CpuFeatures::IsSupported(ARMv7)) { |
| 505 CpuFeatures::Scope scope(ARMv7); | 515 CpuFeatures::Scope scope(ARMv7); |
| (...skipping 1447 matching lines...) |
| 1953 } | 1963 } |
| 1954 | 1964 |
| 1955 | 1965 |
| 1956 void MacroAssembler::AbortIfSmi(Register object) { | 1966 void MacroAssembler::AbortIfSmi(Register object) { |
| 1957 ASSERT_EQ(0, kSmiTag); | 1967 ASSERT_EQ(0, kSmiTag); |
| 1958 tst(object, Operand(kSmiTagMask)); | 1968 tst(object, Operand(kSmiTagMask)); |
| 1959 Assert(ne, "Operand is a smi"); | 1969 Assert(ne, "Operand is a smi"); |
| 1960 } | 1970 } |
| 1961 | 1971 |
| 1962 | 1972 |
| 1973 void MacroAssembler::AbortIfNotSmi(Register object) { |
| 1974 ASSERT_EQ(0, kSmiTag); |
| 1975 tst(object, Operand(kSmiTagMask)); |
| 1976 Assert(eq, "Operand is not smi"); |
| 1977 } |
| 1978 |
| 1979 |
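Both abort helpers rely on the smi tagging scheme: a smi carries a zero tag in its low bit, so tst against kSmiTagMask sets the Z flag exactly for smis, and the two asserts pick opposite conditions. A hedged C++ model of that check, with the 32-bit constants (kSmiTag == 0, kSmiTagMask == 1) taken as assumptions:

    #include <cassert>
    #include <cstdint>

    bool IsSmi(uint32_t value) {
      const uint32_t kSmiTagMask = 1;  // assumption: low bit is the tag
      const uint32_t kSmiTag = 0;      // assumption: smis are tagged with 0
      return (value & kSmiTagMask) == kSmiTag;
    }

    int main() {
      assert(IsSmi(42u << 1));   // a tagged smi: low bit clear
      assert(!IsSmi(0x1001u));   // a heap object pointer: low bit set
      return 0;
    }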
| 1963 void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings( | 1980 void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings( |
| 1964 Register first, | 1981 Register first, |
| 1965 Register second, | 1982 Register second, |
| 1966 Register scratch1, | 1983 Register scratch1, |
| 1967 Register scratch2, | 1984 Register scratch2, |
| 1968 Label* failure) { | 1985 Label* failure) { |
| 1969 // Test that both first and second are sequential ASCII strings. | 1986 // Test that both first and second are sequential ASCII strings. |
| 1970 // Assume that they are non-smis. | 1987 // Assume that they are non-smis. |
| 1971 ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset)); | 1988 ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset)); |
| 1972 ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset)); | 1989 ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset)); |
| (...skipping 205 matching lines...) |
| 2178 Call(function); | 2195 Call(function); |
| 2179 int stack_passed_arguments = (num_arguments <= 4) ? 0 : num_arguments - 4; | 2196 int stack_passed_arguments = (num_arguments <= 4) ? 0 : num_arguments - 4; |
| 2180 if (OS::ActivationFrameAlignment() > kPointerSize) { | 2197 if (OS::ActivationFrameAlignment() > kPointerSize) { |
| 2181 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); | 2198 ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize)); |
| 2182 } else { | 2199 } else { |
| 2183 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); | 2200 add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize))); |
| 2184 } | 2201 } |
| 2185 } | 2202 } |
| 2186 | 2203 |
| 2187 | 2204 |
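The post-call cleanup follows the ARM calling convention: the first four C arguments travel in r0 through r3, so only arguments beyond the fourth occupy stack slots, and sp is either reloaded from a saved slot (over-aligned frames) or simply bumped past those slots. A small sketch of the slot count used in that computation:

    #include <cstdio>

    // First four C arguments go in r0..r3; the rest are passed on the stack.
    int StackPassedArguments(int num_arguments) {
      return (num_arguments <= 4) ? 0 : num_arguments - 4;
    }

    int main() {
      for (int n = 0; n <= 6; ++n) {
        std::printf("%d args -> %d stack slot(s)\n", n, StackPassedArguments(n));
      }
      return 0;
    }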
| 2205 void MacroAssembler::GetRelocatedValueLocation(Register ldr_location, |
| 2206 Register result) { |
| 2207 const uint32_t kLdrOffsetMask = (1 << 12) - 1; |
| 2208 const int32_t kPCRegOffset = 2 * kPointerSize; |
| 2209 ldr(result, MemOperand(ldr_location)); |
| 2210 if (FLAG_debug_code) { |
| 2211 // Check that the instruction is a ldr reg, [pc + offset] . |
| 2212 and_(result, result, Operand(kLdrPCPattern)); |
| 2213 cmp(result, Operand(kLdrPCPattern)); |
| 2214 Check(eq, "The instruction to patch should be a load from pc."); |
| 2215 // Result was clobbered. Restore it. |
| 2216 ldr(result, MemOperand(ldr_location)); |
| 2217 } |
| 2218 // Get the address of the constant. |
| 2219 and_(result, result, Operand(kLdrOffsetMask)); |
| 2220 add(result, ldr_location, Operand(result)); |
| 2221 add(result, result, Operand(kPCRegOffset)); |
| 2222 } |
| 2223 |
| 2224 |
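GetRelocatedValueLocation recovers the constant-pool slot referenced by a pc-relative load: the low 12 bits of the ldr encoding hold the immediate offset, and the ARM pc reads as the instruction address plus 8, which is the kPCRegOffset of 2 * kPointerSize above. A sketch of the same decoding applied to a raw instruction word, with the example encoding and address chosen purely for illustration:

    #include <cstdint>
    #include <cstdio>

    // Decodes the target of "ldr rX, [pc, #offset]" from its raw encoding,
    // assuming the add-form (U bit set) load used for constant-pool accesses.
    uint32_t ConstantPoolEntryAddress(uint32_t instr_address, uint32_t instr) {
      const uint32_t kLdrOffsetMask = (1u << 12) - 1;  // low 12 bits: immediate
      const uint32_t kPCRegOffset = 8;  // pc reads as instruction address + 8
      return instr_address + kPCRegOffset + (instr & kLdrOffsetMask);
    }

    int main() {
      // Hypothetical: "ldr r0, [pc, #16]" located at address 0x1000.
      std::printf("constant at 0x%x\n",
                  ConstantPoolEntryAddress(0x1000, 0xE59F0010));
      return 0;
    }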
| 2188 #ifdef ENABLE_DEBUGGER_SUPPORT | 2225 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 2189 CodePatcher::CodePatcher(byte* address, int instructions) | 2226 CodePatcher::CodePatcher(byte* address, int instructions) |
| 2190 : address_(address), | 2227 : address_(address), |
| 2191 instructions_(instructions), | 2228 instructions_(instructions), |
| 2192 size_(instructions * Assembler::kInstrSize), | 2229 size_(instructions * Assembler::kInstrSize), |
| 2193 masm_(address, size_ + Assembler::kGap) { | 2230 masm_(address, size_ + Assembler::kGap) { |
| 2194 // Create a new macro assembler pointing to the address of the code to patch. | 2231 // Create a new macro assembler pointing to the address of the code to patch. |
| 2195 // The size is adjusted with kGap in order for the assembler to generate size | 2232 // The size is adjusted with kGap in order for the assembler to generate size |
| 2196 // bytes of instructions without failing with buffer size constraints. | 2233 // bytes of instructions without failing with buffer size constraints. |
| 2197 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2234 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| (...skipping 17 matching lines...) |
| 2215 | 2252 |
| 2216 void CodePatcher::Emit(Address addr) { | 2253 void CodePatcher::Emit(Address addr) { |
| 2217 masm()->emit(reinterpret_cast<Instr>(addr)); | 2254 masm()->emit(reinterpret_cast<Instr>(addr)); |
| 2218 } | 2255 } |
| 2219 #endif // ENABLE_DEBUGGER_SUPPORT | 2256 #endif // ENABLE_DEBUGGER_SUPPORT |
| 2220 | 2257 |
| 2221 | 2258 |
| 2222 } } // namespace v8::internal | 2259 } } // namespace v8::internal |
| 2223 | 2260 |
| 2224 #endif // V8_TARGET_ARCH_ARM | 2261 #endif // V8_TARGET_ARCH_ARM |