| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
| 6 | 6 |
| 7 #include "src/arm64/assembler-arm64-inl.h" | 7 #include "src/arm64/assembler-arm64-inl.h" |
| 8 #include "src/arm64/frames-arm64.h" | 8 #include "src/arm64/frames-arm64.h" |
| 9 #include "src/arm64/macro-assembler-arm64-inl.h" | 9 #include "src/arm64/macro-assembler-arm64-inl.h" |
| 10 #include "src/compilation-info.h" | 10 #include "src/compilation-info.h" |
| (...skipping 235 matching lines...) |
| 246 DCHECK(op->IsStackSlot() || op->IsFPStackSlot()); | 246 DCHECK(op->IsStackSlot() || op->IsFPStackSlot()); |
| 247 return SlotToMemOperand(AllocatedOperand::cast(op)->index(), masm); | 247 return SlotToMemOperand(AllocatedOperand::cast(op)->index(), masm); |
| 248 } | 248 } |
| 249 | 249 |
| 250 MemOperand SlotToMemOperand(int slot, MacroAssembler* masm) const { | 250 MemOperand SlotToMemOperand(int slot, MacroAssembler* masm) const { |
| 251 FrameOffset offset = frame_access_state()->GetFrameOffset(slot); | 251 FrameOffset offset = frame_access_state()->GetFrameOffset(slot); |
| 252 if (offset.from_frame_pointer()) { | 252 if (offset.from_frame_pointer()) { |
| 253 int from_sp = offset.offset() + frame_access_state()->GetSPToFPOffset(); | 253 int from_sp = offset.offset() + frame_access_state()->GetSPToFPOffset(); |
| 254 // Convert FP-offsets to SP-offsets if it results in better code. | 254 // Convert FP-offsets to SP-offsets if it results in better code. |
| 255 if (Assembler::IsImmLSUnscaled(from_sp) || | 255 if (Assembler::IsImmLSUnscaled(from_sp) || |
| 256 Assembler::IsImmLSScaled(from_sp, LSDoubleWord)) { | 256 Assembler::IsImmLSScaled(from_sp, 3)) { |
| 257 offset = FrameOffset::FromStackPointer(from_sp); | 257 offset = FrameOffset::FromStackPointer(from_sp); |
| 258 } | 258 } |
| 259 } | 259 } |
| 260 return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp, | 260 return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp, |
| 261 offset.offset()); | 261 offset.offset()); |
| 262 } | 262 } |
| 263 }; | 263 }; |
| 264 | 264 |
| 265 | 265 |
| 266 namespace { | 266 namespace { |
| (...skipping 1671 matching lines...) |
| 1938 frame->AlignFrame(16); | 1938 frame->AlignFrame(16); |
| 1939 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1939 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
| 1940 | 1940 |
| 1941 if (descriptor->UseNativeStack() || descriptor->IsCFunctionCall()) { | 1941 if (descriptor->UseNativeStack() || descriptor->IsCFunctionCall()) { |
| 1942 __ SetStackPointer(csp); | 1942 __ SetStackPointer(csp); |
| 1943 } else { | 1943 } else { |
| 1944 __ SetStackPointer(jssp); | 1944 __ SetStackPointer(jssp); |
| 1945 } | 1945 } |
| 1946 | 1946 |
| 1947 // Save FP registers. | 1947 // Save FP registers. |
| 1948 CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits, | 1948 CPURegList saves_fp = CPURegList(CPURegister::kVRegister, kDRegSizeInBits, |
| 1949 descriptor->CalleeSavedFPRegisters()); | 1949 descriptor->CalleeSavedFPRegisters()); |
| 1950 int saved_count = saves_fp.Count(); | 1950 int saved_count = saves_fp.Count(); |
| 1951 if (saved_count != 0) { | 1951 if (saved_count != 0) { |
| 1952 DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedFP().list()); | 1952 DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedV().list()); |
| 1953 frame->AllocateSavedCalleeRegisterSlots(saved_count * | 1953 frame->AllocateSavedCalleeRegisterSlots(saved_count * |
| 1954 (kDoubleSize / kPointerSize)); | 1954 (kDoubleSize / kPointerSize)); |
| 1955 } | 1955 } |
| 1956 | 1956 |
| 1957 CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits, | 1957 CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits, |
| 1958 descriptor->CalleeSavedRegisters()); | 1958 descriptor->CalleeSavedRegisters()); |
| 1959 saved_count = saves.Count(); | 1959 saved_count = saves.Count(); |
| 1960 if (saved_count != 0) { | 1960 if (saved_count != 0) { |
| 1961 frame->AllocateSavedCalleeRegisterSlots(saved_count); | 1961 frame->AllocateSavedCalleeRegisterSlots(saved_count); |
| 1962 } | 1962 } |
| (...skipping 98 matching lines...) |
| 2061 !descriptor->IsJSFunctionCall() && !descriptor->IsCFunctionCall(); | 2061 !descriptor->IsJSFunctionCall() && !descriptor->IsCFunctionCall(); |
| 2062 if (is_stub_frame) { | 2062 if (is_stub_frame) { |
| 2063 UseScratchRegisterScope temps(masm()); | 2063 UseScratchRegisterScope temps(masm()); |
| 2064 Register temp = temps.AcquireX(); | 2064 Register temp = temps.AcquireX(); |
| 2065 __ Mov(temp, StackFrame::TypeToMarker(info()->GetOutputStackFrameType())); | 2065 __ Mov(temp, StackFrame::TypeToMarker(info()->GetOutputStackFrameType())); |
| 2066 __ Str(temp, MemOperand(fp, TypedFrameConstants::kFrameTypeOffset)); | 2066 __ Str(temp, MemOperand(fp, TypedFrameConstants::kFrameTypeOffset)); |
| 2067 } | 2067 } |
| 2068 } | 2068 } |
| 2069 | 2069 |
| 2070 // Save FP registers. | 2070 // Save FP registers. |
| 2071 CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits, | 2071 CPURegList saves_fp = CPURegList(CPURegister::kVRegister, kDRegSizeInBits, |
| 2072 descriptor->CalleeSavedFPRegisters()); | 2072 descriptor->CalleeSavedFPRegisters()); |
| 2073 int saved_count = saves_fp.Count(); | 2073 int saved_count = saves_fp.Count(); |
| 2074 if (saved_count != 0) { | 2074 if (saved_count != 0) { |
| 2075 DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedFP().list()); | 2075 DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedV().list()); |
| 2076 __ PushCPURegList(saves_fp); | 2076 __ PushCPURegList(saves_fp); |
| 2077 } | 2077 } |
| 2078 // Save registers. | 2078 // Save registers. |
| 2079 // TODO(palfia): TF save list is not in sync with | 2079 // TODO(palfia): TF save list is not in sync with |
| 2080 // CPURegList::GetCalleeSaved(): x30 is missing. | 2080 // CPURegList::GetCalleeSaved(): x30 is missing. |
| 2081 // DCHECK(saves.list() == CPURegList::GetCalleeSaved().list()); | 2081 // DCHECK(saves.list() == CPURegList::GetCalleeSaved().list()); |
| 2082 CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits, | 2082 CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits, |
| 2083 descriptor->CalleeSavedRegisters()); | 2083 descriptor->CalleeSavedRegisters()); |
| 2084 saved_count = saves.Count(); | 2084 saved_count = saves.Count(); |
| 2085 if (saved_count != 0) { | 2085 if (saved_count != 0) { |
| 2086 __ PushCPURegList(saves); | 2086 __ PushCPURegList(saves); |
| 2087 } | 2087 } |
| 2088 } | 2088 } |
| 2089 | 2089 |
| 2090 void CodeGenerator::AssembleReturn(InstructionOperand* pop) { | 2090 void CodeGenerator::AssembleReturn(InstructionOperand* pop) { |
| 2091 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 2091 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
| 2092 | 2092 |
| 2093 // Restore registers. | 2093 // Restore registers. |
| 2094 CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits, | 2094 CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits, |
| 2095 descriptor->CalleeSavedRegisters()); | 2095 descriptor->CalleeSavedRegisters()); |
| 2096 if (saves.Count() != 0) { | 2096 if (saves.Count() != 0) { |
| 2097 __ PopCPURegList(saves); | 2097 __ PopCPURegList(saves); |
| 2098 } | 2098 } |
| 2099 | 2099 |
| 2100 // Restore fp registers. | 2100 // Restore fp registers. |
| 2101 CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits, | 2101 CPURegList saves_fp = CPURegList(CPURegister::kVRegister, kDRegSizeInBits, |
| 2102 descriptor->CalleeSavedFPRegisters()); | 2102 descriptor->CalleeSavedFPRegisters()); |
| 2103 if (saves_fp.Count() != 0) { | 2103 if (saves_fp.Count() != 0) { |
| 2104 __ PopCPURegList(saves_fp); | 2104 __ PopCPURegList(saves_fp); |
| 2105 } | 2105 } |
| 2106 | 2106 |
| 2107 unwinding_info_writer_.MarkBlockWillExit(); | 2107 unwinding_info_writer_.MarkBlockWillExit(); |
| 2108 | 2108 |
| 2109 Arm64OperandConverter g(this, nullptr); | 2109 Arm64OperandConverter g(this, nullptr); |
| 2110 int pop_count = static_cast<int>(descriptor->StackParameterCount()); | 2110 int pop_count = static_cast<int>(descriptor->StackParameterCount()); |
| 2111 if (descriptor->IsCFunctionCall()) { | 2111 if (descriptor->IsCFunctionCall()) { |
| (...skipping 79 matching lines...) |
| 2191 __ LoadObject(dst, src_object); | 2191 __ LoadObject(dst, src_object); |
| 2192 } | 2192 } |
| 2193 } else { | 2193 } else { |
| 2194 __ Mov(dst, g.ToImmediate(source)); | 2194 __ Mov(dst, g.ToImmediate(source)); |
| 2195 } | 2195 } |
| 2196 if (destination->IsStackSlot()) { | 2196 if (destination->IsStackSlot()) { |
| 2197 __ Str(dst, g.ToMemOperand(destination, masm())); | 2197 __ Str(dst, g.ToMemOperand(destination, masm())); |
| 2198 } | 2198 } |
| 2199 } else if (src.type() == Constant::kFloat32) { | 2199 } else if (src.type() == Constant::kFloat32) { |
| 2200 if (destination->IsFPRegister()) { | 2200 if (destination->IsFPRegister()) { |
| 2201 FPRegister dst = g.ToDoubleRegister(destination).S(); | 2201 VRegister dst = g.ToDoubleRegister(destination).S(); |
| 2202 __ Fmov(dst, src.ToFloat32()); | 2202 __ Fmov(dst, src.ToFloat32()); |
| 2203 } else { | 2203 } else { |
| 2204 DCHECK(destination->IsFPStackSlot()); | 2204 DCHECK(destination->IsFPStackSlot()); |
| 2205 if (bit_cast<int32_t>(src.ToFloat32()) == 0) { | 2205 if (bit_cast<int32_t>(src.ToFloat32()) == 0) { |
| 2206 __ Str(wzr, g.ToMemOperand(destination, masm())); | 2206 __ Str(wzr, g.ToMemOperand(destination, masm())); |
| 2207 } else { | 2207 } else { |
| 2208 UseScratchRegisterScope scope(masm()); | 2208 UseScratchRegisterScope scope(masm()); |
| 2209 FPRegister temp = scope.AcquireS(); | 2209 VRegister temp = scope.AcquireS(); |
| 2210 __ Fmov(temp, src.ToFloat32()); | 2210 __ Fmov(temp, src.ToFloat32()); |
| 2211 __ Str(temp, g.ToMemOperand(destination, masm())); | 2211 __ Str(temp, g.ToMemOperand(destination, masm())); |
| 2212 } | 2212 } |
| 2213 } | 2213 } |
| 2214 } else { | 2214 } else { |
| 2215 DCHECK_EQ(Constant::kFloat64, src.type()); | 2215 DCHECK_EQ(Constant::kFloat64, src.type()); |
| 2216 if (destination->IsFPRegister()) { | 2216 if (destination->IsFPRegister()) { |
| 2217 FPRegister dst = g.ToDoubleRegister(destination); | 2217 VRegister dst = g.ToDoubleRegister(destination); |
| 2218 __ Fmov(dst, src.ToFloat64()); | 2218 __ Fmov(dst, src.ToFloat64()); |
| 2219 } else { | 2219 } else { |
| 2220 DCHECK(destination->IsFPStackSlot()); | 2220 DCHECK(destination->IsFPStackSlot()); |
| 2221 if (bit_cast<int64_t>(src.ToFloat64()) == 0) { | 2221 if (bit_cast<int64_t>(src.ToFloat64()) == 0) { |
| 2222 __ Str(xzr, g.ToMemOperand(destination, masm())); | 2222 __ Str(xzr, g.ToMemOperand(destination, masm())); |
| 2223 } else { | 2223 } else { |
| 2224 UseScratchRegisterScope scope(masm()); | 2224 UseScratchRegisterScope scope(masm()); |
| 2225 FPRegister temp = scope.AcquireD(); | 2225 VRegister temp = scope.AcquireD(); |
| 2226 __ Fmov(temp, src.ToFloat64()); | 2226 __ Fmov(temp, src.ToFloat64()); |
| 2227 __ Str(temp, g.ToMemOperand(destination, masm())); | 2227 __ Str(temp, g.ToMemOperand(destination, masm())); |
| 2228 } | 2228 } |
| 2229 } | 2229 } |
| 2230 } | 2230 } |
| 2231 } else if (source->IsFPRegister()) { | 2231 } else if (source->IsFPRegister()) { |
| 2232 FPRegister src = g.ToDoubleRegister(source); | 2232 VRegister src = g.ToDoubleRegister(source); |
| 2233 if (destination->IsFPRegister()) { | 2233 if (destination->IsFPRegister()) { |
| 2234 FPRegister dst = g.ToDoubleRegister(destination); | 2234 VRegister dst = g.ToDoubleRegister(destination); |
| 2235 __ Fmov(dst, src); | 2235 __ Fmov(dst, src); |
| 2236 } else { | 2236 } else { |
| 2237 DCHECK(destination->IsFPStackSlot()); | 2237 DCHECK(destination->IsFPStackSlot()); |
| 2238 __ Str(src, g.ToMemOperand(destination, masm())); | 2238 __ Str(src, g.ToMemOperand(destination, masm())); |
| 2239 } | 2239 } |
| 2240 } else if (source->IsFPStackSlot()) { | 2240 } else if (source->IsFPStackSlot()) { |
| 2241 DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot()); | 2241 DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot()); |
| 2242 MemOperand src = g.ToMemOperand(source, masm()); | 2242 MemOperand src = g.ToMemOperand(source, masm()); |
| 2243 if (destination->IsFPRegister()) { | 2243 if (destination->IsFPRegister()) { |
| 2244 __ Ldr(g.ToDoubleRegister(destination), src); | 2244 __ Ldr(g.ToDoubleRegister(destination), src); |
| 2245 } else { | 2245 } else { |
| 2246 UseScratchRegisterScope scope(masm()); | 2246 UseScratchRegisterScope scope(masm()); |
| 2247 FPRegister temp = scope.AcquireD(); | 2247 VRegister temp = scope.AcquireD(); |
| 2248 __ Ldr(temp, src); | 2248 __ Ldr(temp, src); |
| 2249 __ Str(temp, g.ToMemOperand(destination, masm())); | 2249 __ Str(temp, g.ToMemOperand(destination, masm())); |
| 2250 } | 2250 } |
| 2251 } else { | 2251 } else { |
| 2252 UNREACHABLE(); | 2252 UNREACHABLE(); |
| 2253 } | 2253 } |
| 2254 } | 2254 } |
| 2255 | 2255 |
| 2256 | 2256 |
| 2257 void CodeGenerator::AssembleSwap(InstructionOperand* source, | 2257 void CodeGenerator::AssembleSwap(InstructionOperand* source, |
| (...skipping 23 matching lines...) |
| 2281 DoubleRegister temp_0 = scope.AcquireD(); | 2281 DoubleRegister temp_0 = scope.AcquireD(); |
| 2282 DoubleRegister temp_1 = scope.AcquireD(); | 2282 DoubleRegister temp_1 = scope.AcquireD(); |
| 2283 MemOperand src = g.ToMemOperand(source, masm()); | 2283 MemOperand src = g.ToMemOperand(source, masm()); |
| 2284 MemOperand dst = g.ToMemOperand(destination, masm()); | 2284 MemOperand dst = g.ToMemOperand(destination, masm()); |
| 2285 __ Ldr(temp_0, src); | 2285 __ Ldr(temp_0, src); |
| 2286 __ Ldr(temp_1, dst); | 2286 __ Ldr(temp_1, dst); |
| 2287 __ Str(temp_0, dst); | 2287 __ Str(temp_0, dst); |
| 2288 __ Str(temp_1, src); | 2288 __ Str(temp_1, src); |
| 2289 } else if (source->IsFPRegister()) { | 2289 } else if (source->IsFPRegister()) { |
| 2290 UseScratchRegisterScope scope(masm()); | 2290 UseScratchRegisterScope scope(masm()); |
| 2291 FPRegister temp = scope.AcquireD(); | 2291 VRegister temp = scope.AcquireD(); |
| 2292 FPRegister src = g.ToDoubleRegister(source); | 2292 VRegister src = g.ToDoubleRegister(source); |
| 2293 if (destination->IsFPRegister()) { | 2293 if (destination->IsFPRegister()) { |
| 2294 FPRegister dst = g.ToDoubleRegister(destination); | 2294 VRegister dst = g.ToDoubleRegister(destination); |
| 2295 __ Fmov(temp, src); | 2295 __ Fmov(temp, src); |
| 2296 __ Fmov(src, dst); | 2296 __ Fmov(src, dst); |
| 2297 __ Fmov(dst, temp); | 2297 __ Fmov(dst, temp); |
| 2298 } else { | 2298 } else { |
| 2299 DCHECK(destination->IsFPStackSlot()); | 2299 DCHECK(destination->IsFPStackSlot()); |
| 2300 MemOperand dst = g.ToMemOperand(destination, masm()); | 2300 MemOperand dst = g.ToMemOperand(destination, masm()); |
| 2301 __ Fmov(temp, src); | 2301 __ Fmov(temp, src); |
| 2302 __ Ldr(src, dst); | 2302 __ Ldr(src, dst); |
| 2303 __ Str(temp, dst); | 2303 __ Str(temp, dst); |
| 2304 } | 2304 } |
| (...skipping 31 matching lines...) |
| 2336 padding_size -= kInstructionSize; | 2336 padding_size -= kInstructionSize; |
| 2337 } | 2337 } |
| 2338 } | 2338 } |
| 2339 } | 2339 } |
| 2340 | 2340 |
| 2341 #undef __ | 2341 #undef __ |
| 2342 | 2342 |
| 2343 } // namespace compiler | 2343 } // namespace compiler |
| 2344 } // namespace internal | 2344 } // namespace internal |
| 2345 } // namespace v8 | 2345 } // namespace v8 |
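Two patterns account for most of the changes in the NEW column. The floating-point register types are renamed from `FPRegister`, `CPURegister::kFPRegister`, and `GetCalleeSavedFP()` to `VRegister`, `CPURegister::kVRegister`, and `GetCalleeSavedV()`, reflecting that the scalar FP registers are views of the AArch64 V (vector) register file. In `SlotToMemOperand`, `IsImmLSScaled(from_sp, LSDoubleWord)` becomes `IsImmLSScaled(from_sp, 3)`, where the second argument is now the log2 of the access size in bytes (3 for a 64-bit doubleword). The sketch below is a simplified illustration of the two load/store offset encodings that check distinguishes; the helper names are hypothetical and this is not V8's actual implementation.

```cpp
// Minimal sketch, not V8's implementation: the two AArch64 load/store
// immediate-offset forms that SlotToMemOperand chooses between when it
// rewrites an FP-relative slot as an SP-relative one. Assumption: the
// second argument of IsImmLSScaled is log2 of the access size in bytes.
#include <cstdint>

// Unscaled form (LDUR/STUR): any signed 9-bit byte offset.
constexpr bool IsImmLSUnscaledSketch(int64_t offset) {
  return offset >= -256 && offset <= 255;
}

// Scaled form (LDR/STR): unsigned 12-bit offset counted in access-size
// units, so the byte offset must be aligned to 1 << size_log2.
constexpr bool IsImmLSScaledSketch(int64_t offset, unsigned size_log2) {
  return (offset & ((int64_t{1} << size_log2) - 1)) == 0 && offset >= 0 &&
         (offset >> size_log2) < (int64_t{1} << 12);
}

// Example: +16 bytes fits the scaled doubleword form; -8 only the unscaled.
static_assert(IsImmLSScaledSketch(16, 3), "scaled doubleword offset");
static_assert(!IsImmLSScaledSketch(-8, 3) && IsImmLSUnscaledSketch(-8),
              "negative offsets need the unscaled form");
```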