| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
| 6 #include "src/compiler/code-generator-impl.h" | 6 #include "src/compiler/code-generator-impl.h" |
| 7 #include "src/compiler/gap-resolver.h" | 7 #include "src/compiler/gap-resolver.h" |
| 8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
| 9 #include "src/mips/macro-assembler-mips.h" | 9 #include "src/mips/macro-assembler-mips.h" |
| 10 #include "src/scopes.h" | 10 #include "src/scopes.h" |
| (...skipping 88 matching lines...) |
| 99 UNREACHABLE(); | 99 UNREACHABLE(); |
| 100 } | 100 } |
| 101 UNREACHABLE(); | 101 UNREACHABLE(); |
| 102 return MemOperand(no_reg); | 102 return MemOperand(no_reg); |
| 103 } | 103 } |
| 104 | 104 |
| 105 MemOperand MemoryOperand(size_t index = 0) { return MemoryOperand(&index); } | 105 MemOperand MemoryOperand(size_t index = 0) { return MemoryOperand(&index); } |
| 106 | 106 |
| 107 MemOperand ToMemOperand(InstructionOperand* op) const { | 107 MemOperand ToMemOperand(InstructionOperand* op) const { |
| 108 DCHECK(op != NULL); | 108 DCHECK(op != NULL); |
| | 109 DCHECK(!op->IsRegister()); |
| | 110 DCHECK(!op->IsDoubleRegister()); |
| 109 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); | 111 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot()); |
| 110 FrameOffset offset = | 112 // The linkage computes where all spill slots are located. |
| 111 linkage()->GetFrameOffset(AllocatedOperand::cast(op)->index(), frame()); | 113 FrameOffset offset = linkage()->GetFrameOffset( |
| | 114 AllocatedOperand::cast(op)->index(), frame(), 0); |
| 112 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset()); | 115 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset()); |
| 113 } | 116 } |
| 114 }; | 117 }; |
| 115 | 118 |
| 116 | 119 |
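Note on the ToMemOperand() hunk above: the NEW code adds stricter DCHECKs and passes an extra displacement argument (here 0) to Linkage::GetFrameOffset(); the returned FrameOffset then selects sp or fp as the base register for the spill slot. Below is a minimal standalone sketch of that addressing decision. The struct, constant, and offset formulas are illustrative assumptions, not the actual V8 implementation.

```cpp
// Standalone illustration (not V8 code): a spill-slot index is turned into
// a base register (sp or fp) plus a byte offset, with an optional extra
// displacement -- the new third parameter in the diff.
#include <cstdio>

struct FrameOffset {
  bool from_stack_pointer;  // true -> address relative to sp, else fp
  int offset;               // byte offset from the chosen base register
};

constexpr int kPointerSize = 8;  // 64-bit target, as on mips64

// Hypothetical stand-in for Linkage::GetFrameOffset(): when a frame
// pointer exists, spill slots live at negative offsets from fp; the
// 'extra' displacement is simply folded into the result.
FrameOffset GetFrameOffset(int spill_slot, bool has_frame_pointer, int extra) {
  if (has_frame_pointer) {
    return {false, -(spill_slot + 1) * kPointerSize + extra};
  }
  // Frameless case: address the slot upward from sp instead.
  return {true, spill_slot * kPointerSize + extra};
}

int main() {
  FrameOffset off =
      GetFrameOffset(/*spill_slot=*/2, /*has_frame_pointer=*/true, /*extra=*/0);
  std::printf("base=%s offset=%d\n", off.from_stack_pointer ? "sp" : "fp",
              off.offset);  // base=fp offset=-24
}
```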
| 117 static inline bool HasRegisterInput(Instruction* instr, size_t index) { | 120 static inline bool HasRegisterInput(Instruction* instr, size_t index) { |
| 118 return instr->InputAt(index)->IsRegister(); | 121 return instr->InputAt(index)->IsRegister(); |
| 119 } | 122 } |
| 120 | 123 |
| 121 | 124 |
| (...skipping 1044 matching lines...) |
| 1166 frame()->SetRegisterSaveAreaSize(register_save_area_size); | 1169 frame()->SetRegisterSaveAreaSize(register_save_area_size); |
| 1167 } else if (descriptor->IsJSFunctionCall()) { | 1170 } else if (descriptor->IsJSFunctionCall()) { |
| 1168 CompilationInfo* info = this->info(); | 1171 CompilationInfo* info = this->info(); |
| 1169 __ Prologue(info->IsCodePreAgingActive()); | 1172 __ Prologue(info->IsCodePreAgingActive()); |
| 1170 frame()->SetRegisterSaveAreaSize( | 1173 frame()->SetRegisterSaveAreaSize( |
| 1171 StandardFrameConstants::kFixedFrameSizeFromFp); | 1174 StandardFrameConstants::kFixedFrameSizeFromFp); |
| 1172 } else if (needs_frame_) { | 1175 } else if (needs_frame_) { |
| 1173 __ StubPrologue(); | 1176 __ StubPrologue(); |
| 1174 frame()->SetRegisterSaveAreaSize( | 1177 frame()->SetRegisterSaveAreaSize( |
| 1175 StandardFrameConstants::kFixedFrameSizeFromFp); | 1178 StandardFrameConstants::kFixedFrameSizeFromFp); |
| 1176 } else { | |
| 1177 frame()->SetPCOnStack(false); | |
| 1178 } | 1179 } |
| 1179 | 1180 |
| 1180 if (info()->is_osr()) { | 1181 if (info()->is_osr()) { |
| 1181 // TurboFan OSR-compiled functions cannot be entered directly. | 1182 // TurboFan OSR-compiled functions cannot be entered directly. |
| 1182 __ Abort(kShouldNotDirectlyEnterOsrFunction); | 1183 __ Abort(kShouldNotDirectlyEnterOsrFunction); |
| 1183 | 1184 |
| 1184 // Unoptimized code jumps directly to this entrypoint while the unoptimized | 1185 // Unoptimized code jumps directly to this entrypoint while the unoptimized |
| 1185 // frame is still on the stack. Optimized code uses OSR values directly from | 1186 // frame is still on the stack. Optimized code uses OSR values directly from |
| 1186 // the unoptimized frame. Thus, all that needs to be done is to allocate the | 1187 // the unoptimized frame. Thus, all that needs to be done is to allocate the |
| 1187 // remaining stack slots. | 1188 // remaining stack slots. |
| 1188 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --"); | 1189 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --"); |
| 1189 osr_pc_offset_ = __ pc_offset(); | 1190 osr_pc_offset_ = __ pc_offset(); |
| 1190 // TODO(titzer): cannot address target function == local #-1 | 1191 // TODO(titzer): cannot address target function == local #-1 |
| 1191 __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1192 __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1192 DCHECK(stack_slots >= frame()->GetOsrStackSlotCount()); | 1193 DCHECK(stack_slots >= frame()->GetOsrStackSlotCount()); |
| 1193 stack_slots -= frame()->GetOsrStackSlotCount(); | 1194 stack_slots -= frame()->GetOsrStackSlotCount(); |
| 1194 } | 1195 } |
| 1195 | 1196 |
| 1196 if (stack_slots > 0) { | 1197 if (stack_slots > 0) { |
| 1197 __ Dsubu(sp, sp, Operand(stack_slots * kPointerSize)); | 1198 __ Dsubu(sp, sp, Operand(stack_slots * kPointerSize)); |
| 1198 } | 1199 } |
| 1199 } | 1200 } |
| 1200 | 1201 |
| 1201 | 1202 |
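Regarding the OSR path in AssemblePrologue() above: stack slots already materialized by the unoptimized frame are deducted before the final sp adjustment, so only the remaining spill slots are allocated by the Dsubu. A small sketch of that accounting; the helper name is an assumption for illustration, not a real V8 function.

```cpp
// Sketch of the stack-slot accounting at the end of AssemblePrologue()
// (illustrative only; SlotsToAllocate does not exist in V8).
#include <cassert>

int SlotsToAllocate(int stack_slots, bool is_osr, int osr_slot_count) {
  if (is_osr) {
    // Mirrors the DCHECK in the diff: the unoptimized frame cannot own
    // more slots than the optimized frame needs in total.
    assert(stack_slots >= osr_slot_count);
    stack_slots -= osr_slot_count;
  }
  // The prologue then emits: Dsubu(sp, sp, stack_slots * kPointerSize).
  return stack_slots;
}
```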
| 1202 void CodeGenerator::AssembleReturn() { | 1203 void CodeGenerator::AssembleReturn() { |
| 1203 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); | 1204 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); |
| 1204 int stack_slots = frame()->GetSpillSlotCount(); | 1205 int stack_slots = frame()->GetSpillSlotCount(); |
| 1205 int pop_count = static_cast<int>(descriptor->StackParameterCount()); | |
| 1206 if (descriptor->kind() == CallDescriptor::kCallAddress) { | 1206 if (descriptor->kind() == CallDescriptor::kCallAddress) { |
| 1207 if (frame()->GetRegisterSaveAreaSize() > 0) { | 1207 if (frame()->GetRegisterSaveAreaSize() > 0) { |
| 1208 // Remove this frame's spill slots first. | 1208 // Remove this frame's spill slots first. |
| 1209 if (stack_slots > 0) { | 1209 if (stack_slots > 0) { |
| 1210 __ Daddu(sp, sp, Operand(stack_slots * kPointerSize)); | 1210 __ Daddu(sp, sp, Operand(stack_slots * kPointerSize)); |
| 1211 } | 1211 } |
| 1212 // Restore FPU registers. | 1212 // Restore FPU registers. |
| 1213 const RegList saves_fpu = descriptor->CalleeSavedFPRegisters(); | 1213 const RegList saves_fpu = descriptor->CalleeSavedFPRegisters(); |
| 1214 __ MultiPopFPU(saves_fpu); | 1214 __ MultiPopFPU(saves_fpu); |
| 1215 | 1215 |
| 1216 // Restore GP registers. | 1216 // Restore GP registers. |
| 1217 const RegList saves = descriptor->CalleeSavedRegisters(); | 1217 const RegList saves = descriptor->CalleeSavedRegisters(); |
| 1218 __ MultiPop(saves); | 1218 __ MultiPop(saves); |
| 1219 } | 1219 } |
| 1220 __ mov(sp, fp); | 1220 __ mov(sp, fp); |
| 1221 __ Pop(ra, fp); | 1221 __ Pop(ra, fp); |
| | 1222 __ Ret(); |
| 1222 } else if (descriptor->IsJSFunctionCall() || needs_frame_) { | 1223 } else if (descriptor->IsJSFunctionCall() || needs_frame_) { |
| 1223 // Canonicalize JSFunction return sites for now. | 1224 // Canonicalize JSFunction return sites for now. |
| 1224 if (return_label_.is_bound()) { | 1225 if (return_label_.is_bound()) { |
| 1225 __ Branch(&return_label_); | 1226 __ Branch(&return_label_); |
| 1226 return; | |
| 1227 } else { | 1227 } else { |
| 1228 __ bind(&return_label_); | 1228 __ bind(&return_label_); |
| 1229 __ mov(sp, fp); | 1229 __ mov(sp, fp); |
| 1230 __ Pop(ra, fp); | 1230 __ Pop(ra, fp); |
| | 1231 int pop_count = static_cast<int>(descriptor->StackParameterCount()); |
| | 1232 if (pop_count != 0) { |
| | 1233 __ DropAndRet(pop_count); |
| | 1234 } else { |
| | 1235 __ Ret(); |
| | 1236 } |
| 1231 } | 1237 } |
| 1232 } | |
| 1233 if (pop_count != 0) { | |
| 1234 __ DropAndRet(pop_count); | |
| 1235 } else { | 1238 } else { |
| 1236 __ Ret(); | 1239 __ Ret(); |
| 1237 } | 1240 } |
| 1238 } | 1241 } |
| 1239 | 1242 |
| 1240 | 1243 |
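On the AssembleReturn() restructuring above: the NEW code emits Ret() immediately for kCallAddress frames and folds the stack-parameter drop into the canonical return site via DropAndRet(pop_count). The sketch below only models the net effect on the stack pointer; the helper name and layout constants are assumptions, not MacroAssembler APIs.

```cpp
// Net effect of the JS/stub epilogue on sp (illustration only).
#include <cstdint>
#include <cstdio>

constexpr int kPointerSize = 8;  // mips64: 64-bit slots

// After "mov sp, fp", "Pop(ra, fp)" and "DropAndRet(pop_count)", the stack
// pointer ends up above the saved ra/fp pair and the dropped parameters.
uint64_t StackPointerAfterReturn(uint64_t fp, int pop_count) {
  uint64_t sp = fp;                                        // __ mov(sp, fp)
  sp += 2 * kPointerSize;                                  // __ Pop(ra, fp)
  sp += static_cast<uint64_t>(pop_count) * kPointerSize;   // __ DropAndRet(pop_count)
  return sp;
}

int main() {
  std::printf("%llu\n", static_cast<unsigned long long>(
                            StackPointerAfterReturn(0x1000, 2)));
  // Prints 4128 (= 0x1000 + 16 bytes for ra/fp + 16 bytes of parameters).
}
```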
| 1241 void CodeGenerator::AssembleMove(InstructionOperand* source, | 1244 void CodeGenerator::AssembleMove(InstructionOperand* source, |
| 1242 InstructionOperand* destination) { | 1245 InstructionOperand* destination) { |
| 1243 MipsOperandConverter g(this, NULL); | 1246 MipsOperandConverter g(this, NULL); |
| 1244 // Dispatch on the source and destination operand kinds. Not all | 1247 // Dispatch on the source and destination operand kinds. Not all |
| (...skipping 197 matching lines...) |
| 1442 } | 1445 } |
| 1443 } | 1446 } |
| 1444 } | 1447 } |
| 1445 } | 1448 } |
| 1446 | 1449 |
| 1447 #undef __ | 1450 #undef __ |
| 1448 | 1451 |
| 1449 } // namespace compiler | 1452 } // namespace compiler |
| 1450 } // namespace internal | 1453 } // namespace internal |
| 1451 } // namespace v8 | 1454 } // namespace v8 |