OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // |
| 3 // Copyright IBM Corp. 2012, 2013. All rights reserved. |
| 4 // |
2 // Use of this source code is governed by a BSD-style license that can be | 5 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 6 // found in the LICENSE file. |
4 | 7 |
5 #ifndef V8_ARM_LITHIUM_CODEGEN_ARM_H_ | 8 #ifndef V8_PPC_LITHIUM_CODEGEN_PPC_H_ |
6 #define V8_ARM_LITHIUM_CODEGEN_ARM_H_ | 9 #define V8_PPC_LITHIUM_CODEGEN_PPC_H_ |
7 | 10 |
8 #include "src/arm/lithium-arm.h" | 11 #include "src/ppc/lithium-ppc.h" |
9 | 12 |
10 #include "src/arm/lithium-gap-resolver-arm.h" | 13 #include "src/ppc/lithium-gap-resolver-ppc.h" |
11 #include "src/deoptimizer.h" | 14 #include "src/deoptimizer.h" |
12 #include "src/lithium-codegen.h" | 15 #include "src/lithium-codegen.h" |
13 #include "src/safepoint-table.h" | 16 #include "src/safepoint-table.h" |
14 #include "src/scopes.h" | 17 #include "src/scopes.h" |
15 #include "src/utils.h" | 18 #include "src/utils.h" |
16 | 19 |
17 namespace v8 { | 20 namespace v8 { |
18 namespace internal { | 21 namespace internal { |
19 | 22 |
20 // Forward declarations. | 23 // Forward declarations. |
21 class LDeferredCode; | 24 class LDeferredCode; |
22 class SafepointGenerator; | 25 class SafepointGenerator; |
23 | 26 |
24 class LCodeGen: public LCodeGenBase { | 27 class LCodeGen : public LCodeGenBase { |
25 public: | 28 public: |
26 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info) | 29 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info) |
27 : LCodeGenBase(chunk, assembler, info), | 30 : LCodeGenBase(chunk, assembler, info), |
28 deoptimizations_(4, info->zone()), | 31 deoptimizations_(4, info->zone()), |
29 deopt_jump_table_(4, info->zone()), | 32 jump_table_(4, info->zone()), |
30 deoptimization_literals_(8, info->zone()), | 33 deoptimization_literals_(8, info->zone()), |
31 inlined_function_count_(0), | 34 inlined_function_count_(0), |
32 scope_(info->scope()), | 35 scope_(info->scope()), |
33 translations_(info->zone()), | 36 translations_(info->zone()), |
34 deferred_(8, info->zone()), | 37 deferred_(8, info->zone()), |
35 osr_pc_offset_(-1), | 38 osr_pc_offset_(-1), |
36 frame_is_built_(false), | 39 frame_is_built_(false), |
37 safepoints_(info->zone()), | 40 safepoints_(info->zone()), |
38 resolver_(this), | 41 resolver_(this), |
39 expected_safepoint_kind_(Safepoint::kSimple) { | 42 expected_safepoint_kind_(Safepoint::kSimple) { |
40 PopulateDeoptimizationLiteralsWithInlinedFunctions(); | 43 PopulateDeoptimizationLiteralsWithInlinedFunctions(); |
41 } | 44 } |
42 | 45 |
43 | 46 |
44 int LookupDestination(int block_id) const { | 47 int LookupDestination(int block_id) const { |
45 return chunk()->LookupDestination(block_id); | 48 return chunk()->LookupDestination(block_id); |
46 } | 49 } |
47 | 50 |
48 bool IsNextEmittedBlock(int block_id) const { | 51 bool IsNextEmittedBlock(int block_id) const { |
49 return LookupDestination(block_id) == GetNextEmittedBlock(); | 52 return LookupDestination(block_id) == GetNextEmittedBlock(); |
50 } | 53 } |
51 | 54 |
52 bool NeedsEagerFrame() const { | 55 bool NeedsEagerFrame() const { |
53 return GetStackSlotCount() > 0 || | 56 return GetStackSlotCount() > 0 || info()->is_non_deferred_calling() || |
54 info()->is_non_deferred_calling() || | 57 !info()->IsStub() || info()->requires_frame(); |
55 !info()->IsStub() || | |
56 info()->requires_frame(); | |
57 } | 58 } |
58 bool NeedsDeferredFrame() const { | 59 bool NeedsDeferredFrame() const { |
59 return !NeedsEagerFrame() && info()->is_deferred_calling(); | 60 return !NeedsEagerFrame() && info()->is_deferred_calling(); |
60 } | 61 } |
61 | 62 |
62 LinkRegisterStatus GetLinkRegisterState() const { | 63 LinkRegisterStatus GetLinkRegisterState() const { |
63 return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved; | 64 return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved; |
64 } | 65 } |
65 | 66 |
66 // Support for converting LOperands to assembler types. | 67 // Support for converting LOperands to assembler types. |
67 // LOperand must be a register. | 68 // LOperand must be a register. |
68 Register ToRegister(LOperand* op) const; | 69 Register ToRegister(LOperand* op) const; |
69 | 70 |
70 // LOperand is loaded into scratch, unless already a register. | 71 // LOperand is loaded into scratch, unless already a register. |
71 Register EmitLoadRegister(LOperand* op, Register scratch); | 72 Register EmitLoadRegister(LOperand* op, Register scratch); |
72 | 73 |
| 74 // LConstantOperand must be an Integer32 or Smi |
| 75 void EmitLoadIntegerConstant(LConstantOperand* const_op, Register dst); |
| 76 |
73 // LOperand must be a double register. | 77 // LOperand must be a double register. |
74 DwVfpRegister ToDoubleRegister(LOperand* op) const; | 78 DoubleRegister ToDoubleRegister(LOperand* op) const; |
75 | 79 |
76 // LOperand is loaded into dbl_scratch, unless already a double register. | 80 intptr_t ToRepresentation(LConstantOperand* op, |
77 DwVfpRegister EmitLoadDoubleRegister(LOperand* op, | 81 const Representation& r) const; |
78 SwVfpRegister flt_scratch, | |
79 DwVfpRegister dbl_scratch); | |
80 int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const; | |
81 int32_t ToInteger32(LConstantOperand* op) const; | 82 int32_t ToInteger32(LConstantOperand* op) const; |
82 Smi* ToSmi(LConstantOperand* op) const; | 83 Smi* ToSmi(LConstantOperand* op) const; |
83 double ToDouble(LConstantOperand* op) const; | 84 double ToDouble(LConstantOperand* op) const; |
84 Operand ToOperand(LOperand* op); | 85 Operand ToOperand(LOperand* op); |
85 MemOperand ToMemOperand(LOperand* op) const; | 86 MemOperand ToMemOperand(LOperand* op) const; |
86 // Returns a MemOperand pointing to the high word of a DoubleStackSlot. | 87 // Returns a MemOperand pointing to the high word of a DoubleStackSlot. |
87 MemOperand ToHighMemOperand(LOperand* op) const; | 88 MemOperand ToHighMemOperand(LOperand* op) const; |
88 | 89 |
89 bool IsInteger32(LConstantOperand* op) const; | 90 bool IsInteger32(LConstantOperand* op) const; |
90 bool IsSmi(LConstantOperand* op) const; | 91 bool IsSmi(LConstantOperand* op) const; |
91 Handle<Object> ToHandle(LConstantOperand* op) const; | 92 Handle<Object> ToHandle(LConstantOperand* op) const; |
92 | 93 |
93 // Try to generate code for the entire chunk, but it may fail if the | 94 // Try to generate code for the entire chunk, but it may fail if the |
94 // chunk contains constructs we cannot handle. Returns true if the | 95 // chunk contains constructs we cannot handle. Returns true if the |
95 // code generation attempt succeeded. | 96 // code generation attempt succeeded. |
96 bool GenerateCode(); | 97 bool GenerateCode(); |
97 | 98 |
98 // Finish the code by setting stack height, safepoint, and bailout | 99 // Finish the code by setting stack height, safepoint, and bailout |
99 // information on it. | 100 // information on it. |
100 void FinishCode(Handle<Code> code); | 101 void FinishCode(Handle<Code> code); |
101 | 102 |
102 // Deferred code support. | 103 // Deferred code support. |
103 void DoDeferredNumberTagD(LNumberTagD* instr); | 104 void DoDeferredNumberTagD(LNumberTagD* instr); |
104 | 105 |
105 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 }; | 106 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 }; |
106 void DoDeferredNumberTagIU(LInstruction* instr, | 107 void DoDeferredNumberTagIU(LInstruction* instr, LOperand* value, |
107 LOperand* value, | 108 LOperand* temp1, LOperand* temp2, |
108 LOperand* temp1, | |
109 LOperand* temp2, | |
110 IntegerSignedness signedness); | 109 IntegerSignedness signedness); |
111 | 110 |
112 void DoDeferredTaggedToI(LTaggedToI* instr); | 111 void DoDeferredTaggedToI(LTaggedToI* instr); |
113 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr); | 112 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr); |
114 void DoDeferredStackCheck(LStackCheck* instr); | 113 void DoDeferredStackCheck(LStackCheck* instr); |
115 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr); | 114 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr); |
116 void DoDeferredStringCharFromCode(LStringCharFromCode* instr); | 115 void DoDeferredStringCharFromCode(LStringCharFromCode* instr); |
117 void DoDeferredAllocate(LAllocate* instr); | 116 void DoDeferredAllocate(LAllocate* instr); |
118 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | 117 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
119 Label* map_check, Label* bool_load); | 118 Label* map_check); |
120 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); | 119 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); |
121 void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | 120 void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, Register result, |
122 Register result, | 121 Register object, Register index); |
123 Register object, | |
124 Register index); | |
125 | 122 |
126 // Parallel move support. | 123 // Parallel move support. |
127 void DoParallelMove(LParallelMove* move); | 124 void DoParallelMove(LParallelMove* move); |
128 void DoGap(LGap* instr); | 125 void DoGap(LGap* instr); |
129 | 126 |
130 MemOperand PrepareKeyedOperand(Register key, | 127 MemOperand PrepareKeyedOperand(Register key, Register base, |
131 Register base, | 128 bool key_is_constant, bool key_is_tagged, |
132 bool key_is_constant, | 129 int constant_key, int element_size_shift, |
133 int constant_key, | |
134 int element_size, | |
135 int shift_size, | |
136 int base_offset); | 130 int base_offset); |
137 | 131 |
138 // Emit frame translation commands for an environment. | 132 // Emit frame translation commands for an environment. |
139 void WriteTranslation(LEnvironment* environment, Translation* translation); | 133 void WriteTranslation(LEnvironment* environment, Translation* translation); |
140 | 134 |
141 // Declare methods that deal with the individual node types. | 135 // Declare methods that deal with the individual node types. |
142 #define DECLARE_DO(type) void Do##type(L##type* node); | 136 #define DECLARE_DO(type) void Do##type(L##type* node); |
143 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) | 137 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) |
144 #undef DECLARE_DO | 138 #undef DECLARE_DO |
145 | 139 |
146 private: | 140 private: |
147 StrictMode strict_mode() const { return info()->strict_mode(); } | 141 StrictMode strict_mode() const { return info()->strict_mode(); } |
148 | 142 |
149 Scope* scope() const { return scope_; } | 143 Scope* scope() const { return scope_; } |
150 | 144 |
151 Register scratch0() { return r9; } | 145 Register scratch0() { return r11; } |
152 LowDwVfpRegister double_scratch0() { return kScratchDoubleReg; } | 146 DoubleRegister double_scratch0() { return kScratchDoubleReg; } |
153 | 147 |
154 LInstruction* GetNextInstruction(); | 148 LInstruction* GetNextInstruction(); |
155 | 149 |
156 void EmitClassOfTest(Label* if_true, | 150 void EmitClassOfTest(Label* if_true, Label* if_false, |
157 Label* if_false, | 151 Handle<String> class_name, Register input, |
158 Handle<String> class_name, | 152 Register temporary, Register temporary2); |
159 Register input, | |
160 Register temporary, | |
161 Register temporary2); | |
162 | 153 |
163 int GetStackSlotCount() const { return chunk()->spill_slot_count(); } | 154 int GetStackSlotCount() const { return chunk()->spill_slot_count(); } |
164 | 155 |
165 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } | 156 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); } |
166 | 157 |
167 void SaveCallerDoubles(); | 158 void SaveCallerDoubles(); |
168 void RestoreCallerDoubles(); | 159 void RestoreCallerDoubles(); |
169 | 160 |
170 // Code generation passes. Returns true if code generation should | 161 // Code generation passes. Returns true if code generation should |
171 // continue. | 162 // continue. |
172 void GenerateBodyInstructionPre(LInstruction* instr) OVERRIDE; | 163 void GenerateBodyInstructionPre(LInstruction* instr) OVERRIDE; |
173 bool GeneratePrologue(); | 164 bool GeneratePrologue(); |
174 bool GenerateDeferredCode(); | 165 bool GenerateDeferredCode(); |
175 bool GenerateDeoptJumpTable(); | 166 bool GenerateJumpTable(); |
176 bool GenerateSafepointTable(); | 167 bool GenerateSafepointTable(); |
177 | 168 |
178 // Generates the custom OSR entrypoint and sets the osr_pc_offset. | 169 // Generates the custom OSR entrypoint and sets the osr_pc_offset. |
179 void GenerateOsrPrologue(); | 170 void GenerateOsrPrologue(); |
180 | 171 |
181 enum SafepointMode { | 172 enum SafepointMode { |
182 RECORD_SIMPLE_SAFEPOINT, | 173 RECORD_SIMPLE_SAFEPOINT, |
183 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS | 174 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS |
184 }; | 175 }; |
185 | 176 |
186 int CallCodeSize(Handle<Code> code, RelocInfo::Mode mode); | 177 void CallCode(Handle<Code> code, RelocInfo::Mode mode, LInstruction* instr); |
187 | 178 |
188 void CallCode( | 179 void CallCodeGeneric(Handle<Code> code, RelocInfo::Mode mode, |
189 Handle<Code> code, | 180 LInstruction* instr, SafepointMode safepoint_mode); |
190 RelocInfo::Mode mode, | |
191 LInstruction* instr, | |
192 TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS); | |
193 | 181 |
194 void CallCodeGeneric( | 182 void CallRuntime(const Runtime::Function* function, int num_arguments, |
195 Handle<Code> code, | |
196 RelocInfo::Mode mode, | |
197 LInstruction* instr, | |
198 SafepointMode safepoint_mode, | |
199 TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS); | |
200 | |
201 void CallRuntime(const Runtime::Function* function, | |
202 int num_arguments, | |
203 LInstruction* instr, | 183 LInstruction* instr, |
204 SaveFPRegsMode save_doubles = kDontSaveFPRegs); | 184 SaveFPRegsMode save_doubles = kDontSaveFPRegs); |
205 | 185 |
206 void CallRuntime(Runtime::FunctionId id, | 186 void CallRuntime(Runtime::FunctionId id, int num_arguments, |
207 int num_arguments, | |
208 LInstruction* instr) { | 187 LInstruction* instr) { |
209 const Runtime::Function* function = Runtime::FunctionForId(id); | 188 const Runtime::Function* function = Runtime::FunctionForId(id); |
210 CallRuntime(function, num_arguments, instr); | 189 CallRuntime(function, num_arguments, instr); |
211 } | 190 } |
212 | 191 |
213 void LoadContextFromDeferred(LOperand* context); | 192 void LoadContextFromDeferred(LOperand* context); |
214 void CallRuntimeFromDeferred(Runtime::FunctionId id, | 193 void CallRuntimeFromDeferred(Runtime::FunctionId id, int argc, |
215 int argc, | 194 LInstruction* instr, LOperand* context); |
216 LInstruction* instr, | |
217 LOperand* context); | |
218 | 195 |
219 enum R1State { | 196 enum R4State { R4_UNINITIALIZED, R4_CONTAINS_TARGET }; |
220 R1_UNINITIALIZED, | |
221 R1_CONTAINS_TARGET | |
222 }; | |
223 | 197 |
224 // Generate a direct call to a known function. Expects the function | 198 // Generate a direct call to a known function. Expects the function |
225 // to be in r1. | 199 // to be in r4. |
226 void CallKnownFunction(Handle<JSFunction> function, | 200 void CallKnownFunction(Handle<JSFunction> function, |
227 int formal_parameter_count, | 201 int formal_parameter_count, int arity, |
228 int arity, | 202 LInstruction* instr, R4State r4_state); |
229 LInstruction* instr, | |
230 R1State r1_state); | |
231 | 203 |
232 void RecordSafepointWithLazyDeopt(LInstruction* instr, | 204 void RecordSafepointWithLazyDeopt(LInstruction* instr, |
233 SafepointMode safepoint_mode); | 205 SafepointMode safepoint_mode); |
234 | 206 |
235 void RegisterEnvironmentForDeoptimization(LEnvironment* environment, | 207 void RegisterEnvironmentForDeoptimization(LEnvironment* environment, |
236 Safepoint::DeoptMode mode); | 208 Safepoint::DeoptMode mode); |
237 void DeoptimizeIf(Condition condition, | 209 void DeoptimizeIf(Condition condition, LInstruction* instr, |
238 LEnvironment* environment, | 210 const char* detail, Deoptimizer::BailoutType bailout_type, |
239 Deoptimizer::BailoutType bailout_type); | 211 CRegister cr = cr7); |
240 void DeoptimizeIf(Condition condition, LEnvironment* environment); | 212 void DeoptimizeIf(Condition condition, LInstruction* instr, |
| 213 CRegister cr = cr7, const char* detail = NULL); |
241 | 214 |
242 void AddToTranslation(LEnvironment* environment, | 215 void AddToTranslation(LEnvironment* environment, Translation* translation, |
243 Translation* translation, | 216 LOperand* op, bool is_tagged, bool is_uint32, |
244 LOperand* op, | |
245 bool is_tagged, | |
246 bool is_uint32, | |
247 int* object_index_pointer, | 217 int* object_index_pointer, |
248 int* dematerialized_index_pointer); | 218 int* dematerialized_index_pointer); |
249 void PopulateDeoptimizationData(Handle<Code> code); | 219 void PopulateDeoptimizationData(Handle<Code> code); |
250 int DefineDeoptimizationLiteral(Handle<Object> literal); | 220 int DefineDeoptimizationLiteral(Handle<Object> literal); |
251 | 221 |
252 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); | 222 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); |
253 | 223 |
254 Register ToRegister(int index) const; | 224 Register ToRegister(int index) const; |
255 DwVfpRegister ToDoubleRegister(int index) const; | 225 DoubleRegister ToDoubleRegister(int index) const; |
256 | 226 |
257 MemOperand BuildSeqStringOperand(Register string, | 227 MemOperand BuildSeqStringOperand(Register string, LOperand* index, |
258 LOperand* index, | |
259 String::Encoding encoding); | 228 String::Encoding encoding); |
260 | 229 |
261 void EmitIntegerMathAbs(LMathAbs* instr); | 230 void EmitMathAbs(LMathAbs* instr); |
| 231 #if V8_TARGET_ARCH_PPC64 |
| 232 void EmitInteger32MathAbs(LMathAbs* instr); |
| 233 #endif |
262 | 234 |
263 // Support for recording safepoint and position information. | 235 // Support for recording safepoint and position information. |
264 void RecordSafepoint(LPointerMap* pointers, | 236 void RecordSafepoint(LPointerMap* pointers, Safepoint::Kind kind, |
265 Safepoint::Kind kind, | 237 int arguments, Safepoint::DeoptMode mode); |
266 int arguments, | |
267 Safepoint::DeoptMode mode); | |
268 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode); | 238 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode); |
269 void RecordSafepoint(Safepoint::DeoptMode mode); | 239 void RecordSafepoint(Safepoint::DeoptMode mode); |
270 void RecordSafepointWithRegisters(LPointerMap* pointers, | 240 void RecordSafepointWithRegisters(LPointerMap* pointers, int arguments, |
271 int arguments, | |
272 Safepoint::DeoptMode mode); | 241 Safepoint::DeoptMode mode); |
273 | 242 |
274 void RecordAndWritePosition(int position) OVERRIDE; | 243 void RecordAndWritePosition(int position) OVERRIDE; |
275 | 244 |
276 static Condition TokenToCondition(Token::Value op, bool is_unsigned); | 245 static Condition TokenToCondition(Token::Value op); |
277 void EmitGoto(int block); | 246 void EmitGoto(int block); |
278 | 247 |
279 // EmitBranch expects to be the last instruction of a block. | 248 // EmitBranch expects to be the last instruction of a block. |
280 template<class InstrType> | 249 template <class InstrType> |
281 void EmitBranch(InstrType instr, Condition condition); | 250 void EmitBranch(InstrType instr, Condition condition, CRegister cr = cr7); |
282 template<class InstrType> | 251 template <class InstrType> |
283 void EmitFalseBranch(InstrType instr, Condition condition); | 252 void EmitFalseBranch(InstrType instr, Condition condition, |
284 void EmitNumberUntagD(Register input, | 253 CRegister cr = cr7); |
285 DwVfpRegister result, | 254 void EmitNumberUntagD(LNumberUntagD* instr, Register input, |
286 bool allow_undefined_as_nan, | 255 DoubleRegister result, NumberUntagDMode mode); |
287 bool deoptimize_on_minus_zero, | |
288 LEnvironment* env, | |
289 NumberUntagDMode mode); | |
290 | 256 |
291 // Emits optimized code for typeof x == "y". Modifies input register. | 257 // Emits optimized code for typeof x == "y". Modifies input register. |
292 // Returns the condition on which a final split to | 258 // Returns the condition on which a final split to |
293 // true and false label should be made, to optimize fallthrough. | 259 // true and false label should be made, to optimize fallthrough. |
294 Condition EmitTypeofIs(Label* true_label, | 260 Condition EmitTypeofIs(Label* true_label, Label* false_label, Register input, |
295 Label* false_label, | |
296 Register input, | |
297 Handle<String> type_name); | 261 Handle<String> type_name); |
298 | 262 |
299 // Emits optimized code for %_IsObject(x). Preserves input register. | 263 // Emits optimized code for %_IsObject(x). Preserves input register. |
300 // Returns the condition on which a final split to | 264 // Returns the condition on which a final split to |
301 // true and false label should be made, to optimize fallthrough. | 265 // true and false label should be made, to optimize fallthrough. |
302 Condition EmitIsObject(Register input, | 266 Condition EmitIsObject(Register input, Register temp1, Label* is_not_object, |
303 Register temp1, | |
304 Label* is_not_object, | |
305 Label* is_object); | 267 Label* is_object); |
306 | 268 |
307 // Emits optimized code for %_IsString(x). Preserves input register. | 269 // Emits optimized code for %_IsString(x). Preserves input register. |
308 // Returns the condition on which a final split to | 270 // Returns the condition on which a final split to |
309 // true and false label should be made, to optimize fallthrough. | 271 // true and false label should be made, to optimize fallthrough. |
310 Condition EmitIsString(Register input, | 272 Condition EmitIsString(Register input, Register temp1, Label* is_not_string, |
311 Register temp1, | |
312 Label* is_not_string, | |
313 SmiCheck check_needed); | 273 SmiCheck check_needed); |
314 | 274 |
315 // Emits optimized code for %_IsConstructCall(). | 275 // Emits optimized code for %_IsConstructCall(). |
316 // Caller should branch on equal condition. | 276 // Caller should branch on equal condition. |
317 void EmitIsConstructCall(Register temp1, Register temp2); | 277 void EmitIsConstructCall(Register temp1, Register temp2); |
318 | 278 |
319 // Emits optimized code to deep-copy the contents of statically known | 279 // Emits optimized code to deep-copy the contents of statically known |
320 // object graphs (e.g. object literal boilerplate). | 280 // object graphs (e.g. object literal boilerplate). |
321 void EmitDeepCopy(Handle<JSObject> object, | 281 void EmitDeepCopy(Handle<JSObject> object, Register result, Register source, |
322 Register result, | 282 int* offset, AllocationSiteMode mode); |
323 Register source, | |
324 int* offset, | |
325 AllocationSiteMode mode); | |
326 | 283 |
327 void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE; | 284 void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE; |
328 void DoLoadKeyedExternalArray(LLoadKeyed* instr); | 285 void DoLoadKeyedExternalArray(LLoadKeyed* instr); |
329 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr); | 286 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr); |
330 void DoLoadKeyedFixedArray(LLoadKeyed* instr); | 287 void DoLoadKeyedFixedArray(LLoadKeyed* instr); |
331 void DoStoreKeyedExternalArray(LStoreKeyed* instr); | 288 void DoStoreKeyedExternalArray(LStoreKeyed* instr); |
332 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr); | 289 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr); |
333 void DoStoreKeyedFixedArray(LStoreKeyed* instr); | 290 void DoStoreKeyedFixedArray(LStoreKeyed* instr); |
334 | 291 |
335 template <class T> | 292 template <class T> |
336 void EmitVectorLoadICRegisters(T* instr); | 293 void EmitVectorLoadICRegisters(T* instr); |
337 | 294 |
338 ZoneList<LEnvironment*> deoptimizations_; | 295 ZoneList<LEnvironment*> deoptimizations_; |
339 ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_; | 296 ZoneList<Deoptimizer::JumpTableEntry> jump_table_; |
340 ZoneList<Handle<Object> > deoptimization_literals_; | 297 ZoneList<Handle<Object> > deoptimization_literals_; |
341 int inlined_function_count_; | 298 int inlined_function_count_; |
342 Scope* const scope_; | 299 Scope* const scope_; |
343 TranslationBuffer translations_; | 300 TranslationBuffer translations_; |
344 ZoneList<LDeferredCode*> deferred_; | 301 ZoneList<LDeferredCode*> deferred_; |
345 int osr_pc_offset_; | 302 int osr_pc_offset_; |
346 bool frame_is_built_; | 303 bool frame_is_built_; |
347 | 304 |
348 // Builder that keeps track of safepoints in the code. The table | 305 // Builder that keeps track of safepoints in the code. The table |
349 // itself is emitted at the end of the generated code. | 306 // itself is emitted at the end of the generated code. |
(...skipping 53 matching lines...)
403 LCodeGen* codegen() const { return codegen_; } | 360 LCodeGen* codegen() const { return codegen_; } |
404 MacroAssembler* masm() const { return codegen_->masm(); } | 361 MacroAssembler* masm() const { return codegen_->masm(); } |
405 | 362 |
406 private: | 363 private: |
407 LCodeGen* codegen_; | 364 LCodeGen* codegen_; |
408 Label entry_; | 365 Label entry_; |
409 Label exit_; | 366 Label exit_; |
410 Label* external_exit_; | 367 Label* external_exit_; |
411 int instruction_index_; | 368 int instruction_index_; |
412 }; | 369 }; |
| 370 } |
| 371 } // namespace v8::internal |
413 | 372 |
414 } } // namespace v8::internal | 373 #endif // V8_PPC_LITHIUM_CODEGEN_PPC_H_ |
415 | |
416 #endif // V8_ARM_LITHIUM_CODEGEN_ARM_H_ | |
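
For reviewers comparing the two ports, a minimal call-site sketch of the DeoptimizeIf change may help. This fragment is not part of the patch; the compare instructions and the register name `value` are illustrative assumptions, shown only to match the signatures declared above.

  // ARM (old): the condition flags are set by a compare and the
  // LEnvironment is passed to DeoptimizeIf directly.
  __ cmp(value, Operand::Zero());
  DeoptimizeIf(eq, instr->environment());

  // PPC (new): compares target a condition register field (cr7 by
  // default) and the LInstruction is passed instead, matching the
  // DeoptimizeIf overloads introduced in this header.
  __ cmpi(value, Operand::Zero());
  DeoptimizeIf(eq, instr, cr7);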