OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 109 matching lines...)
120 Operand ToOperand(LOperand* op); | 120 Operand ToOperand(LOperand* op); |
121 Operand ToOperand32(LOperand* op); | 121 Operand ToOperand32(LOperand* op); |
122 MemOperand ToMemOperand(LOperand* op) const; | 122 MemOperand ToMemOperand(LOperand* op) const; |
123 Handle<Object> ToHandle(LConstantOperand* op) const; | 123 Handle<Object> ToHandle(LConstantOperand* op) const; |
124 | 124 |
125 // TODO(jbramley): Examine these helpers and check that they make sense. | 125 // TODO(jbramley): Examine these helpers and check that they make sense. |
126 // IsInteger32Constant returns true for smi constants, for example. | 126 // IsInteger32Constant returns true for smi constants, for example. |
127 bool IsInteger32Constant(LConstantOperand* op) const; | 127 bool IsInteger32Constant(LConstantOperand* op) const; |
128 bool IsSmi(LConstantOperand* op) const; | 128 bool IsSmi(LConstantOperand* op) const; |
129 | 129 |
130 int ToInteger32(LConstantOperand* op) const; | 130 int32_t ToInteger32(LConstantOperand* op) const; |
131 Smi* ToSmi(LConstantOperand* op) const; | 131 Smi* ToSmi(LConstantOperand* op) const; |
132 double ToDouble(LConstantOperand* op) const; | 132 double ToDouble(LConstantOperand* op) const; |
133 DoubleRegister ToDoubleRegister(LOperand* op) const; | 133 DoubleRegister ToDoubleRegister(LOperand* op) const; |
134 | 134 |
135 // Declare methods that deal with the individual node types. | 135 // Declare methods that deal with the individual node types. |
136 #define DECLARE_DO(type) void Do##type(L##type* node); | 136 #define DECLARE_DO(type) void Do##type(L##type* node); |
137 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) | 137 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO) |
138 #undef DECLARE_DO | 138 #undef DECLARE_DO |
139 | 139 |
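The DECLARE_DO block above is the usual X-macro pattern: LITHIUM_CONCRETE_INSTRUCTION_LIST applies DECLARE_DO to every concrete instruction name, so the class declares one Do<Type> handler per Lithium instruction. A minimal, self-contained sketch of the pattern (the instruction names are illustrative, not V8's actual list):

// X-macro sketch: the list macro invokes V once per instruction name.
#define EXAMPLE_INSTRUCTION_LIST(V) \
  V(Add)                            \
  V(Branch)

class LAdd;
class LBranch;

class ExampleCodeGen {
 public:
#define DECLARE_DO(type) void Do##type(L##type* node);
  EXAMPLE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
};
// Expands to:
//   void DoAdd(LAdd* node);
//   void DoBranch(LBranch* node);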
140 private: | 140 private: |
(...skipping 98 matching lines...)
239 LEnvironment* env); | 239 LEnvironment* env); |
240 | 240 |
241 void RegisterDependentCodeForEmbeddedMaps(Handle<Code> code); | 241 void RegisterDependentCodeForEmbeddedMaps(Handle<Code> code); |
242 int DefineDeoptimizationLiteral(Handle<Object> literal); | 242 int DefineDeoptimizationLiteral(Handle<Object> literal); |
243 void PopulateDeoptimizationData(Handle<Code> code); | 243 void PopulateDeoptimizationData(Handle<Code> code); |
244 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); | 244 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); |
245 | 245 |
246 void Deoptimize(LEnvironment* environment); | 246 void Deoptimize(LEnvironment* environment); |
247 void Deoptimize(LEnvironment* environment, | 247 void Deoptimize(LEnvironment* environment, |
248 Deoptimizer::BailoutType bailout_type); | 248 Deoptimizer::BailoutType bailout_type); |
249 void SoftDeoptimize(LEnvironment* environment); | |
250 void DeoptimizeIf(Condition cc, LEnvironment* environment); | 249 void DeoptimizeIf(Condition cc, LEnvironment* environment); |
251 void DeoptimizeIfZero(Register rt, LEnvironment* environment); | 250 void DeoptimizeIfZero(Register rt, LEnvironment* environment); |
252 void DeoptimizeIfNegative(Register rt, LEnvironment* environment); | 251 void DeoptimizeIfNegative(Register rt, LEnvironment* environment); |
253 void DeoptimizeIfSmi(Register rt, LEnvironment* environment); | 252 void DeoptimizeIfSmi(Register rt, LEnvironment* environment); |
254 void DeoptimizeIfNotSmi(Register rt, LEnvironment* environment); | 253 void DeoptimizeIfNotSmi(Register rt, LEnvironment* environment); |
255 void DeoptimizeIfRoot(Register rt, | 254 void DeoptimizeIfRoot(Register rt, |
256 Heap::RootListIndex index, | 255 Heap::RootListIndex index, |
257 LEnvironment* environment); | 256 LEnvironment* environment); |
258 void DeoptimizeIfNotRoot(Register rt, | 257 void DeoptimizeIfNotRoot(Register rt, |
259 Heap::RootListIndex index, | 258 Heap::RootListIndex index, |
260 LEnvironment* environment); | 259 LEnvironment* environment); |
| 260 void ApplyCheckIf(Condition cc, LBoundsCheck* check); |
261 | 261 |
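The DeoptimizeIf* family bundles a common test with a conditional bail-out so call sites stay short. A rough, hedged sketch of how such a helper could be layered on the generic DeoptimizeIf(Condition, LEnvironment*) (the real A64 implementation may instead emit a direct compare-and-branch such as Cbz):

void LCodeGen::DeoptimizeIfZero(Register rt, LEnvironment* environment) {
  // Sketch only: set flags on rt, then take the deopt exit when rt == 0.
  masm()->Cmp(rt, 0);
  DeoptimizeIf(eq, environment);
}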
262 MemOperand PrepareKeyedExternalArrayOperand(Register key, | 262 MemOperand PrepareKeyedExternalArrayOperand(Register key, |
263 Register base, | 263 Register base, |
264 Register scratch, | 264 Register scratch, |
265 bool key_is_smi, | 265 bool key_is_smi, |
266 bool key_is_constant, | 266 bool key_is_constant, |
267 int constant_key, | 267 int constant_key, |
268 int element_size_shift, | 268 int element_size_shift, |
269 int additional_index); | 269 int additional_index); |
270 void CalcKeyedArrayBaseRegister(Register base, | 270 void CalcKeyedArrayBaseRegister(Register base, |
(...skipping 209 matching lines...)
480 | 480 |
481 protected: | 481 protected: |
482 MacroAssembler* masm() const { return codegen_->masm(); } | 482 MacroAssembler* masm() const { return codegen_->masm(); } |
483 | 483 |
484 LCodeGen* codegen_; | 484 LCodeGen* codegen_; |
485 }; | 485 }; |
486 | 486 |
487 } } // namespace v8::internal | 487 } } // namespace v8::internal |
488 | 488 |
489 #endif // V8_A64_LITHIUM_CODEGEN_A64_H_ | 489 #endif // V8_A64_LITHIUM_CODEGEN_A64_H_ |