| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 72 matching lines...) |
| 83 info()->requires_frame(); | 83 info()->requires_frame(); |
| 84 } | 84 } |
| 85 bool NeedsDeferredFrame() const { | 85 bool NeedsDeferredFrame() const { |
| 86 return !NeedsEagerFrame() && info()->is_deferred_calling(); | 86 return !NeedsEagerFrame() && info()->is_deferred_calling(); |
| 87 } | 87 } |
| 88 | 88 |
| 89 // Support for converting LOperands to assembler types. | 89 // Support for converting LOperands to assembler types. |
| 90 Operand ToOperand(LOperand* op) const; | 90 Operand ToOperand(LOperand* op) const; |
| 91 Register ToRegister(LOperand* op) const; | 91 Register ToRegister(LOperand* op) const; |
| 92 XMMRegister ToDoubleRegister(LOperand* op) const; | 92 XMMRegister ToDoubleRegister(LOperand* op) const; |
| | 93 XMMRegister ToFloat32x4Register(LOperand* op) const; |
| | 94 XMMRegister ToInt32x4Register(LOperand* op) const; |
| | 95 XMMRegister ToXMMRegister(LOperand* op) const; |
| 93 X87Register ToX87Register(LOperand* op) const; | 96 X87Register ToX87Register(LOperand* op) const; |
| 94 | 97 |
| 95 bool IsInteger32(LConstantOperand* op) const; | 98 bool IsInteger32(LConstantOperand* op) const; |
| 96 bool IsSmi(LConstantOperand* op) const; | 99 bool IsSmi(LConstantOperand* op) const; |
| 97 Immediate ToImmediate(LOperand* op, const Representation& r) const { | 100 Immediate ToImmediate(LOperand* op, const Representation& r) const { |
| 98 return Immediate(ToRepresentation(LConstantOperand::cast(op), r)); | 101 return Immediate(ToRepresentation(LConstantOperand::cast(op), r)); |
| 99 } | 102 } |
| 100 double ToDouble(LConstantOperand* op) const; | 103 double ToDouble(LConstantOperand* op) const; |
| 101 | 104 |
| 102 // Support for non-sse2 (x87) floating point stack handling. | 105 // Support for non-sse2 (x87) floating point stack handling. |
| (...skipping 43 matching lines...) |
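The three conversion helpers added above all return XMMRegister: on IA-32 the SSE register file holds doubles as well as the 128-bit float32x4/int32x4 values this patch introduces, so no new register class is needed. Their definitions live in lithium-codegen-ia32.cc, which is outside this hunk; the following is a minimal sketch, assuming they follow the existing ToDoubleRegister() pattern of an operand-kind assert followed by an allocation-index lookup. The IsFloat32x4Register()/IsInt32x4Register() predicates are assumed to be added elsewhere in the patch.

```cpp
// Hypothetical sketch only -- not the patch's actual .cc bodies.
XMMRegister LCodeGen::ToFloat32x4Register(LOperand* op) const {
  ASSERT(op->IsFloat32x4Register());                      // assumed new operand kind
  return XMMRegister::FromAllocationIndex(op->index());   // same mapping used for doubles
}

XMMRegister LCodeGen::ToInt32x4Register(LOperand* op) const {
  ASSERT(op->IsInt32x4Register());                        // assumed new operand kind
  return XMMRegister::FromAllocationIndex(op->index());
}
```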
| 146 | 149 |
| 147 // Deferred code support. | 150 // Deferred code support. |
| 148 void DoDeferredNumberTagD(LNumberTagD* instr); | 151 void DoDeferredNumberTagD(LNumberTagD* instr); |
| 149 | 152 |
| 150 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 }; | 153 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 }; |
| 151 void DoDeferredNumberTagI(LInstruction* instr, | 154 void DoDeferredNumberTagI(LInstruction* instr, |
| 152 LOperand* value, | 155 LOperand* value, |
| 153 IntegerSignedness signedness); | 156 IntegerSignedness signedness); |
| 154 | 157 |
| 155 void DoDeferredTaggedToI(LTaggedToI* instr, Label* done); | 158 void DoDeferredTaggedToI(LTaggedToI* instr, Label* done); |
| | 159 void DoDeferredFloat32x4ToTagged(LInstruction* instr); |
| | 160 void DoDeferredInt32x4ToTagged(LInstruction* instr); |
| 156 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr); | 161 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr); |
| 157 void DoDeferredStackCheck(LStackCheck* instr); | 162 void DoDeferredStackCheck(LStackCheck* instr); |
| 158 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr); | 163 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr); |
| 159 void DoDeferredStringCharFromCode(LStringCharFromCode* instr); | 164 void DoDeferredStringCharFromCode(LStringCharFromCode* instr); |
| 160 void DoDeferredAllocate(LAllocate* instr); | 165 void DoDeferredAllocate(LAllocate* instr); |
| 161 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | 166 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
| 162 Label* map_check); | 167 Label* map_check); |
| 163 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); | 168 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); |
| 164 | 169 |
| 165 // Parallel move support. | 170 // Parallel move support. |
| (...skipping 117 matching lines...) |
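The new DoDeferredFloat32x4ToTagged()/DoDeferredInt32x4ToTagged() entries are the slow paths for boxing an XMM-resident SIMD value into a heap object when the fast-path inline allocation fails, analogous to DoDeferredNumberTagD() above. Their bodies are not part of this header; as a purely conceptual, standalone illustration of what "to tagged" boxing means (not V8's allocator or runtime calls), consider:

```cpp
// Standalone conceptual illustration (not V8 code). Compile with SSE enabled.
#include <xmmintrin.h>   // __m128 and SSE store intrinsics
#include <cstdio>

struct BoxedFloat32x4 {   // stand-in for a heap-allocated wrapper object
  float lanes[4];
};

BoxedFloat32x4* Float32x4ToTagged(__m128 value) {
  BoxedFloat32x4* boxed = new BoxedFloat32x4;   // the "deferred" heap allocation
  _mm_storeu_ps(boxed->lanes, value);           // spill the XMM payload into the box
  return boxed;                                 // V8 would return a tagged pointer instead
}

int main() {
  __m128 v = _mm_set_ps(4.0f, 3.0f, 2.0f, 1.0f);  // lanes 3..0
  BoxedFloat32x4* b = Float32x4ToTagged(v);
  std::printf("%g %g %g %g\n", b->lanes[0], b->lanes[1], b->lanes[2], b->lanes[3]);
  delete b;
  return 0;
}
```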
| 283 int* object_index_pointer, | 288 int* object_index_pointer, |
| 284 int* dematerialized_index_pointer); | 289 int* dematerialized_index_pointer); |
| 285 void RegisterDependentCodeForEmbeddedMaps(Handle<Code> code); | 290 void RegisterDependentCodeForEmbeddedMaps(Handle<Code> code); |
| 286 void PopulateDeoptimizationData(Handle<Code> code); | 291 void PopulateDeoptimizationData(Handle<Code> code); |
| 287 int DefineDeoptimizationLiteral(Handle<Object> literal); | 292 int DefineDeoptimizationLiteral(Handle<Object> literal); |
| 288 | 293 |
| 289 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); | 294 void PopulateDeoptimizationLiteralsWithInlinedFunctions(); |
| 290 | 295 |
| 291 Register ToRegister(int index) const; | 296 Register ToRegister(int index) const; |
| 292 XMMRegister ToDoubleRegister(int index) const; | 297 XMMRegister ToDoubleRegister(int index) const; |
| | 298 XMMRegister ToFloat32x4Register(int index) const; |
| | 299 XMMRegister ToInt32x4Register(int index) const; |
| | 300 XMMRegister ToXMMRegister(int index) const; |
| 293 X87Register ToX87Register(int index) const; | 301 X87Register ToX87Register(int index) const; |
| 294 int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const; | 302 int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const; |
| 295 int32_t ToInteger32(LConstantOperand* op) const; | 303 int32_t ToInteger32(LConstantOperand* op) const; |
| 296 ExternalReference ToExternalReference(LConstantOperand* op) const; | 304 ExternalReference ToExternalReference(LConstantOperand* op) const; |
| 297 | 305 |
| 298 Operand BuildFastArrayOperand(LOperand* elements_pointer, | 306 Operand BuildFastArrayOperand(LOperand* elements_pointer, |
| 299 LOperand* key, | 307 LOperand* key, |
| 300 Representation key_representation, | 308 Representation key_representation, |
| 301 ElementsKind elements_kind, | 309 ElementsKind elements_kind, |
| 302 uint32_t offset, | 310 uint32_t offset, |
| (...skipping 235 matching lines...) |
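The private int-index overloads added in the hunk above (ToFloat32x4Register(int), ToInt32x4Register(int), ToXMMRegister(int)) mirror the existing ToDoubleRegister(int index) helper. A minimal sketch, assuming they reduce to the same allocation-index mapping used for doubles (the actual bodies are in lithium-codegen-ia32.cc and may differ):

```cpp
// Hypothetical sketch only.
XMMRegister LCodeGen::ToXMMRegister(int index) const {
  return XMMRegister::FromAllocationIndex(index);  // allocation index -> xmm0..xmm7
}
```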
| 538 Label exit_; | 546 Label exit_; |
| 539 Label* external_exit_; | 547 Label* external_exit_; |
| 540 Label done_; | 548 Label done_; |
| 541 int instruction_index_; | 549 int instruction_index_; |
| 542 LCodeGen::X87Stack x87_stack_; | 550 LCodeGen::X87Stack x87_stack_; |
| 543 }; | 551 }; |
| 544 | 552 |
| 545 } } // namespace v8::internal | 553 } } // namespace v8::internal |
| 546 | 554 |
| 547 #endif // V8_IA32_LITHIUM_CODEGEN_IA32_H_ | 555 #endif // V8_IA32_LITHIUM_CODEGEN_IA32_H_ |