OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 131 matching lines...)
142 LOperand* value, | 142 LOperand* value, |
143 LOperand* temp1, | 143 LOperand* temp1, |
144 LOperand* temp2); | 144 LOperand* temp2); |
145 void DoDeferredTaggedToI(LTaggedToI* instr, | 145 void DoDeferredTaggedToI(LTaggedToI* instr, |
146 LOperand* value, | 146 LOperand* value, |
147 LOperand* temp1, | 147 LOperand* temp1, |
148 LOperand* temp2); | 148 LOperand* temp2); |
149 void DoDeferredAllocate(LAllocate* instr); | 149 void DoDeferredAllocate(LAllocate* instr); |
150 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr); | 150 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr); |
151 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); | 151 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); |
152 void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, | |
153 Register result, | |
154 Register object, | |
155 Register index); | |
156 | 152 |
157 Operand ToOperand32(LOperand* op, IntegerSignedness signedness); | 153 Operand ToOperand32(LOperand* op, IntegerSignedness signedness); |
158 | 154 |
159 static Condition TokenToCondition(Token::Value op, bool is_unsigned); | 155 static Condition TokenToCondition(Token::Value op, bool is_unsigned); |
160 void EmitGoto(int block); | 156 void EmitGoto(int block); |
161 void DoGap(LGap* instr); | 157 void DoGap(LGap* instr); |
162 | 158 |
163 // Generic version of EmitBranch. It contains some code to avoid emitting a | 159 // Generic version of EmitBranch. It contains some code to avoid emitting a |
164 // branch on the next emitted basic block where we could just fall through. | 160 // branch on the next emitted basic block where we could just fall through. |
165 // You shouldn't use it directly but rather consider one of the helpers like | 161 // You shouldn't use it directly but rather consider one of the helpers like |
(...skipping 319 matching lines...)
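
[Reviewer note] The EmitBranch comment above describes the fall-through
optimization: when one successor of a conditional branch is the very next
basic block in emission order, the jump to it can be omitted and the code
simply falls through. A minimal sketch of the idea in isolation (printf
stands in for the assembler; all names here are illustrative, not V8's
actual API):

    #include <cstdio>

    struct BranchInfo {
      const char* cond;      // condition code, e.g. "eq"
      const char* neg_cond;  // its negation, e.g. "ne"
      int true_block;        // block taken when cond holds
      int false_block;       // block taken otherwise
    };

    // Emit at most one branch: if a successor is the next block to be
    // emitted, fall through to it instead of jumping.
    void EmitBranchGeneric(const BranchInfo& b, int next_block) {
      if (b.true_block == next_block) {
        // Fall through to the true block; branch only to the false one,
        // on the negated condition.
        std::printf("b.%s block%d\n", b.neg_cond, b.false_block);
      } else if (b.false_block == next_block) {
        // Fall through to the false block; branch only to the true one.
        std::printf("b.%s block%d\n", b.cond, b.true_block);
      } else {
        // Neither successor follows: conditional branch plus an
        // unconditional jump.
        std::printf("b.%s block%d\n", b.cond, b.true_block);
        std::printf("b block%d\n", b.false_block);
      }
    }
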
485 | 481 |
486 protected: | 482 protected: |
487 MacroAssembler* masm() const { return codegen_->masm(); } | 483 MacroAssembler* masm() const { return codegen_->masm(); } |
488 | 484 |
489 LCodeGen* codegen_; | 485 LCodeGen* codegen_; |
490 }; | 486 }; |
491 | 487 |
492 } } // namespace v8::internal | 488 } } // namespace v8::internal |
493 | 489 |
494 #endif // V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ | 490 #endif // V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ |