OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 131 matching lines...)
142 LOperand* value, | 142 LOperand* value, |
143 LOperand* temp1, | 143 LOperand* temp1, |
144 LOperand* temp2); | 144 LOperand* temp2); |
145 void DoDeferredTaggedToI(LTaggedToI* instr, | 145 void DoDeferredTaggedToI(LTaggedToI* instr, |
146 LOperand* value, | 146 LOperand* value, |
147 LOperand* temp1, | 147 LOperand* temp1, |
148 LOperand* temp2); | 148 LOperand* temp2); |
149 void DoDeferredAllocate(LAllocate* instr); | 149 void DoDeferredAllocate(LAllocate* instr); |
150 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr); | 150 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr); |
151 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); | 151 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); |
| 152 void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, |
| 153 Register result, |
| 154 Register object, |
| 155 Register index); |
152 | 156 |
153 Operand ToOperand32(LOperand* op, IntegerSignedness signedness); | 157 Operand ToOperand32(LOperand* op, IntegerSignedness signedness); |
154 | 158 |
155 static Condition TokenToCondition(Token::Value op, bool is_unsigned); | 159 static Condition TokenToCondition(Token::Value op, bool is_unsigned); |
156 void EmitGoto(int block); | 160 void EmitGoto(int block); |
157 void DoGap(LGap* instr); | 161 void DoGap(LGap* instr); |
158 | 162 |
159 // Generic version of EmitBranch. It contains some code to avoid emitting a | 163 // Generic version of EmitBranch. It contains some code to avoid emitting a |
160 // branch to the next emitted basic block, where we could just fall through. | 164 // branch to the next emitted basic block, where we could just fall through. |
161 // You shouldn't use it directly; rather, consider one of the helpers like | 165 // You shouldn't use it directly; rather, consider one of the helpers like |
(...skipping 319 matching lines...)
481 | 485 |
482 protected: | 486 protected: |
483 MacroAssembler* masm() const { return codegen_->masm(); } | 487 MacroAssembler* masm() const { return codegen_->masm(); } |
484 | 488 |
485 LCodeGen* codegen_; | 489 LCodeGen* codegen_; |
486 }; | 490 }; |
487 | 491 |
488 } } // namespace v8::internal | 492 } } // namespace v8::internal |
489 | 493 |
490 #endif // V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ | 494 #endif // V8_ARM64_LITHIUM_CODEGEN_ARM64_H_ |
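
The substantive change in the first hunk is the new DoDeferredLoadMutableDouble hook for LLoadFieldByIndex. As a rough illustration of what such a deferred path is responsible for, here is a minimal C++ sketch; it uses hypothetical stand-in types (HeapNumber, Object) and a hypothetical AllocateHeapNumber helper, and is not V8's actual codegen, which emits ARM64 machine code through the macro assembler. The idea is that a double field stored in a mutable heap-number box must be copied into a fresh box before it escapes, so the caller never aliases the mutable storage.

#include <cstdio>

// Hypothetical stand-ins for V8-internal types; illustration only.
struct HeapNumber { double value; };
struct Object { HeapNumber* in_object_fields[4]; };

// Hypothetical allocation helper, standing in for the allocation the real
// deferred code performs (falling back to a runtime call when it fails).
static HeapNumber* AllocateHeapNumber(double value) {
  return new HeapNumber{value};
}

// Sketch of the deferred path's job: unbox the mutable HeapNumber stored
// at `index` and re-box its current value in a fresh HeapNumber, so the
// caller never holds a pointer to the mutable field storage.
static HeapNumber* LoadMutableDouble(Object* object, int index) {
  double raw = object->in_object_fields[index]->value;
  return AllocateHeapNumber(raw);
}

int main() {
  Object o{};
  HeapNumber field{3.5};
  o.in_object_fields[0] = &field;
  HeapNumber* result = LoadMutableDouble(&o, 0);
  field.value = 7.0;                   // a later in-place mutation...
  std::printf("%f\n", result->value);  // ...does not affect the copy: 3.5
  delete result;
  return 0;
}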
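
Similarly, the EmitBranch comment in the first hunk describes the fall-through optimization shared by EmitGoto and the EmitBranch helpers: when the target of an unconditional jump is the very next basic block to be emitted, no branch instruction is needed. A minimal sketch of that check, assuming a hypothetical EmitJump that stands in for the macro assembler's branch:

#include <cstdio>

// Hypothetical stand-in for the macro assembler's unconditional branch.
static void EmitJump(int block) { std::printf("  b block_%d\n", block); }

// Sketch of the fall-through check: emit a branch only when the target
// is not the block that will be emitted immediately after this one.
static void EmitGoto(int target_block, int next_emitted_block) {
  if (target_block != next_emitted_block) {
    EmitJump(target_block);
  }
  // Otherwise emit nothing: execution falls through into the next block.
}

int main() {
  EmitGoto(3, 3);  // fall-through case: no instruction emitted
  EmitGoto(7, 3);  // emits an unconditional branch to block 7
  return 0;
}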