Chromium Code Reviews

Diff: src/crankshaft/arm64/lithium-codegen-arm64.cc

Issue 2896303003: Reland of Reland of "ARM64: Add NEON support" (Closed)
Patch Set: Created 3 years, 7 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/crankshaft/arm64/lithium-codegen-arm64.h"
 
 #include "src/arm64/frames-arm64.h"
 #include "src/arm64/macro-assembler-arm64-inl.h"
 #include "src/base/bits.h"
 #include "src/builtins/builtins-constructor.h"
(...skipping 161 matching lines...)
  private:
   Condition cond_;
   const Register& value_;
   uint64_t mask_;
 };
 
 
 // Test the input and branch if it is non-zero and not a NaN.
 class BranchIfNonZeroNumber : public BranchGenerator {
  public:
-  BranchIfNonZeroNumber(LCodeGen* codegen, const FPRegister& value,
-                        const FPRegister& scratch)
-      : BranchGenerator(codegen), value_(value), scratch_(scratch) { }
+  BranchIfNonZeroNumber(LCodeGen* codegen, const VRegister& value,
+                        const VRegister& scratch)
+      : BranchGenerator(codegen), value_(value), scratch_(scratch) {}
 
   virtual void Emit(Label* label) const {
     __ Fabs(scratch_, value_);
     // Compare with 0.0. Because scratch_ is positive, the result can be one of
     // nZCv (equal), nzCv (greater) or nzCV (unordered).
     __ Fcmp(scratch_, 0.0);
     __ B(gt, label);
   }
 
   virtual void EmitInverted(Label* label) const {
     __ Fabs(scratch_, value_);
     __ Fcmp(scratch_, 0.0);
     __ B(le, label);
   }
 
  private:
-  const FPRegister& value_;
-  const FPRegister& scratch_;
+  const VRegister& value_;
+  const VRegister& scratch_;
 };
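The Emit/EmitInverted pair above leans on an AArch64 flag subtlety: after Fabs the operand is non-negative, so a single Fcmp against 0.0 distinguishes zero (equal), a non-zero number (greater), and NaN (unordered), and only the "greater" case should branch. EmitInverted takes the complementary le branch, which also fires for the unordered (NaN) flag setting. A minimal C++ sketch of the predicate the generated sequence implements (illustrative only, not part of the patch):

    // Sketch: the branch condition computed by BranchIfNonZeroNumber::Emit.
    // fabs(NaN) is NaN, and any comparison involving NaN is false ("unordered"),
    // so the "greater than 0.0" test rejects 0.0, -0.0 and NaN alike.
    #include <cmath>

    bool ShouldBranchIfNonZeroNumber(double value) {
      double magnitude = std::fabs(value);  // mirrors __ Fabs(scratch_, value_)
      return magnitude > 0.0;               // mirrors __ Fcmp(scratch_, 0.0); __ B(gt, ...)
    }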
 
 
 // Test the input and branch if it is a heap number.
 class BranchIfHeapNumber : public BranchGenerator {
  public:
   BranchIfHeapNumber(LCodeGen* codegen, const Register& value)
       : BranchGenerator(codegen), value_(value) { }
 
   virtual void Emit(Label* label) const {
(...skipping 327 matching lines...)
 void LCodeGen::SaveCallerDoubles() {
   DCHECK(info()->saves_caller_doubles());
   DCHECK(NeedsEagerFrame());
   Comment(";;; Save clobbered callee double registers");
   BitVector* doubles = chunk()->allocated_double_registers();
   BitVector::Iterator iterator(doubles);
   int count = 0;
   while (!iterator.Done()) {
     // TODO(all): Is this supposed to save just the callee-saved doubles? It
     // looks like it's saving all of them.
-    FPRegister value = FPRegister::from_code(iterator.Current());
+    VRegister value = VRegister::from_code(iterator.Current());
     __ Poke(value, count * kDoubleSize);
     iterator.Advance();
     count++;
   }
 }
 
 
 void LCodeGen::RestoreCallerDoubles() {
   DCHECK(info()->saves_caller_doubles());
   DCHECK(NeedsEagerFrame());
   Comment(";;; Restore clobbered callee double registers");
   BitVector* doubles = chunk()->allocated_double_registers();
   BitVector::Iterator iterator(doubles);
   int count = 0;
   while (!iterator.Done()) {
     // TODO(all): Is this supposed to restore just the callee-saved doubles? It
     // looks like it's restoring all of them.
-    FPRegister value = FPRegister::from_code(iterator.Current());
+    VRegister value = VRegister::from_code(iterator.Current());
     __ Peek(value, count * kDoubleSize);
     iterator.Advance();
     count++;
   }
 }
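The only invariant these two loops share is that save and restore walk the allocated double registers in the same BitVector order, so the Peek at count * kDoubleSize reads back exactly what the matching Poke wrote. A self-contained sketch of that pairing, with the register file and stack reduced to plain containers (names here are illustrative, not V8 API):

    #include <cstring>
    #include <map>
    #include <vector>

    constexpr int kDoubleSize = 8;  // bytes per saved double slot

    // Stand-in for SaveCallerDoubles: write each allocated register's value
    // at offset count * kDoubleSize, in iteration order (the "Poke").
    void SaveDoubles(const std::vector<int>& codes,
                     const std::map<int, double>& regs,
                     std::vector<unsigned char>* stack) {
      stack->resize(codes.size() * kDoubleSize);
      int count = 0;
      for (int code : codes) {
        std::memcpy(stack->data() + count * kDoubleSize, &regs.at(code), kDoubleSize);
        count++;
      }
    }

    // Stand-in for RestoreCallerDoubles: read the same offsets in the same
    // order (the "Peek"), so each register gets back the value it had saved.
    void RestoreDoubles(const std::vector<int>& codes,
                        const std::vector<unsigned char>& stack,
                        std::map<int, double>* regs) {
      int count = 0;
      for (int code : codes) {
        std::memcpy(&(*regs)[code], stack.data() + count * kDoubleSize, kDoubleSize);
        count++;
      }
    }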
 
 
 bool LCodeGen::GeneratePrologue() {
   DCHECK(is_generating());
 
(...skipping 547 matching lines...)
     // references the end of the double registers and not the end of the stack
     // slots.
     // In both of the cases above, we _could_ add the tracking information
     // required so that we can use jssp here, but in practice it isn't worth it.
     if ((stack_mode == kCanUseStackPointer) &&
         !info()->saves_caller_doubles()) {
       int jssp_offset_to_fp =
           (pushed_arguments_ + GetTotalFrameSlotCount()) * kPointerSize -
           StandardFrameConstants::kFixedFrameSizeAboveFp;
       int jssp_offset = fp_offset + jssp_offset_to_fp;
-      if (masm()->IsImmLSScaled(jssp_offset, LSDoubleWord)) {
+      if (masm()->IsImmLSScaled(jssp_offset, kPointerSizeLog2)) {
         return MemOperand(masm()->StackPointer(), jssp_offset);
       }
     }
     return MemOperand(fp, fp_offset);
   } else {
     // Retrieve parameter without eager stack-frame relative to the
     // stack-pointer.
     return MemOperand(masm()->StackPointer(),
                       ArgumentsOffsetWithoutFrame(op->index()));
   }
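The IsImmLSScaled guard changes its second argument here from the LSDoubleWord enum to kPointerSizeLog2, i.e. the log2 of the access size in bytes. The property being checked is an AArch64 encoding constraint: an LDR/STR with an unsigned scaled immediate can only encode offsets that are a non-negative multiple of the access size whose scaled value fits in 12 bits. A hedged sketch of that check (names and bounds are illustrative, not the V8 implementation):

    #include <cstdint>

    // True if 'offset' can be encoded as an unsigned scaled immediate for a
    // load/store whose access size is (1 << size_log2) bytes.
    bool FitsScaledImmediate(int64_t offset, unsigned size_log2) {
      bool is_size_multiple = ((offset >> size_log2) << size_log2) == offset;
      bool fits_in_12_bits = (offset >= 0) && ((offset >> size_log2) < (1 << 12));
      return is_size_multiple && fits_in_12_bits;
    }

    // Example: with 8-byte slots (size_log2 == 3), an offset of 24 encodes
    // directly (scaled value 3), while 20 does not (not a multiple of 8) and
    // would force the fp-relative MemOperand fallback.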
(...skipping 118 matching lines...)
 template<class InstrType>
 void LCodeGen::EmitTestAndBranch(InstrType instr,
                                  Condition condition,
                                  const Register& value,
                                  uint64_t mask) {
   DCHECK((condition != al) && (condition != nv));
   TestAndBranch branch(this, condition, value, mask);
   EmitBranchGeneric(instr, branch);
 }
 
-
-template<class InstrType>
+template <class InstrType>
 void LCodeGen::EmitBranchIfNonZeroNumber(InstrType instr,
-                                         const FPRegister& value,
-                                         const FPRegister& scratch) {
+                                         const VRegister& value,
+                                         const VRegister& scratch) {
   BranchIfNonZeroNumber branch(this, value, scratch);
   EmitBranchGeneric(instr, branch);
 }
 
 
 template<class InstrType>
 void LCodeGen::EmitBranchIfHeapNumber(InstrType instr,
                                       const Register& value) {
   BranchIfHeapNumber branch(this, value);
   EmitBranchGeneric(instr, branch);
(...skipping 980 matching lines...)
   // The name in the constructor is internalized because of the way the context
   // is booted. This routine isn't expected to work for random API-created
   // classes and it doesn't have to because you can't access it with natives
   // syntax. Since both sides are internalized it is sufficient to use an
   // identity comparison.
   EmitCompareAndBranch(instr, eq, scratch1, Operand(class_name));
 }
 
 void LCodeGen::DoCmpHoleAndBranchD(LCmpHoleAndBranchD* instr) {
   DCHECK(instr->hydrogen()->representation().IsDouble());
-  FPRegister object = ToDoubleRegister(instr->object());
+  VRegister object = ToDoubleRegister(instr->object());
   Register temp = ToRegister(instr->temp());
 
   // If we don't have a NaN, we don't have the hole, so branch now to avoid the
   // (relatively expensive) hole-NaN check.
   __ Fcmp(object, object);
   __ B(vc, instr->FalseLabel(chunk_));
 
   // We have a NaN, but is it the hole?
   __ Fmov(temp, object);
   EmitCompareAndBranch(instr, eq, temp, kHoleNanInt64);
(...skipping 976 matching lines...)
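DoCmpHoleAndBranchD above works because the hole is represented as one specific NaN bit pattern (kHoleNanInt64): a self-comparison is ordered for every real number, so B(vc, ...) rejects non-NaNs cheaply, and only genuine NaNs pay for the raw-bit comparison. A hedged C++ sketch of the same check (the constant below is a placeholder, not V8's actual hole pattern):

    #include <cstdint>
    #include <cstring>

    constexpr uint64_t kIllustrativeHoleNaN = 0x7FF7FFFFFFFFFFFFull;  // assumption

    bool IsHole(double object) {
      if (object == object) return false;  // ordered => a real number, not the hole
      uint64_t bits;
      std::memcpy(&bits, &object, sizeof(bits));  // mirrors __ Fmov(temp, object)
      return bits == kIllustrativeHoleNaN;        // mirrors the eq compare-and-branch
    }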
   Register object = ToRegister(instr->object());
 
   if (access.IsExternalMemory()) {
     Register result = ToRegister(instr->result());
     __ Load(result, MemOperand(object, offset), access.representation());
     return;
   }
 
   if (instr->hydrogen()->representation().IsDouble()) {
     DCHECK(access.IsInobject());
-    FPRegister result = ToDoubleRegister(instr->result());
+    VRegister result = ToDoubleRegister(instr->result());
     __ Ldr(result, FieldMemOperand(object, offset));
     return;
   }
 
   Register result = ToRegister(instr->result());
   Register source;
   if (access.IsInobject()) {
     source = object;
   } else {
     // Load the properties array, using result as a scratch register.
(...skipping 139 matching lines...)
   // never get set by the negation. This is therefore the same as the Integer32
   // case in DoMathAbs, except that it operates on 64-bit values.
   STATIC_ASSERT((kSmiValueSize == 32) && (kSmiShift == 32) && (kSmiTag == 0));
 
   __ JumpIfNotSmi(input, deferred->entry());
 
   __ Abs(result, input, NULL, &done);
 
   // The result is the magnitude (abs) of the smallest value a smi can
   // represent, encoded as a double.
-  __ Mov(result_bits, double_to_rawbits(0x80000000));
+  __ Mov(result_bits, bit_cast<uint64_t>(static_cast<double>(0x80000000)));
   __ B(deferred->allocation_entry());
 
   __ Bind(deferred->exit());
   __ Str(result_bits, FieldMemOperand(result, HeapNumber::kValueOffset));
 
   __ Bind(&done);
 }
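The rewritten line swaps the old double_to_rawbits helper for bit_cast, but the value loaded into result_bits is unchanged: the magnitude of the smallest smi is 2^31, and its IEEE-754 double encoding is 0x41E0000000000000. A quick self-contained check (bit_cast approximated with memcpy; not V8 code):

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    // Reinterpret a double's storage as a 64-bit integer, like bit_cast<uint64_t>.
    uint64_t DoubleToRawBits(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof(bits));
      return bits;
    }

    int main() {
      // 2^31 = 2147483648.0: sign 0, biased exponent 1054 (0x41E), mantissa 0.
      uint64_t bits = DoubleToRawBits(static_cast<double>(0x80000000u));
      assert(bits == 0x41E0000000000000ull);
      return 0;
    }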
 
 void LCodeGen::DoMathCos(LMathCos* instr) {
   DCHECK(instr->IsMarkedAsCall());
(...skipping 1521 matching lines...)
     __ Store(value, MemOperand(object, offset), representation);
     return;
   }
 
   __ AssertNotSmi(object);
 
   if (!FLAG_unbox_double_fields && representation.IsDouble()) {
     DCHECK(access.IsInobject());
     DCHECK(!instr->hydrogen()->has_transition());
     DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
-    FPRegister value = ToDoubleRegister(instr->value());
+    VRegister value = ToDoubleRegister(instr->value());
     __ Str(value, FieldMemOperand(object, offset));
     return;
   }
 
   DCHECK(!representation.IsSmi() ||
          !instr->value()->IsConstantOperand() ||
          IsInteger32Constant(LConstantOperand::cast(instr->value())));
 
   if (instr->hydrogen()->has_transition()) {
     Handle<Map> transition = instr->hydrogen()->transition_map();
(...skipping 17 matching lines...)
   if (access.IsInobject()) {
     destination = object;
   } else {
     Register temp0 = ToRegister(instr->temp0());
     __ Ldr(temp0, FieldMemOperand(object, JSObject::kPropertiesOffset));
     destination = temp0;
   }
 
   if (FLAG_unbox_double_fields && representation.IsDouble()) {
     DCHECK(access.IsInobject());
-    FPRegister value = ToDoubleRegister(instr->value());
+    VRegister value = ToDoubleRegister(instr->value());
     __ Str(value, FieldMemOperand(object, offset));
   } else if (representation.IsSmi() &&
              instr->hydrogen()->value()->representation().IsInteger32()) {
     DCHECK(instr->hydrogen()->store_mode() == STORE_TO_INITIALIZED_ENTRY);
 #ifdef DEBUG
     Register temp0 = ToRegister(instr->temp0());
     __ Ldr(temp0, FieldMemOperand(destination, offset));
     __ AssertSmi(temp0);
     // If destination aliased temp0, restore it to the address calculated
     // earlier.
(...skipping 553 matching lines...)
   // Index is equal to negated out of object property index plus 1.
   __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ Ldr(result, FieldMemOperand(result,
                                  FixedArray::kHeaderSize - kPointerSize));
   __ Bind(deferred->exit());
   __ Bind(&done);
 }
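The arithmetic in the chunk above: an out-of-object property stored at slot p of the properties FixedArray is encoded as the negative index -(p + 1), so subtracting the untagged, pointer-scaled index and then loading at kHeaderSize - kPointerSize lands exactly on slot p. A hedged sketch of that offset computation, with illustrative constants standing in for V8's values (and the heap-object tag ignored for simplicity):

    #include <cassert>

    constexpr int kPointerSize = 8;
    constexpr int kFixedArrayHeaderSize = 16;  // assumption for illustration

    // Returns the byte offset (from the properties array base) that the
    // Sub + Ldr pair ends up reading for a given encoded index == -(p + 1).
    int OutOfObjectSlotOffset(int encoded_index) {
      int after_sub = 0 - encoded_index * kPointerSize;        // __ Sub with a negative index
      return after_sub + kFixedArrayHeaderSize - kPointerSize; // final load offset
    }

    int main() {
      // Slot p = 2 is encoded as -3 and must map to kHeaderSize + 2 * kPointerSize.
      assert(OutOfObjectSlotOffset(-3) == kFixedArrayHeaderSize + 2 * kPointerSize);
      return 0;
    }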
 
 }  // namespace internal
 }  // namespace v8
