Chromium Code Reviews

Unified Diff: src/crankshaft/arm64/lithium-codegen-arm64.cc

Issue 2819093002: Revert "Reland "ARM64: Add NEON support"" (Closed)
Patch Set: Created 3 years, 8 months ago
@@ -1,10 +1,10 @@
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/crankshaft/arm64/lithium-codegen-arm64.h"
 
 #include "src/arm64/frames-arm64.h"
 #include "src/arm64/macro-assembler-arm64-inl.h"
 #include "src/base/bits.h"
 #include "src/builtins/builtins-constructor.h"
(...skipping 161 matching lines...)
@@ -172,41 +172,41 @@
  private:
   Condition cond_;
   const Register& value_;
   uint64_t mask_;
 };
 
 
 // Test the input and branch if it is non-zero and not a NaN.
 class BranchIfNonZeroNumber : public BranchGenerator {
  public:
-  BranchIfNonZeroNumber(LCodeGen* codegen, const VRegister& value,
-                        const VRegister& scratch)
-      : BranchGenerator(codegen), value_(value), scratch_(scratch) {}
+  BranchIfNonZeroNumber(LCodeGen* codegen, const FPRegister& value,
+                        const FPRegister& scratch)
+      : BranchGenerator(codegen), value_(value), scratch_(scratch) { }
 
   virtual void Emit(Label* label) const {
     __ Fabs(scratch_, value_);
     // Compare with 0.0. Because scratch_ is positive, the result can be one of
     // nZCv (equal), nzCv (greater) or nzCV (unordered).
     __ Fcmp(scratch_, 0.0);
     __ B(gt, label);
   }
 
   virtual void EmitInverted(Label* label) const {
     __ Fabs(scratch_, value_);
     __ Fcmp(scratch_, 0.0);
     __ B(le, label);
   }
 
  private:
-  const VRegister& value_;
-  const VRegister& scratch_;
+  const FPRegister& value_;
+  const FPRegister& scratch_;
 };
 
 
 // Test the input and branch if it is a heap number.
 class BranchIfHeapNumber : public BranchGenerator {
  public:
   BranchIfHeapNumber(LCodeGen* codegen, const Register& value)
       : BranchGenerator(codegen), value_(value) { }
 
   virtual void Emit(Label* label) const {
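
The nZCv/nzCv/nzCV comment above is the whole trick in BranchIfNonZeroNumber: after Fabs, a compare against 0.0 can only come out equal, greater, or unordered (NaN), so B(gt) fires exactly for "non-zero and not a NaN" and B(le) for its inverse. A minimal stand-alone C++ sketch of the same predicate, purely illustrative:

#include <cmath>

// Sketch: the predicate encoded by Fabs / Fcmp(scratch, 0.0) / B(gt).
// fabs(x) > 0.0 is false for +/-0.0 and also false for NaN, because an
// unordered comparison fails the gt condition (nzCV), matching the comment.
bool IsNonZeroAndNotNaN(double value) {
  return std::fabs(value) > 0.0;
}
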
(...skipping 327 matching lines...)
@@ -540,39 +540,39 @@
 void LCodeGen::SaveCallerDoubles() {
   DCHECK(info()->saves_caller_doubles());
   DCHECK(NeedsEagerFrame());
   Comment(";;; Save clobbered callee double registers");
   BitVector* doubles = chunk()->allocated_double_registers();
   BitVector::Iterator iterator(doubles);
   int count = 0;
   while (!iterator.Done()) {
     // TODO(all): Is this supposed to save just the callee-saved doubles? It
     // looks like it's saving all of them.
-    VRegister value = VRegister::from_code(iterator.Current());
+    FPRegister value = FPRegister::from_code(iterator.Current());
     __ Poke(value, count * kDoubleSize);
     iterator.Advance();
     count++;
   }
 }
 
 
 void LCodeGen::RestoreCallerDoubles() {
   DCHECK(info()->saves_caller_doubles());
   DCHECK(NeedsEagerFrame());
   Comment(";;; Restore clobbered callee double registers");
   BitVector* doubles = chunk()->allocated_double_registers();
   BitVector::Iterator iterator(doubles);
   int count = 0;
   while (!iterator.Done()) {
     // TODO(all): Is this supposed to restore just the callee-saved doubles? It
     // looks like it's restoring all of them.
-    VRegister value = VRegister::from_code(iterator.Current());
+    FPRegister value = FPRegister::from_code(iterator.Current());
     __ Peek(value, count * kDoubleSize);
     iterator.Advance();
     count++;
   }
 }
 
 
 bool LCodeGen::GeneratePrologue() {
   DCHECK(is_generating());
 
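
SaveCallerDoubles and RestoreCallerDoubles depend on both loops walking the same BitVector of allocated double registers in the same order, so the slot at count * kDoubleSize is read back into the register it was written from. A rough sketch of that pairing, with std::vector standing in for V8's BitVector and all names here hypothetical:

#include <vector>

// Sketch: save/restore stay in sync only because both loops visit the
// allocated register codes in identical order; "slots" models the stack
// area written with Poke and read back with Peek.
void SaveDoubles(const std::vector<int>& allocated, double* slots,
                 const double* regs) {
  int count = 0;
  for (int code : allocated) slots[count++] = regs[code];  // __ Poke
}

void RestoreDoubles(const std::vector<int>& allocated, const double* slots,
                    double* regs) {
  int count = 0;
  for (int code : allocated) regs[code] = slots[count++];  // __ Peek
}
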
(...skipping 549 matching lines...)
@@ -1128,21 +1128,21 @@
     // references the end of the double registers and not the end of the stack
     // slots.
     // In both of the cases above, we _could_ add the tracking information
     // required so that we can use jssp here, but in practice it isn't worth it.
     if ((stack_mode == kCanUseStackPointer) &&
         !info()->saves_caller_doubles()) {
       int jssp_offset_to_fp =
           (pushed_arguments_ + GetTotalFrameSlotCount()) * kPointerSize -
           StandardFrameConstants::kFixedFrameSizeAboveFp;
       int jssp_offset = fp_offset + jssp_offset_to_fp;
-      if (masm()->IsImmLSScaled(jssp_offset, kPointerSizeLog2)) {
+      if (masm()->IsImmLSScaled(jssp_offset, LSDoubleWord)) {
         return MemOperand(masm()->StackPointer(), jssp_offset);
       }
     }
     return MemOperand(fp, fp_offset);
   } else {
     // Retrieve parameter without eager stack-frame relative to the
     // stack-pointer.
     return MemOperand(masm()->StackPointer(),
                       ArgumentsOffsetWithoutFrame(op->index()));
   }
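
The IsImmLSScaled check above guards the fast path because an AArch64 LDR/STR unsigned-offset immediate must be a non-negative multiple of the access size that still fits in 12 bits after scaling; when the offset cannot be encoded, the code falls back to the fp-relative operand. A stand-alone sketch of that encoding constraint (the function name and the 8-byte access size are assumptions for illustration):

#include <cstdint>

// Sketch: can "offset" be encoded as an LDR/STR unsigned scaled immediate?
// For the pointer-sized accesses used here, size_log2 would be 3.
bool IsImmLSScaledSketch(int64_t offset, unsigned size_log2) {
  bool aligned = (offset & ((int64_t{1} << size_log2) - 1)) == 0;
  int64_t scaled = offset >> size_log2;
  return aligned && scaled >= 0 && scaled < (int64_t{1} << 12);
}
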
(...skipping 118 matching lines...)
@@ -1267,24 +1267,25 @@
 template<class InstrType>
 void LCodeGen::EmitTestAndBranch(InstrType instr,
                                  Condition condition,
                                  const Register& value,
                                  uint64_t mask) {
   DCHECK((condition != al) && (condition != nv));
   TestAndBranch branch(this, condition, value, mask);
   EmitBranchGeneric(instr, branch);
 }
 
-template <class InstrType>
+
+template<class InstrType>
 void LCodeGen::EmitBranchIfNonZeroNumber(InstrType instr,
-                                         const VRegister& value,
-                                         const VRegister& scratch) {
+                                         const FPRegister& value,
+                                         const FPRegister& scratch) {
   BranchIfNonZeroNumber branch(this, value, scratch);
   EmitBranchGeneric(instr, branch);
 }
 
 
 template<class InstrType>
 void LCodeGen::EmitBranchIfHeapNumber(InstrType instr,
                                       const Register& value) {
   BranchIfHeapNumber branch(this, value);
   EmitBranchGeneric(instr, branch);
(...skipping 980 matching lines...)
@@ -2271,21 +2272,21 @@
   // The name in the constructor is internalized because of the way the context
   // is booted. This routine isn't expected to work for random API-created
   // classes and it doesn't have to because you can't access it with natives
   // syntax. Since both sides are internalized it is sufficient to use an
   // identity comparison.
   EmitCompareAndBranch(instr, eq, scratch1, Operand(class_name));
 }
 
 void LCodeGen::DoCmpHoleAndBranchD(LCmpHoleAndBranchD* instr) {
   DCHECK(instr->hydrogen()->representation().IsDouble());
-  VRegister object = ToDoubleRegister(instr->object());
+  FPRegister object = ToDoubleRegister(instr->object());
   Register temp = ToRegister(instr->temp());
 
   // If we don't have a NaN, we don't have the hole, so branch now to avoid the
   // (relatively expensive) hole-NaN check.
   __ Fcmp(object, object);
   __ B(vc, instr->FalseLabel(chunk_));
 
   // We have a NaN, but is it the hole?
   __ Fmov(temp, object);
   EmitCompareAndBranch(instr, eq, temp, kHoleNanInt64);
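
DoCmpHoleAndBranchD does the cheap self-compare first because only a NaN compares unordered with itself; only after that does it pay for the exact bit-pattern compare against the hole NaN. A stand-alone sketch of the same two-step test, with hole_bits as a placeholder for V8's kHoleNanInt64:

#include <cstdint>
#include <cstring>

// Sketch: step 1 mirrors Fcmp(object, object) / B(vc, ...), since an ordered
// value can never be the hole. Step 2 mirrors Fmov(temp, object) plus the
// integer compare against the hole's bit pattern.
bool IsHoleNaN(double value, uint64_t hole_bits) {
  if (value == value) return false;  // not a NaN, so not the hole
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  return bits == hole_bits;
}
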
(...skipping 976 matching lines...)
@@ -3268,21 +3269,21 @@
   Register object = ToRegister(instr->object());
 
   if (access.IsExternalMemory()) {
     Register result = ToRegister(instr->result());
     __ Load(result, MemOperand(object, offset), access.representation());
     return;
   }
 
   if (instr->hydrogen()->representation().IsDouble()) {
     DCHECK(access.IsInobject());
-    VRegister result = ToDoubleRegister(instr->result());
+    FPRegister result = ToDoubleRegister(instr->result());
     __ Ldr(result, FieldMemOperand(object, offset));
     return;
   }
 
   Register result = ToRegister(instr->result());
   Register source;
   if (access.IsInobject()) {
     source = object;
   } else {
     // Load the properties array, using result as a scratch register.
(...skipping 139 matching lines...)
@@ -3428,21 +3429,21 @@
   // never get set by the negation. This is therefore the same as the Integer32
   // case in DoMathAbs, except that it operates on 64-bit values.
   STATIC_ASSERT((kSmiValueSize == 32) && (kSmiShift == 32) && (kSmiTag == 0));
 
   __ JumpIfNotSmi(input, deferred->entry());
 
   __ Abs(result, input, NULL, &done);
 
   // The result is the magnitude (abs) of the smallest value a smi can
   // represent, encoded as a double.
-  __ Mov(result_bits, bit_cast<uint64_t>(static_cast<double>(0x80000000)));
+  __ Mov(result_bits, double_to_rawbits(0x80000000));
   __ B(deferred->allocation_entry());
 
   __ Bind(deferred->exit());
   __ Str(result_bits, FieldMemOperand(result, HeapNumber::kValueOffset));
 
   __ Bind(&done);
 }
 
 void LCodeGen::DoMathCos(LMathCos* instr) {
   DCHECK(instr->IsMarkedAsCall());
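
The Mov into result_bits materializes abs(kSmiMinValue) directly as raw double bits, because 2147483648 no longer fits in the 32-bit smi payload and has to become a heap number. The two sides only differ in spelling (bit_cast of a double versus double_to_rawbits); both produce the IEEE-754 encoding of 2147483648.0. A small stand-alone check of that value:

#include <cstdint>
#include <cstdio>
#include <cstring>

// Sketch: the raw bits stored into the heap number are the encoding of 2^31
// as a double, i.e. 0x41E0000000000000.
int main() {
  double magnitude = static_cast<double>(UINT64_C(0x80000000));  // 2147483648.0
  uint64_t bits;
  std::memcpy(&bits, &magnitude, sizeof(bits));
  std::printf("%016llx\n", static_cast<unsigned long long>(bits));
  return 0;
}
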
(...skipping 1521 matching lines...)
@@ -4970,21 +4971,21 @@
     __ Store(value, MemOperand(object, offset), representation);
     return;
   }
 
   __ AssertNotSmi(object);
 
   if (!FLAG_unbox_double_fields && representation.IsDouble()) {
     DCHECK(access.IsInobject());
     DCHECK(!instr->hydrogen()->has_transition());
     DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
-    VRegister value = ToDoubleRegister(instr->value());
+    FPRegister value = ToDoubleRegister(instr->value());
     __ Str(value, FieldMemOperand(object, offset));
     return;
   }
 
   DCHECK(!representation.IsSmi() ||
          !instr->value()->IsConstantOperand() ||
          IsInteger32Constant(LConstantOperand::cast(instr->value())));
 
   if (instr->hydrogen()->has_transition()) {
     Handle<Map> transition = instr->hydrogen()->transition_map();
(...skipping 17 matching lines...)
@@ -5008,21 +5009,21 @@
   if (access.IsInobject()) {
     destination = object;
   } else {
     Register temp0 = ToRegister(instr->temp0());
     __ Ldr(temp0, FieldMemOperand(object, JSObject::kPropertiesOffset));
     destination = temp0;
   }
 
   if (FLAG_unbox_double_fields && representation.IsDouble()) {
     DCHECK(access.IsInobject());
-    VRegister value = ToDoubleRegister(instr->value());
+    FPRegister value = ToDoubleRegister(instr->value());
     __ Str(value, FieldMemOperand(object, offset));
   } else if (representation.IsSmi() &&
              instr->hydrogen()->value()->representation().IsInteger32()) {
     DCHECK(instr->hydrogen()->store_mode() == STORE_TO_INITIALIZED_ENTRY);
 #ifdef DEBUG
     Register temp0 = ToRegister(instr->temp0());
     __ Ldr(temp0, FieldMemOperand(destination, offset));
     __ AssertSmi(temp0);
     // If destination aliased temp0, restore it to the address calculated
     // earlier.
(...skipping 553 matching lines...)
@@ -5582,10 +5583,10 @@
   // Index is equal to negated out of object property index plus 1.
   __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ Ldr(result, FieldMemOperand(result,
                                  FixedArray::kHeaderSize - kPointerSize));
   __ Bind(deferred->exit());
   __ Bind(&done);
 }
 
 }  // namespace internal
 }  // namespace v8
