Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(506)

Side by Side Diff: src/arm64/lithium-codegen-arm64.cc

Issue 268483002: ARM64: Sign extension on MemOperand for keyed ops (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/arm64/lithium-codegen-arm64.h ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "v8.h" 5 #include "v8.h"
6 6
7 #include "arm64/lithium-codegen-arm64.h" 7 #include "arm64/lithium-codegen-arm64.h"
8 #include "arm64/lithium-gap-resolver-arm64.h" 8 #include "arm64/lithium-gap-resolver-arm64.h"
9 #include "code-stubs.h" 9 #include "code-stubs.h"
10 #include "stub-cache.h" 10 #include "stub-cache.h"
(...skipping 3409 matching lines...) Expand 10 before | Expand all | Expand 10 after
3420 case FAST_SMI_ELEMENTS: 3420 case FAST_SMI_ELEMENTS:
3421 case DICTIONARY_ELEMENTS: 3421 case DICTIONARY_ELEMENTS:
3422 case SLOPPY_ARGUMENTS_ELEMENTS: 3422 case SLOPPY_ARGUMENTS_ELEMENTS:
3423 UNREACHABLE(); 3423 UNREACHABLE();
3424 break; 3424 break;
3425 } 3425 }
3426 } 3426 }
3427 } 3427 }
3428 3428
3429 3429
3430 void LCodeGen::CalcKeyedArrayBaseRegister(Register base, 3430 MemOperand LCodeGen::PrepareKeyedArrayOperand(Register base,
3431 Register elements, 3431 Register elements,
3432 Register key, 3432 Register key,
3433 bool key_is_tagged, 3433 bool key_is_tagged,
3434 ElementsKind elements_kind) { 3434 ElementsKind elements_kind,
3435 Representation representation,
3436 int additional_index) {
3437 STATIC_ASSERT((kSmiValueSize == 32) && (kSmiShift == 32) && (kSmiTag == 0));
3435 int element_size_shift = ElementsKindToShiftSize(elements_kind); 3438 int element_size_shift = ElementsKindToShiftSize(elements_kind);
3436 3439
3440 // Read or write only the most-significant 32 bits in the case of fast smi
3441 // arrays with integer inputs/outputs.
3442 bool int32_and_fast_smi = representation.IsInteger32() &&
 ulan 2014/05/05 08:21:21 representation.IsInteger32() should imply (instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS), so consider asserting that instead of checking both conditions here. [comment truncated in extraction; completion inferred from the condition in the code below and the "Done." reply]
m.m.capewell 2014/05/06 13:20:45 Done.
3443 (elements_kind == FAST_SMI_ELEMENTS);
3444
3437 // Even though the HLoad/StoreKeyed instructions force the input 3445 // Even though the HLoad/StoreKeyed instructions force the input
3438 // representation for the key to be an integer, the input gets replaced during 3446 // representation for the key to be an integer, the input gets replaced during
3439 // bounds check elimination with the index argument to the bounds check, which 3447 // bounds check elimination with the index argument to the bounds check, which
3440 // can be tagged, so that case must be handled here, too. 3448 // can be tagged, so that case must be handled here, too.
3441 if (key_is_tagged) { 3449 if (key_is_tagged) {
3442 __ Add(base, elements, Operand::UntagSmiAndScale(key, element_size_shift)); 3450 __ Add(base, elements, Operand::UntagSmiAndScale(key, element_size_shift));
3451 if (int32_and_fast_smi) {
3452 return UntagSmiFieldMemOperand(base, additional_index);
3453 } else {
3454 return FieldMemOperand(base, additional_index);
3455 }
3443 } else { 3456 } else {
3444 // Sign extend key because it could be a 32-bit negative value or contain 3457 // Sign extend key because it could be a 32-bit negative value or contain
3445 // garbage in the top 32-bits. The address computation happens in 64-bit. 3458 // garbage in the top 32-bits. The address computation happens in 64-bit.
3446 ASSERT((element_size_shift >= 0) && (element_size_shift <= 4)); 3459 ASSERT((element_size_shift >= 0) && (element_size_shift <= 4));
3447 __ Add(base, elements, Operand(key, SXTW, element_size_shift)); 3460 if (int32_and_fast_smi) {
3461 __ Add(base, elements, Operand(key, SXTW, element_size_shift));
3462 return UntagSmiFieldMemOperand(base, additional_index);
3463 } else {
3464 __ Add(base, elements, additional_index - kHeapObjectTag);
3465 return MemOperand(base, key, SXTW, element_size_shift);
3466 }
3448 } 3467 }
3449 } 3468 }
3450 3469
3451 3470
3452 void LCodeGen::DoLoadKeyedFixedDouble(LLoadKeyedFixedDouble* instr) { 3471 void LCodeGen::DoLoadKeyedFixedDouble(LLoadKeyedFixedDouble* instr) {
3453 Register elements = ToRegister(instr->elements()); 3472 Register elements = ToRegister(instr->elements());
3454 DoubleRegister result = ToDoubleRegister(instr->result()); 3473 DoubleRegister result = ToDoubleRegister(instr->result());
3455 Register load_base; 3474 MemOperand mem_op;
3456 int offset = 0;
3457 3475
3458 if (instr->key()->IsConstantOperand()) { 3476 if (instr->key()->IsConstantOperand()) {
3459 ASSERT(instr->hydrogen()->RequiresHoleCheck() || 3477 ASSERT(instr->hydrogen()->RequiresHoleCheck() ||
3460 (instr->temp() == NULL)); 3478 (instr->temp() == NULL));
3461 3479
3462 int constant_key = ToInteger32(LConstantOperand::cast(instr->key())); 3480 int constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
3463 if (constant_key & 0xf0000000) { 3481 if (constant_key & 0xf0000000) {
3464 Abort(kArrayIndexConstantValueTooBig); 3482 Abort(kArrayIndexConstantValueTooBig);
3465 } 3483 }
3466 offset = FixedDoubleArray::OffsetOfElementAt(constant_key + 3484 int offset = FixedDoubleArray::OffsetOfElementAt(constant_key +
3467 instr->additional_index()); 3485 instr->additional_index());
3468 load_base = elements; 3486 mem_op = FieldMemOperand(elements, offset);
3469 } else { 3487 } else {
3470 load_base = ToRegister(instr->temp()); 3488 Register load_base = ToRegister(instr->temp());
3471 Register key = ToRegister(instr->key()); 3489 Register key = ToRegister(instr->key());
3472 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi(); 3490 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi();
3473 CalcKeyedArrayBaseRegister(load_base, elements, key, key_is_tagged, 3491 int offset = FixedDoubleArray::OffsetOfElementAt(instr->additional_index());
3474 instr->hydrogen()->elements_kind()); 3492 mem_op = PrepareKeyedArrayOperand(load_base, elements, key, key_is_tagged,
3475 offset = FixedDoubleArray::OffsetOfElementAt(instr->additional_index()); 3493 instr->hydrogen()->elements_kind(),
3494 instr->hydrogen()->representation(),
3495 offset);
3476 } 3496 }
3477 __ Ldr(result, FieldMemOperand(load_base, offset)); 3497
3498 __ Ldr(result, mem_op);
3478 3499
3479 if (instr->hydrogen()->RequiresHoleCheck()) { 3500 if (instr->hydrogen()->RequiresHoleCheck()) {
3480 Register scratch = ToRegister(instr->temp()); 3501 Register scratch = ToRegister(instr->temp());
3481 3502 // Detect the hole NaN by adding one to the integer representation of the
3482 // TODO(all): Is it faster to reload this value to an integer register, or 3503 // result, and checking for overflow.
3483 // move from fp to integer? 3504 STATIC_ASSERT(kHoleNanInt64 == 0x7fffffffffffffff);
3484 __ Fmov(scratch, result); 3505 __ Ldr(scratch, mem_op);
3485 __ Cmp(scratch, kHoleNanInt64); 3506 __ Cmn(scratch, 1);
3486 DeoptimizeIf(eq, instr->environment()); 3507 DeoptimizeIf(vs, instr->environment());
3487 } 3508 }
3488 } 3509 }
3489 3510
3490 3511
3491 void LCodeGen::DoLoadKeyedFixed(LLoadKeyedFixed* instr) { 3512 void LCodeGen::DoLoadKeyedFixed(LLoadKeyedFixed* instr) {
3492 Register elements = ToRegister(instr->elements()); 3513 Register elements = ToRegister(instr->elements());
3493 Register result = ToRegister(instr->result()); 3514 Register result = ToRegister(instr->result());
3494 Register load_base; 3515 MemOperand mem_op;
3495 int offset = 0;
3496 3516
3517 Representation representation = instr->hydrogen()->representation();
3497 if (instr->key()->IsConstantOperand()) { 3518 if (instr->key()->IsConstantOperand()) {
3498 ASSERT(instr->temp() == NULL); 3519 ASSERT(instr->temp() == NULL);
3499 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); 3520 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
3500 offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) + 3521 int offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
3501 instr->additional_index()); 3522 instr->additional_index());
3502 load_base = elements; 3523 if (representation.IsInteger32() &&
 ulan 2014/05/05 08:21:21 representation.IsInteger32() should imply (instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS), so consider asserting that instead of checking both conditions here. [comment truncated in extraction; completion inferred from the condition in the code below and the "Done." reply]
 m.m.capewell 2014/05/06 13:20:45 Done.
3524 (instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS)) {
3525 STATIC_ASSERT((kSmiValueSize == 32) && (kSmiShift == 32) &&
3526 (kSmiTag == 0));
3527 mem_op = UntagSmiFieldMemOperand(elements, offset);
3528 } else {
3529 mem_op = FieldMemOperand(elements, offset);
3530 }
3503 } else { 3531 } else {
3504 load_base = ToRegister(instr->temp()); 3532 Register load_base = ToRegister(instr->temp());
3505 Register key = ToRegister(instr->key()); 3533 Register key = ToRegister(instr->key());
3506 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi(); 3534 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi();
3507 CalcKeyedArrayBaseRegister(load_base, elements, key, key_is_tagged, 3535 int offset = FixedArray::OffsetOfElementAt(instr->additional_index());
3508 instr->hydrogen()->elements_kind()); 3536
3509 offset = FixedArray::OffsetOfElementAt(instr->additional_index()); 3537 mem_op = PrepareKeyedArrayOperand(load_base, elements, key, key_is_tagged,
3538 instr->hydrogen()->elements_kind(),
3539 representation, offset);
3510 } 3540 }
3511 Representation representation = instr->hydrogen()->representation();
3512 3541
3513 if (representation.IsInteger32() && 3542 __ Load(result, mem_op, representation);
3514 instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS) {
3515 STATIC_ASSERT(kSmiValueSize == 32 && kSmiShift == 32 && kSmiTag == 0);
3516 __ Load(result, UntagSmiFieldMemOperand(load_base, offset),
3517 Representation::Integer32());
3518 } else {
3519 __ Load(result, FieldMemOperand(load_base, offset),
3520 representation);
3521 }
3522 3543
3523 if (instr->hydrogen()->RequiresHoleCheck()) { 3544 if (instr->hydrogen()->RequiresHoleCheck()) {
3524 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) { 3545 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
3525 DeoptimizeIfNotSmi(result, instr->environment()); 3546 DeoptimizeIfNotSmi(result, instr->environment());
3526 } else { 3547 } else {
3527 DeoptimizeIfRoot(result, Heap::kTheHoleValueRootIndex, 3548 DeoptimizeIfRoot(result, Heap::kTheHoleValueRootIndex,
3528 instr->environment()); 3549 instr->environment());
3529 } 3550 }
3530 } 3551 }
3531 } 3552 }
(...skipping 1575 matching lines...) Expand 10 before | Expand all | Expand 10 after
5107 UNREACHABLE(); 5128 UNREACHABLE();
5108 break; 5129 break;
5109 } 5130 }
5110 } 5131 }
5111 } 5132 }
5112 5133
5113 5134
5114 void LCodeGen::DoStoreKeyedFixedDouble(LStoreKeyedFixedDouble* instr) { 5135 void LCodeGen::DoStoreKeyedFixedDouble(LStoreKeyedFixedDouble* instr) {
5115 Register elements = ToRegister(instr->elements()); 5136 Register elements = ToRegister(instr->elements());
5116 DoubleRegister value = ToDoubleRegister(instr->value()); 5137 DoubleRegister value = ToDoubleRegister(instr->value());
5117 Register store_base = no_reg; 5138 MemOperand mem_op;
5118 int offset = 0;
5119 5139
5120 if (instr->key()->IsConstantOperand()) { 5140 if (instr->key()->IsConstantOperand()) {
5121 int constant_key = ToInteger32(LConstantOperand::cast(instr->key())); 5141 int constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
5122 if (constant_key & 0xf0000000) { 5142 if (constant_key & 0xf0000000) {
5123 Abort(kArrayIndexConstantValueTooBig); 5143 Abort(kArrayIndexConstantValueTooBig);
5124 } 5144 }
5125 offset = FixedDoubleArray::OffsetOfElementAt(constant_key + 5145 int offset = FixedDoubleArray::OffsetOfElementAt(constant_key +
5126 instr->additional_index()); 5146 instr->additional_index());
5127 store_base = elements; 5147 mem_op = FieldMemOperand(elements, offset);
5128 } else { 5148 } else {
5129 store_base = ToRegister(instr->temp()); 5149 Register store_base = ToRegister(instr->temp());
5130 Register key = ToRegister(instr->key()); 5150 Register key = ToRegister(instr->key());
5131 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi(); 5151 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi();
5132 CalcKeyedArrayBaseRegister(store_base, elements, key, key_is_tagged, 5152 int offset = FixedDoubleArray::OffsetOfElementAt(instr->additional_index());
5133 instr->hydrogen()->elements_kind()); 5153 mem_op = PrepareKeyedArrayOperand(store_base, elements, key, key_is_tagged,
5134 offset = FixedDoubleArray::OffsetOfElementAt(instr->additional_index()); 5154 instr->hydrogen()->elements_kind(),
5155 instr->hydrogen()->representation(),
5156 offset);
5135 } 5157 }
5136 5158
5137 if (instr->NeedsCanonicalization()) { 5159 if (instr->NeedsCanonicalization()) {
5138 DoubleRegister dbl_scratch = double_scratch(); 5160 DoubleRegister dbl_scratch = double_scratch();
5139 __ Fmov(dbl_scratch, 5161 __ Fmov(dbl_scratch,
5140 FixedDoubleArray::canonical_not_the_hole_nan_as_double()); 5162 FixedDoubleArray::canonical_not_the_hole_nan_as_double());
5141 __ Fmaxnm(dbl_scratch, dbl_scratch, value); 5163 __ Fmaxnm(dbl_scratch, dbl_scratch, value);
5142 __ Str(dbl_scratch, FieldMemOperand(store_base, offset)); 5164 __ Str(dbl_scratch, mem_op);
5143 } else { 5165 } else {
5144 __ Str(value, FieldMemOperand(store_base, offset)); 5166 __ Str(value, mem_op);
5145 } 5167 }
5146 } 5168 }
5147 5169
5148 5170
5149 void LCodeGen::DoStoreKeyedFixed(LStoreKeyedFixed* instr) { 5171 void LCodeGen::DoStoreKeyedFixed(LStoreKeyedFixed* instr) {
5150 Register value = ToRegister(instr->value()); 5172 Register value = ToRegister(instr->value());
5151 Register elements = ToRegister(instr->elements()); 5173 Register elements = ToRegister(instr->elements());
5152 Register scratch = no_reg; 5174 Register scratch = no_reg;
5153 Register store_base = no_reg; 5175 Register store_base = no_reg;
5154 Register key = no_reg; 5176 Register key = no_reg;
5155 int offset = 0; 5177 MemOperand mem_op;
5156 5178
5157 if (!instr->key()->IsConstantOperand() || 5179 if (!instr->key()->IsConstantOperand() ||
5158 instr->hydrogen()->NeedsWriteBarrier()) { 5180 instr->hydrogen()->NeedsWriteBarrier()) {
5159 scratch = ToRegister(instr->temp()); 5181 scratch = ToRegister(instr->temp());
5160 } 5182 }
5161 5183
5184 Representation representation = instr->hydrogen()->value()->representation();
5162 if (instr->key()->IsConstantOperand()) { 5185 if (instr->key()->IsConstantOperand()) {
5163 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); 5186 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
5164 offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) + 5187 int offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
5165 instr->additional_index()); 5188 instr->additional_index());
5166 store_base = elements; 5189 store_base = elements;
5190 if (representation.IsInteger32() &&
5191 (instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS)) {
 ulan 2014/05/05 08:21:21 representation.IsInteger32() should imply (instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS), so consider asserting that instead of checking both conditions here. [comment truncated in extraction; completion inferred from the condition in the code above and the "Done." reply]
 m.m.capewell 2014/05/06 13:20:45 Done.
5192 STATIC_ASSERT((kSmiValueSize == 32) && (kSmiShift == 32) &&
5193 (kSmiTag == 0));
5194 mem_op = UntagSmiFieldMemOperand(store_base, offset);
5195 } else {
5196 mem_op = FieldMemOperand(store_base, offset);
5197 }
5167 } else { 5198 } else {
5168 store_base = scratch; 5199 store_base = scratch;
5169 key = ToRegister(instr->key()); 5200 key = ToRegister(instr->key());
5170 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi(); 5201 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi();
5171 CalcKeyedArrayBaseRegister(store_base, elements, key, key_is_tagged, 5202 int offset = FixedArray::OffsetOfElementAt(instr->additional_index());
5172 instr->hydrogen()->elements_kind()); 5203
5173 offset = FixedArray::OffsetOfElementAt(instr->additional_index()); 5204 mem_op = PrepareKeyedArrayOperand(store_base, elements, key, key_is_tagged,
5205 instr->hydrogen()->elements_kind(),
5206 representation, offset);
5174 } 5207 }
5175 Representation representation = instr->hydrogen()->value()->representation(); 5208
5176 if (representation.IsInteger32()) { 5209 __ Store(value, mem_op, representation);
5177 ASSERT(instr->hydrogen()->store_mode() == STORE_TO_INITIALIZED_ENTRY);
5178 ASSERT(instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS);
ulan 2014/05/05 08:21:21 Can we keep these two asserts?
m.m.capewell 2014/05/06 13:20:45 Done.
5179 STATIC_ASSERT(kSmiValueSize == 32 && kSmiShift == 32 && kSmiTag == 0);
5180 __ Store(value, UntagSmiFieldMemOperand(store_base, offset),
5181 Representation::Integer32());
5182 } else {
5183 __ Store(value, FieldMemOperand(store_base, offset), representation);
5184 }
5185 5210
5186 if (instr->hydrogen()->NeedsWriteBarrier()) { 5211 if (instr->hydrogen()->NeedsWriteBarrier()) {
5187 ASSERT(representation.IsTagged()); 5212 ASSERT(representation.IsTagged());
5188 // This assignment may cause element_addr to alias store_base. 5213 // This assignment may cause element_addr to alias store_base.
5189 Register element_addr = scratch; 5214 Register element_addr = scratch;
5190 SmiCheck check_needed = 5215 SmiCheck check_needed =
5191 instr->hydrogen()->value()->IsHeapObject() 5216 instr->hydrogen()->value()->IsHeapObject()
5192 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; 5217 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
5193 // Compute address of modified element and store it into key register. 5218 // Compute address of modified element and store it into key register.
5194 __ Add(element_addr, store_base, offset - kHeapObjectTag); 5219 __ Add(element_addr, store_base, mem_op.OffsetAsOperand());
 ulan 2014/05/05 08:21:21 Using mem_op.base() instead of store_base would be cleaner, since the MemOperand already carries its base register. [comment truncated in extraction; completion inferred from the surrounding code and the "Done." reply]
 m.m.capewell 2014/05/06 13:20:45 Done.
5195 __ RecordWrite(elements, element_addr, value, GetLinkRegisterState(), 5220 __ RecordWrite(elements, element_addr, value, GetLinkRegisterState(),
5196 kSaveFPRegs, EMIT_REMEMBERED_SET, check_needed); 5221 kSaveFPRegs, EMIT_REMEMBERED_SET, check_needed);
5197 } 5222 }
5198 } 5223 }
5199 5224
5200 5225
5201 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { 5226 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
5202 ASSERT(ToRegister(instr->context()).is(cp)); 5227 ASSERT(ToRegister(instr->context()).is(cp));
5203 ASSERT(ToRegister(instr->object()).Is(x2)); 5228 ASSERT(ToRegister(instr->object()).Is(x2));
5204 ASSERT(ToRegister(instr->key()).Is(x1)); 5229 ASSERT(ToRegister(instr->key()).Is(x1));
(...skipping 706 matching lines...) Expand 10 before | Expand all | Expand 10 after
5911 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); 5936 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
5912 // Index is equal to negated out of object property index plus 1. 5937 // Index is equal to negated out of object property index plus 1.
5913 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); 5938 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2));
5914 __ Ldr(result, FieldMemOperand(result, 5939 __ Ldr(result, FieldMemOperand(result,
5915 FixedArray::kHeaderSize - kPointerSize)); 5940 FixedArray::kHeaderSize - kPointerSize));
5916 __ Bind(deferred->exit()); 5941 __ Bind(deferred->exit());
5917 __ Bind(&done); 5942 __ Bind(&done);
5918 } 5943 }
5919 5944
5920 } } // namespace v8::internal 5945 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/arm64/lithium-codegen-arm64.h ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698