OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/arm64/lithium-codegen-arm64.h" | 7 #include "src/arm64/lithium-codegen-arm64.h" |
8 #include "src/arm64/lithium-gap-resolver-arm64.h" | 8 #include "src/arm64/lithium-gap-resolver-arm64.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/hydrogen-osr.h" | 10 #include "src/hydrogen-osr.h" |
(...skipping 3507 matching lines...)
3518 } | 3518 } |
3519 | 3519 |
3520 | 3520 |
3521 MemOperand LCodeGen::PrepareKeyedArrayOperand(Register base, | 3521 MemOperand LCodeGen::PrepareKeyedArrayOperand(Register base, |
3522 Register elements, | 3522 Register elements, |
3523 Register key, | 3523 Register key, |
3524 bool key_is_tagged, | 3524 bool key_is_tagged, |
3525 ElementsKind elements_kind, | 3525 ElementsKind elements_kind, |
3526 Representation representation, | 3526 Representation representation, |
3527 int base_offset) { | 3527 int base_offset) { |
3528 STATIC_ASSERT((kSmiValueSize == kWRegSizeInBits) && (kSmiTag == 0)); | 3528 STATIC_ASSERT((kSmiValueSize == 32) && (kSmiShift == 32) && (kSmiTag == 0)); |
3529 int element_size_shift = ElementsKindToShiftSize(elements_kind); | 3529 int element_size_shift = ElementsKindToShiftSize(elements_kind); |
3530 | 3530 |
3531 // Even though the HLoad/StoreKeyed instructions force the input | 3531 // Even though the HLoad/StoreKeyed instructions force the input |
3532 // representation for the key to be an integer, the input gets replaced during | 3532 // representation for the key to be an integer, the input gets replaced during |
3533 // bounds check elimination with the index argument to the bounds check, which | 3533 // bounds check elimination with the index argument to the bounds check, which |
3534 // can be tagged, so that case must be handled here, too. | 3534 // can be tagged, so that case must be handled here, too. |
3535 if (key_is_tagged) { | 3535 if (key_is_tagged) { |
3536 __ Add(base, elements, Operand::UntagSmiAndScale(key, element_size_shift)); | 3536 __ Add(base, elements, Operand::UntagSmiAndScale(key, element_size_shift)); |
3537 if (representation.IsInteger32()) { | 3537 if (representation.IsInteger32()) { |
3538 ASSERT(elements_kind == FAST_SMI_ELEMENTS); | 3538 ASSERT(elements_kind == FAST_SMI_ELEMENTS); |
3539 // Read or write only the smi payload in the case of fast smi arrays. | 3539 // Read or write only the most-significant 32 bits in the case of fast smi |
| 3540 // arrays. |
3540 return UntagSmiMemOperand(base, base_offset); | 3541 return UntagSmiMemOperand(base, base_offset); |
3541 } else { | 3542 } else { |
3542 return MemOperand(base, base_offset); | 3543 return MemOperand(base, base_offset); |
3543 } | 3544 } |
3544 } else { | 3545 } else { |
3545 // Sign extend key because it could be a 32-bit negative value or contain | 3546 // Sign extend key because it could be a 32-bit negative value or contain |
3546 // garbage in the top 32-bits. The address computation happens in 64-bit. | 3547 // garbage in the top 32-bits. The address computation happens in 64-bit. |
3547 ASSERT((element_size_shift >= 0) && (element_size_shift <= 4)); | 3548 ASSERT((element_size_shift >= 0) && (element_size_shift <= 4)); |
3548 if (representation.IsInteger32()) { | 3549 if (representation.IsInteger32()) { |
3549 ASSERT(elements_kind == FAST_SMI_ELEMENTS); | 3550 ASSERT(elements_kind == FAST_SMI_ELEMENTS); |
3550 // Read or write only the smi payload in the case of fast smi arrays. | 3551 // Read or write only the most-significant 32 bits in the case of fast smi |
| 3552 // arrays. |
3551 __ Add(base, elements, Operand(key, SXTW, element_size_shift)); | 3553 __ Add(base, elements, Operand(key, SXTW, element_size_shift)); |
3552 return UntagSmiMemOperand(base, base_offset); | 3554 return UntagSmiMemOperand(base, base_offset); |
3553 } else { | 3555 } else { |
3554 __ Add(base, elements, base_offset); | 3556 __ Add(base, elements, base_offset); |
3555 return MemOperand(base, key, SXTW, element_size_shift); | 3557 return MemOperand(base, key, SXTW, element_size_shift); |
3556 } | 3558 } |
3557 } | 3559 } |
3558 } | 3560 } |
3559 | 3561 |
3560 | 3562 |
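Note (reviewer sketch, not part of the CL): the untagged-key path above relies on SXTW so that the 64-bit address computation stays correct when the W register holds a negative index or has junk in its top 32 bits, as the comment explains. A minimal standalone C++ illustration of that arithmetic, with made-up names (ElementAddress is hypothetical, not a V8 helper):

    #include <cassert>
    #include <cstdint>

    // Rough stand-in for Operand(key, SXTW, element_size_shift) plus the
    // base_offset add: sign-extend the 32-bit key, scale it by the element
    // size, and do the remaining arithmetic in 64 bits.
    uint64_t ElementAddress(uint64_t elements, int32_t key,
                            int element_size_shift, int base_offset) {
      int64_t scaled_key =
          static_cast<int64_t>(key) * (int64_t{1} << element_size_shift);
      return elements + static_cast<uint64_t>(scaled_key) +
             static_cast<uint64_t>(base_offset);
    }

    int main() {
      // key == -1 with 8-byte elements must step back 8 bytes, not wrap to a
      // huge positive offset as a zero-extended 0xFFFFFFFF would.
      assert(ElementAddress(0x1000, -1, 3, 0) == 0x0FF8);
      return 0;
    }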
(...skipping 42 matching lines...)
3603 MemOperand mem_op; | 3605 MemOperand mem_op; |
3604 | 3606 |
3605 Representation representation = instr->hydrogen()->representation(); | 3607 Representation representation = instr->hydrogen()->representation(); |
3606 if (instr->key()->IsConstantOperand()) { | 3608 if (instr->key()->IsConstantOperand()) { |
3607 ASSERT(instr->temp() == NULL); | 3609 ASSERT(instr->temp() == NULL); |
3608 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); | 3610 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); |
3609 int offset = instr->base_offset() + | 3611 int offset = instr->base_offset() + |
3610 ToInteger32(const_operand) * kPointerSize; | 3612 ToInteger32(const_operand) * kPointerSize; |
3611 if (representation.IsInteger32()) { | 3613 if (representation.IsInteger32()) { |
3612 ASSERT(instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS); | 3614 ASSERT(instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS); |
3613 STATIC_ASSERT((kSmiValueSize == kWRegSizeInBits) && (kSmiTag == 0)); | 3615 STATIC_ASSERT((kSmiValueSize == 32) && (kSmiShift == 32) && |
| 3616 (kSmiTag == 0)); |
3614 mem_op = UntagSmiMemOperand(elements, offset); | 3617 mem_op = UntagSmiMemOperand(elements, offset); |
3615 } else { | 3618 } else { |
3616 mem_op = MemOperand(elements, offset); | 3619 mem_op = MemOperand(elements, offset); |
3617 } | 3620 } |
3618 } else { | 3621 } else { |
3619 Register load_base = ToRegister(instr->temp()); | 3622 Register load_base = ToRegister(instr->temp()); |
3620 Register key = ToRegister(instr->key()); | 3623 Register key = ToRegister(instr->key()); |
3621 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi(); | 3624 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi(); |
3622 | 3625 |
3623 mem_op = PrepareKeyedArrayOperand(load_base, elements, key, key_is_tagged, | 3626 mem_op = PrepareKeyedArrayOperand(load_base, elements, key, key_is_tagged, |
(...skipping 49 matching lines...)
3673 source = object; | 3676 source = object; |
3674 } else { | 3677 } else { |
3675 // Load the properties array, using result as a scratch register. | 3678 // Load the properties array, using result as a scratch register. |
3676 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 3679 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
3677 source = result; | 3680 source = result; |
3678 } | 3681 } |
3679 | 3682 |
3680 if (access.representation().IsSmi() && | 3683 if (access.representation().IsSmi() && |
3681 instr->hydrogen()->representation().IsInteger32()) { | 3684 instr->hydrogen()->representation().IsInteger32()) { |
3682 // Read int value directly from upper half of the smi. | 3685 // Read int value directly from upper half of the smi. |
3683 STATIC_ASSERT((kSmiValueSize == kWRegSizeInBits) && (kSmiTag == 0)); | 3686 STATIC_ASSERT(kSmiValueSize == 32 && kSmiShift == 32 && kSmiTag == 0); |
3684 __ Load(result, UntagSmiFieldMemOperand(source, offset), | 3687 __ Load(result, UntagSmiFieldMemOperand(source, offset), |
3685 Representation::Integer32()); | 3688 Representation::Integer32()); |
3686 } else { | 3689 } else { |
3687 __ Load(result, FieldMemOperand(source, offset), access.representation()); | 3690 __ Load(result, FieldMemOperand(source, offset), access.representation()); |
3688 } | 3691 } |
3689 } | 3692 } |
3690 | 3693 |
3691 | 3694 |
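Note (reviewer sketch, not part of the CL): the strengthened STATIC_ASSERTs pin down exactly the layout the untag-loads depend on: a 32-bit payload shifted into the upper word of the 64-bit tagged value (kSmiValueSize == 32, kSmiShift == 32, kSmiTag == 0), so "read the upper half" recovers the int32 directly. Roughly, with illustrative names (TagSmi/ReadUpperHalf are not V8 functions):

    #include <cassert>
    #include <cstdint>

    // Tagging: the payload lives in bits [63:32]; the tag bits in the low
    // word are zero.
    uint64_t TagSmi(int32_t value) {
      return static_cast<uint64_t>(static_cast<uint32_t>(value)) << 32;
    }

    // What UntagSmiMemOperand / UntagSmiFieldMemOperand amount to: a 32-bit
    // load of the most-significant word (address + 4 on little-endian ARM64).
    int32_t ReadUpperHalf(uint64_t tagged) {
      return static_cast<int32_t>(tagged >> 32);
    }

    int main() {
      assert(TagSmi(5) == 0x0000000500000000ull);
      assert(ReadUpperHalf(TagSmi(-7)) == -7);
      return 0;
    }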
3692 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { | 3695 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { |
3693 ASSERT(ToRegister(instr->context()).is(cp)); | 3696 ASSERT(ToRegister(instr->context()).is(cp)); |
(...skipping 1582 matching lines...)
5276 | 5279 |
5277 Representation representation = instr->hydrogen()->value()->representation(); | 5280 Representation representation = instr->hydrogen()->value()->representation(); |
5278 if (instr->key()->IsConstantOperand()) { | 5281 if (instr->key()->IsConstantOperand()) { |
5279 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); | 5282 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); |
5280 int offset = instr->base_offset() + | 5283 int offset = instr->base_offset() + |
5281 ToInteger32(const_operand) * kPointerSize; | 5284 ToInteger32(const_operand) * kPointerSize; |
5282 store_base = elements; | 5285 store_base = elements; |
5283 if (representation.IsInteger32()) { | 5286 if (representation.IsInteger32()) { |
5284 ASSERT(instr->hydrogen()->store_mode() == STORE_TO_INITIALIZED_ENTRY); | 5287 ASSERT(instr->hydrogen()->store_mode() == STORE_TO_INITIALIZED_ENTRY); |
5285 ASSERT(instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS); | 5288 ASSERT(instr->hydrogen()->elements_kind() == FAST_SMI_ELEMENTS); |
5286 STATIC_ASSERT((kSmiValueSize == kWRegSizeInBits) && (kSmiTag == 0)); | 5289 STATIC_ASSERT((kSmiValueSize == 32) && (kSmiShift == 32) && |
| 5290 (kSmiTag == 0)); |
5287 mem_op = UntagSmiMemOperand(store_base, offset); | 5291 mem_op = UntagSmiMemOperand(store_base, offset); |
5288 } else { | 5292 } else { |
5289 mem_op = MemOperand(store_base, offset); | 5293 mem_op = MemOperand(store_base, offset); |
5290 } | 5294 } |
5291 } else { | 5295 } else { |
5292 store_base = scratch; | 5296 store_base = scratch; |
5293 key = ToRegister(instr->key()); | 5297 key = ToRegister(instr->key()); |
5294 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi(); | 5298 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi(); |
5295 | 5299 |
5296 mem_op = PrepareKeyedArrayOperand(store_base, elements, key, key_is_tagged, | 5300 mem_op = PrepareKeyedArrayOperand(store_base, elements, key, key_is_tagged, |
(...skipping 98 matching lines...)
5395 Register temp0 = ToRegister(instr->temp0()); | 5399 Register temp0 = ToRegister(instr->temp0()); |
5396 __ Ldr(temp0, FieldMemOperand(destination, offset)); | 5400 __ Ldr(temp0, FieldMemOperand(destination, offset)); |
5397 __ AssertSmi(temp0); | 5401 __ AssertSmi(temp0); |
5398 // If destination aliased temp0, restore it to the address calculated | 5402 // If destination aliased temp0, restore it to the address calculated |
5399 // earlier. | 5403 // earlier. |
5400 if (destination.Is(temp0)) { | 5404 if (destination.Is(temp0)) { |
5401 ASSERT(!access.IsInobject()); | 5405 ASSERT(!access.IsInobject()); |
5402 __ Ldr(destination, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 5406 __ Ldr(destination, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
5403 } | 5407 } |
5404 #endif | 5408 #endif |
5405 STATIC_ASSERT((kSmiValueSize == kWRegSizeInBits) && (kSmiTag == 0)); | 5409 STATIC_ASSERT(kSmiValueSize == 32 && kSmiShift == 32 && kSmiTag == 0); |
5406 __ Store(value, UntagSmiFieldMemOperand(destination, offset), | 5410 __ Store(value, UntagSmiFieldMemOperand(destination, offset), |
5407 Representation::Integer32()); | 5411 Representation::Integer32()); |
5408 } else { | 5412 } else { |
5409 __ Store(value, FieldMemOperand(destination, offset), representation); | 5413 __ Store(value, FieldMemOperand(destination, offset), representation); |
5410 } | 5414 } |
5411 if (instr->hydrogen()->NeedsWriteBarrier()) { | 5415 if (instr->hydrogen()->NeedsWriteBarrier()) { |
5412 __ RecordWriteField(destination, | 5416 __ RecordWriteField(destination, |
5413 offset, | 5417 offset, |
5414 value, // Clobbered. | 5418 value, // Clobbered. |
5415 ToRegister(instr->temp1()), // Clobbered. | 5419 ToRegister(instr->temp1()), // Clobbered. |
(...skipping 622 matching lines...)
6038 Handle<ScopeInfo> scope_info = instr->scope_info(); | 6042 Handle<ScopeInfo> scope_info = instr->scope_info(); |
6039 __ Push(scope_info); | 6043 __ Push(scope_info); |
6040 __ Push(ToRegister(instr->function())); | 6044 __ Push(ToRegister(instr->function())); |
6041 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 6045 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
6042 RecordSafepoint(Safepoint::kNoLazyDeopt); | 6046 RecordSafepoint(Safepoint::kNoLazyDeopt); |
6043 } | 6047 } |
6044 | 6048 |
6045 | 6049 |
6046 | 6050 |
6047 } } // namespace v8::internal | 6051 } } // namespace v8::internal |