Index: src/x64/codegen-x64.cc |
=================================================================== |
--- src/x64/codegen-x64.cc (revision 4686) |
+++ src/x64/codegen-x64.cc (working copy) |
@@ -1858,7 +1858,8 @@ |
frame_->EmitPush(rax); // <- slot 3 |
frame_->EmitPush(rdx); // <- slot 2 |
- __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset)); |
+ __ movl(rax, FieldOperand(rdx, FixedArray::kLengthOffset)); |
+ __ Integer32ToSmi(rax, rax); |
frame_->EmitPush(rax); // <- slot 1 |
frame_->EmitPush(Smi::FromInt(0)); // <- slot 0 |
entry.Jump(); |
@@ -1869,7 +1870,8 @@ |
frame_->EmitPush(rax); // <- slot 2 |
// Push the length of the array and the initial index onto the stack. |
- __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset)); |
+ __ movl(rax, FieldOperand(rax, FixedArray::kLengthOffset)); |
+ __ Integer32ToSmi(rax, rax); |
frame_->EmitPush(rax); // <- slot 1 |
frame_->EmitPush(Smi::FromInt(0)); // <- slot 0 |
@@ -3840,13 +3842,11 @@ |
__ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset), |
Immediate(1 << Map::kIsUndetectable)); |
destination()->false_target()->Branch(not_zero); |
- __ movzxbq(kScratchRegister, |
- FieldOperand(kScratchRegister, Map::kInstanceTypeOffset)); |
- __ cmpq(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE)); |
- destination()->false_target()->Branch(below); |
- __ cmpq(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE)); |
+ __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE); |
+ destination()->false_target()->Branch(less); |
+ __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE); |
obj.Unuse(); |
- destination()->Split(below_equal); |
+ destination()->Split(less_equal); |
} |
@@ -4338,7 +4338,7 @@ |
__ PrepareCallCFunction(0); |
__ CallCFunction(ExternalReference::random_uint32_function(), 0); |
- // Convert 32 random bits in rax to 0.(32 random bits) in a double |
+ // Convert 32 random bits in eax to 0.(32 random bits) in a double |
// by computing: |
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). |
__ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single. |
@@ -4433,8 +4433,7 @@ |
__ Move(FieldOperand(rcx, HeapObject::kMapOffset), |
Factory::fixed_array_map()); |
// Set length. |
- __ Integer32ToSmi(rdx, rbx); |
- __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx); |
+ __ movl(FieldOperand(rcx, FixedArray::kLengthOffset), rbx); |
// Fill contents of fixed-array with the-hole. |
__ Move(rdx, Factory::the_hole_value()); |
__ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize)); |
@@ -4553,15 +4552,15 @@ |
// cache miss this optimization would hardly matter much. |
// Check if we could add new entry to cache. |
- __ movq(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); |
+ __ movl(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); |
__ movq(r9, FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset)); |
- __ SmiCompare(rbx, r9); |
+ __ SmiToInteger32(r9, r9); |
+ __ cmpq(rbx, r9); |
__ j(greater, &add_new_entry); |
// Check if we could evict entry after finger. |
__ movq(rdx, FieldOperand(rcx, JSFunctionResultCache::kFingerOffset)); |
__ SmiToInteger32(rdx, rdx); |
- __ SmiToInteger32(rbx, rbx); |
__ addq(rdx, kEntrySizeImm); |
Label forward; |
__ cmpq(rbx, rdx); |
@@ -4573,8 +4572,9 @@ |
__ jmp(&update_cache); |
__ bind(&add_new_entry); |
- // r9 holds cache size as smi. |
- __ SmiToInteger32(rdx, r9); |
+ // r9 holds cache size as int. |
+ __ movq(rdx, r9); |
+ __ Integer32ToSmi(r9, r9); |
__ SmiAddConstant(rbx, r9, Smi::FromInt(JSFunctionResultCache::kEntrySize)); |
__ movq(FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset), rbx); |
@@ -7132,8 +7132,13 @@ |
Result elements = allocator()->Allocate(); |
ASSERT(elements.is_valid()); |
+ // Use a fresh temporary for the index and later the loaded |
+ // value. |
+ Result index = allocator()->Allocate(); |
+ ASSERT(index.is_valid()); |
+ |
DeferredReferenceGetKeyedValue* deferred = |
- new DeferredReferenceGetKeyedValue(elements.reg(), |
+ new DeferredReferenceGetKeyedValue(index.reg(), |
receiver.reg(), |
key.reg(), |
is_global); |
@@ -7169,21 +7174,31 @@ |
Factory::fixed_array_map()); |
deferred->Branch(not_equal); |
- // Check that key is within bounds. |
- __ SmiCompare(key.reg(), |
- FieldOperand(elements.reg(), FixedArray::kLengthOffset)); |
+ // Shift the key to get the actual index value and check that |
+ // it is within bounds. |
+ __ SmiToInteger32(index.reg(), key.reg()); |
+ __ cmpl(index.reg(), |
+ FieldOperand(elements.reg(), FixedArray::kLengthOffset)); |
deferred->Branch(above_equal); |
- // The key register holds the smi-tagged key. Load the value and |
- // check that it is not the hole value. |
- Result value = elements; |
- SmiIndex index = |
- masm_->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2); |
+ // The index register holds the un-smi-tagged key. It has been |
+ // zero-extended to 64-bits, so it can be used directly as index in the |
+ // operand below. |
+ // Load and check that the result is not the hole. We could |
+ // reuse the index or elements register for the value. |
+ // |
+ // TODO(206): Consider whether it makes sense to try some |
+ // heuristic about which register to reuse. For example, if |
+ // one is rax, then we can reuse that one because the value |
+ // coming from the deferred code will be in rax. |
+ Result value = index; |
__ movq(value.reg(), |
- FieldOperand(elements.reg(), |
- index.reg, |
- index.scale, |
- FixedArray::kHeaderSize)); |
+ Operand(elements.reg(), |
+ index.reg(), |
+ times_pointer_size, |
+ FixedArray::kHeaderSize - kHeapObjectTag)); |
+ elements.Unuse(); |
+ index.Unuse(); |
__ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex); |
deferred->Branch(equal); |
__ IncrementCounter(&Counters::keyed_load_inline, 1); |
@@ -7482,7 +7497,7 @@ |
// Check whether it is possible to omit the write barrier. If the |
// elements array is in new space or the value written is a smi we can |
- // safely update the elements array without write barrier. |
+ // safely update the elements array without updating the remembered set. |
Label in_new_space; |
__ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space); |
if (!value_is_constant) { |
@@ -7507,10 +7522,10 @@ |
// Store the value. |
SmiIndex index = |
masm->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2); |
- __ movq(FieldOperand(tmp.reg(), |
- index.reg, |
- index.scale, |
- FixedArray::kHeaderSize), |
+ __ movq(Operand(tmp.reg(), |
+ index.reg, |
+ index.scale, |
+ FixedArray::kHeaderSize - kHeapObjectTag), |
value.reg()); |
__ IncrementCounter(&Counters::keyed_store_inline, 1); |
@@ -7592,7 +7607,7 @@ |
// Setup the object header. |
__ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex); |
__ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
- __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); |
+ __ movl(FieldOperand(rax, Array::kLengthOffset), Immediate(length)); |
// Setup the fixed slots. |
__ xor_(rbx, rbx); // Set to NULL. |
@@ -8267,8 +8282,7 @@ |
// Check that the last match info has space for the capture registers and the |
// additional information. Ensure no overflow in add. |
ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); |
- __ movq(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); |
- __ SmiToInteger32(rax, rax); |
+ __ movl(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); |
__ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead)); |
__ cmpl(rdx, rax); |
__ j(greater, &runtime); |
@@ -8546,10 +8560,9 @@ |
// Make the hash mask from the length of the number string cache. It |
// contains two elements (number and string) for each cache entry. |
- __ movq(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); |
- // Divide smi tagged length by two. |
- __ PositiveSmiDivPowerOfTwoToInteger32(mask, mask, 1); |
- __ subq(mask, Immediate(1)); // Make mask. |
+ __ movl(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); |
+ __ shrl(mask, Immediate(1)); // Divide length by two (length is not a smi). |
+ __ subl(mask, Immediate(1)); // Make mask. |
// Calculate the entry in the number string cache. The hash value in the |
// number string cache for smis is just the smi value, and the hash for |
@@ -9069,6 +9082,7 @@ |
// Get the parameters pointer from the stack and untag the length. |
__ movq(rdx, Operand(rsp, 2 * kPointerSize)); |
+ __ SmiToInteger32(rcx, rcx); |
// Setup the elements pointer in the allocated arguments object and |
// initialize the header in the elements fixed array. |
@@ -9076,8 +9090,7 @@ |
__ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); |
__ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); |
__ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); |
- __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); |
- __ SmiToInteger32(rcx, rcx); // Untag length for the loop below. |
+ __ movl(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); |
// Copy the fixed array slots. |
Label loop; |
@@ -10848,7 +10861,7 @@ |
__ bind(&allocated); |
// Fill the fields of the cons string. |
__ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx); |
- __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset), |
+ __ movl(FieldOperand(rcx, ConsString::kHashFieldOffset), |
Immediate(String::kEmptyHashField)); |
__ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax); |
__ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx); |