OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1840 matching lines...)
1851 // the object) | 1851 // the object) |
1852 __ movq(rcx, rax); | 1852 __ movq(rcx, rax); |
1853 __ movq(rcx, FieldOperand(rcx, Map::kInstanceDescriptorsOffset)); | 1853 __ movq(rcx, FieldOperand(rcx, Map::kInstanceDescriptorsOffset)); |
1854 // Get the bridge array held in the enumeration index field. | 1854 // Get the bridge array held in the enumeration index field. |
1855 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset)); | 1855 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset)); |
1856 // Get the cache from the bridge array. | 1856 // Get the cache from the bridge array. |
1857 __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 1857 __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
1858 | 1858 |
1859 frame_->EmitPush(rax); // <- slot 3 | 1859 frame_->EmitPush(rax); // <- slot 3 |
1860 frame_->EmitPush(rdx); // <- slot 2 | 1860 frame_->EmitPush(rdx); // <- slot 2 |
1861 __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset)); | 1861 __ movl(rax, FieldOperand(rdx, FixedArray::kLengthOffset)); |
| 1862 __ Integer32ToSmi(rax, rax); |
1862 frame_->EmitPush(rax); // <- slot 1 | 1863 frame_->EmitPush(rax); // <- slot 1 |
1863 frame_->EmitPush(Smi::FromInt(0)); // <- slot 0 | 1864 frame_->EmitPush(Smi::FromInt(0)); // <- slot 0 |
1864 entry.Jump(); | 1865 entry.Jump(); |
1865 | 1866 |
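The new movl/Integer32ToSmi pair above reflects the FixedArray length field now holding a raw int32 rather than a smi, while the pushed loop state still wants a tagged value. A minimal sketch of the x64 smi tagging this implies, assuming kSmiShift == 32 (that the 32-bit payload lives in the upper word is read off the Integer32ToSmi/SmiToInteger32 names, not verified against the header):

#include <cassert>
#include <cstdint>

constexpr int kSmiShift = 32;  // assumed: smi payload occupies the upper 32 bits

int64_t Integer32ToSmi(int32_t value) {
  return static_cast<int64_t>(value) << kSmiShift;  // tag by shifting up
}

int32_t SmiToInteger32(int64_t smi) {
  return static_cast<int32_t>(smi >> kSmiShift);    // untag by shifting back
}

int main() {
  assert(SmiToInteger32(Integer32ToSmi(42)) == 42);
  assert((Integer32ToSmi(42) & 1) == 0);  // tagged value keeps the smi tag bit clear
}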
1866 fixed_array.Bind(); | 1867 fixed_array.Bind(); |
1867 // rax: fixed array (result from call to Runtime::kGetPropertyNamesFast) | 1868 // rax: fixed array (result from call to Runtime::kGetPropertyNamesFast) |
1868 frame_->EmitPush(Smi::FromInt(0)); // <- slot 3 | 1869 frame_->EmitPush(Smi::FromInt(0)); // <- slot 3 |
1869 frame_->EmitPush(rax); // <- slot 2 | 1870 frame_->EmitPush(rax); // <- slot 2 |
1870 | 1871 |
1871 // Push the length of the array and the initial index onto the stack. | 1872 // Push the length of the array and the initial index onto the stack. |
1872 __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset)); | 1873 __ movl(rax, FieldOperand(rax, FixedArray::kLengthOffset)); |
| 1874 __ Integer32ToSmi(rax, rax); |
1873 frame_->EmitPush(rax); // <- slot 1 | 1875 frame_->EmitPush(rax); // <- slot 1 |
1874 frame_->EmitPush(Smi::FromInt(0)); // <- slot 0 | 1876 frame_->EmitPush(Smi::FromInt(0)); // <- slot 0 |
1875 | 1877 |
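Both branches leave the same four-slot for-in state on the frame; a sketch of the layout the pushes above produce (slot descriptions are inferred from the code, not taken from a header):

// Frame state at the loop condition, slot 0 on top:
//   slot 3: map of the object (or Smi 0 when iterating a plain fixed array)
//   slot 2: fixed array of property names (the enum cache)
//   slot 1: its length, as a smi -- hence the new Integer32ToSmi after the movl
//   slot 0: current index, starting at Smi 0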
1876 // Condition. | 1878 // Condition. |
1877 entry.Bind(); | 1879 entry.Bind(); |
1878 // Grab the current frame's height for the break and continue | 1880 // Grab the current frame's height for the break and continue |
1879 // targets only after all the state is pushed on the frame. | 1881 // targets only after all the state is pushed on the frame. |
1880 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); | 1882 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); |
1881 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); | 1883 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
1882 | 1884 |
(...skipping 1950 matching lines...)
3833 | 3835 |
3834 __ Move(kScratchRegister, Factory::null_value()); | 3836 __ Move(kScratchRegister, Factory::null_value()); |
3835 __ cmpq(obj.reg(), kScratchRegister); | 3837 __ cmpq(obj.reg(), kScratchRegister); |
3836 destination()->true_target()->Branch(equal); | 3838 destination()->true_target()->Branch(equal); |
3837 | 3839 |
3838 __ movq(kScratchRegister, FieldOperand(obj.reg(), HeapObject::kMapOffset)); | 3840 __ movq(kScratchRegister, FieldOperand(obj.reg(), HeapObject::kMapOffset)); |
3839 // Undetectable objects behave like undefined when tested with typeof. | 3841 // Undetectable objects behave like undefined when tested with typeof. |
3840 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset), | 3842 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset), |
3841 Immediate(1 << Map::kIsUndetectable)); | 3843 Immediate(1 << Map::kIsUndetectable)); |
3842 destination()->false_target()->Branch(not_zero); | 3844 destination()->false_target()->Branch(not_zero); |
3843 __ movzxbq(kScratchRegister, | 3845 __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE); |
3844 FieldOperand(kScratchRegister, Map::kInstanceTypeOffset)); | 3846 destination()->false_target()->Branch(less); |
3845 __ cmpq(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE)); | 3847 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE); |
3846 destination()->false_target()->Branch(below); | |
3847 __ cmpq(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE)); | |
3848 obj.Unuse(); | 3848 obj.Unuse(); |
3849 destination()->Split(below_equal); | 3849 destination()->Split(less_equal); |
3850 } | 3850 } |
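CmpInstanceType is a MacroAssembler helper replacing the old movzxbq/cmpq pair; presumably it compares the map's instance-type byte directly, along these lines (a sketch of the helper, not the actual macro-assembler source):

// Hedged sketch: one byte compare instead of a zero-extending load plus cmpq.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}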
3851 | 3851 |
3852 | 3852 |
3853 void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) { | 3853 void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) { |
3854 // This generates a fast version of: | 3854 // This generates a fast version of: |
3855 // (%_ClassOf(arg) === 'Function') | 3855 // (%_ClassOf(arg) === 'Function') |
3856 ASSERT(args->length() == 1); | 3856 ASSERT(args->length() == 1); |
3857 Load(args->at(0)); | 3857 Load(args->at(0)); |
3858 Result obj = frame_->Pop(); | 3858 Result obj = frame_->Pop(); |
3859 obj.ToRegister(); | 3859 obj.ToRegister(); |
(...skipping 471 matching lines...)
4331 __ CallRuntime(Runtime::kNumberUnaryMinus, 1); | 4331 __ CallRuntime(Runtime::kNumberUnaryMinus, 1); |
4332 __ movq(rbx, rax); | 4332 __ movq(rbx, rax); |
4333 | 4333 |
4334 __ bind(&heapnumber_allocated); | 4334 __ bind(&heapnumber_allocated); |
4335 | 4335 |
4336 // Return a random uint32 number in rax. | 4336 // Return a random uint32 number in rax. |
4337 // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs. | 4337 // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs. |
4338 __ PrepareCallCFunction(0); | 4338 __ PrepareCallCFunction(0); |
4339 __ CallCFunction(ExternalReference::random_uint32_function(), 0); | 4339 __ CallCFunction(ExternalReference::random_uint32_function(), 0); |
4340 | 4340 |
4341 // Convert 32 random bits in rax to 0.(32 random bits) in a double | 4341 // Convert 32 random bits in eax to 0.(32 random bits) in a double |
4342 // by computing: | 4342 // by computing: |
4343 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). | 4343 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). |
4344 __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single. | 4344 __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single. |
4345 __ movd(xmm1, rcx); | 4345 __ movd(xmm1, rcx); |
4346 __ movd(xmm0, rax); | 4346 __ movd(xmm0, rax); |
4347 __ cvtss2sd(xmm1, xmm1); | 4347 __ cvtss2sd(xmm1, xmm1); |
4348 __ xorpd(xmm0, xmm1); | 4348 __ xorpd(xmm0, xmm1); |
4349 __ subsd(xmm0, xmm1); | 4349 __ subsd(xmm0, xmm1); |
4350 __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0); | 4350 __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0); |
4351 | 4351 |
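The comment's bit trick can be checked in plain C++; a standalone model of the movd/cvtss2sd/xorpd/subsd sequence above (the function name is illustrative):

#include <cstdint>
#include <cstdio>
#include <cstring>

// 0x49800000 is 1.0 x 2^20 as an IEEE single. Widened to double its mantissa
// is all zeros, so XOR-ing 32 random bits into the low word produces
// 1.(20 zeros)(32 random bits) x 2^20, and subtracting 2^20 leaves
// (random bits) x 2^-32 -- a double in [0, 1).
double RandomBitsToDouble(uint32_t bits) {
  uint32_t single = 0x49800000u;
  float two_pow_20f;
  std::memcpy(&two_pow_20f, &single, sizeof(single));  // movd
  double base = static_cast<double>(two_pow_20f);      // cvtss2sd: 1048576.0
  uint64_t d_bits;
  std::memcpy(&d_bits, &base, sizeof(base));
  d_bits ^= bits;                                      // xorpd
  double x;
  std::memcpy(&x, &d_bits, sizeof(x));
  return x - base;                                     // subsd
}

int main() {
  std::printf("%.10f %.10f\n", RandomBitsToDouble(0u), RandomBitsToDouble(0xFFFFFFFFu));
  // 0.0000000000 and 0.9999999998
}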
(...skipping 74 matching lines...)
4426 | 4426 |
4427 // Fill out the elements FixedArray. | 4427 // Fill out the elements FixedArray. |
4428 // rax: JSArray. | 4428 // rax: JSArray. |
4429 // rcx: FixedArray. | 4429 // rcx: FixedArray. |
4430 // rbx: Number of elements in array as int32. | 4430 // rbx: Number of elements in array as int32. |
4431 | 4431 |
4432 // Set map. | 4432 // Set map. |
4433 __ Move(FieldOperand(rcx, HeapObject::kMapOffset), | 4433 __ Move(FieldOperand(rcx, HeapObject::kMapOffset), |
4434 Factory::fixed_array_map()); | 4434 Factory::fixed_array_map()); |
4435 // Set length. | 4435 // Set length. |
4436 __ Integer32ToSmi(rdx, rbx); | 4436 __ movl(FieldOperand(rcx, FixedArray::kLengthOffset), rbx); |
4437 __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx); | |
4438 // Fill contents of fixed-array with the-hole. | 4437 // Fill contents of fixed-array with the-hole. |
4439 __ Move(rdx, Factory::the_hole_value()); | 4438 __ Move(rdx, Factory::the_hole_value()); |
4440 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize)); | 4439 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize)); |
4441 // Fill fixed array elements with hole. | 4440 // Fill fixed array elements with hole. |
4442 // rax: JSArray. | 4441 // rax: JSArray. |
4443 // rbx: Number of elements in array that remains to be filled, as int32. | 4442 // rbx: Number of elements in array that remains to be filled, as int32. |
4444 // rcx: Start of elements in FixedArray. | 4443 // rcx: Start of elements in FixedArray. |
4445 // rdx: the hole. | 4444 // rdx: the hole. |
4446 Label loop; | 4445 Label loop; |
4447 __ testl(rbx, rbx); | 4446 __ testl(rbx, rbx); |
(...skipping 98 matching lines...)
4546 __ InvokeFunction(rdi, expected, CALL_FUNCTION); | 4545 __ InvokeFunction(rdi, expected, CALL_FUNCTION); |
4547 | 4546 |
4548 // Find a place to put the new cached value. | 4547 // Find a place to put the new cached value. |
4549 Label add_new_entry, update_cache; | 4548 Label add_new_entry, update_cache; |
4550 __ movq(rcx, Operand(rsp, kPointerSize)); // restore the cache | 4549 __ movq(rcx, Operand(rsp, kPointerSize)); // restore the cache |
4551 // Possible optimization: cache size is constant for the given cache | 4550 // Possible optimization: cache size is constant for the given cache |
4552 // so technically we could use a constant here. However, if we have a | 4551 // so technically we could use a constant here. However, if we have a |
4553 // cache miss, this optimization would hardly matter. | 4552 // cache miss, this optimization would hardly matter. |
4554 | 4553 |
4556 // Check if we can add a new entry to the cache. | 4554 // Check if we can add a new entry to the cache. |
4556 __ movq(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); | 4555 __ movl(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); |
4557 __ movq(r9, FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset)); | 4556 __ movq(r9, FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset)); |
4558 __ SmiCompare(rbx, r9); | 4557 __ SmiToInteger32(r9, r9); |
| 4558 __ cmpq(rbx, r9); |
4559 __ j(greater, &add_new_entry); | 4559 __ j(greater, &add_new_entry); |
4560 | 4560 |
4562 // Check if we can evict the entry after the finger. | 4561 // Check if we can evict the entry after the finger. |
4562 __ movq(rdx, FieldOperand(rcx, JSFunctionResultCache::kFingerOffset)); | 4562 __ movq(rdx, FieldOperand(rcx, JSFunctionResultCache::kFingerOffset)); |
4563 __ SmiToInteger32(rdx, rdx); | 4563 __ SmiToInteger32(rdx, rdx); |
4564 __ SmiToInteger32(rbx, rbx); | |
4565 __ addq(rdx, kEntrySizeImm); | 4564 __ addq(rdx, kEntrySizeImm); |
4566 Label forward; | 4565 Label forward; |
4567 __ cmpq(rbx, rdx); | 4566 __ cmpq(rbx, rdx); |
4568 __ j(greater, &forward); | 4567 __ j(greater, &forward); |
4570 // Need to wrap around to the start of the cache. | 4569 // Need to wrap around to the start of the cache. |
4570 __ movq(rdx, kEntriesIndexImm); | 4569 __ movq(rdx, kEntriesIndexImm); |
4571 __ bind(&forward); | 4570 __ bind(&forward); |
4572 __ Integer32ToSmi(r9, rdx); | 4571 __ Integer32ToSmi(r9, rdx); |
4573 __ jmp(&update_cache); | 4572 __ jmp(&update_cache); |
4574 | 4573 |
4575 __ bind(&add_new_entry); | 4574 __ bind(&add_new_entry); |
4576 // r9 holds cache size as smi. | 4575 // r9 holds cache size as int. |
4577 __ SmiToInteger32(rdx, r9); | 4576 __ movq(rdx, r9); |
| 4577 __ Integer32ToSmi(r9, r9); |
4578 __ SmiAddConstant(rbx, r9, Smi::FromInt(JSFunctionResultCache::kEntrySize)); | 4578 __ SmiAddConstant(rbx, r9, Smi::FromInt(JSFunctionResultCache::kEntrySize)); |
4579 __ movq(FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset), rbx); | 4579 __ movq(FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset), rbx); |
4580 | 4580 |
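A hedged C++ model of the add/evict decision above. The slot layout is an assumption read off the constants used in the assembly (kEntriesIndexImm as the first key slot, kEntrySizeImm == 2 for a key/value pair), not checked against JSFunctionResultCache's header:

#include <vector>

struct ResultCacheModel {
  // Assumed slot layout: [ ..., finger, size, key0, val0, key1, val1, ... ].
  static const int kEntriesIndex = 3;  // assumed value of kEntriesIndexImm
  static const int kEntrySize = 2;     // key + value
  std::vector<void*> slots;
  int finger = kEntriesIndex;          // key slot of the last hit
  int size = kEntriesIndex;            // first unused slot

  void Put(void* key, void* value) {
    int length = static_cast<int>(slots.size());
    int index;
    if (length > size) {               // room left: append a new entry
      index = size;
      size += kEntrySize;
    } else {                           // full: evict the entry after the finger
      index = finger + kEntrySize;
      if (index >= length) index = kEntriesIndex;  // wrap to the first entry
    }
    finger = index;                    // matches the kFingerOffset store below
    slots[index] = key;
    slots[index + 1] = value;
  }
};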
4581 // Update the cache itself. | 4581 // Update the cache itself. |
4582 // rdx holds the index as int. | 4582 // rdx holds the index as int. |
4583 // r9 holds the index as smi. | 4583 // r9 holds the index as smi. |
4584 __ bind(&update_cache); | 4584 __ bind(&update_cache); |
4585 __ pop(rbx); // restore the key | 4585 __ pop(rbx); // restore the key |
4586 __ movq(FieldOperand(rcx, JSFunctionResultCache::kFingerOffset), r9); | 4586 __ movq(FieldOperand(rcx, JSFunctionResultCache::kFingerOffset), r9); |
4587 // Store key. | 4587 // Store key. |
(...skipping 2537 matching lines...)
7125 Result key = frame_->Pop(); | 7125 Result key = frame_->Pop(); |
7126 Result receiver = frame_->Pop(); | 7126 Result receiver = frame_->Pop(); |
7127 key.ToRegister(); | 7127 key.ToRegister(); |
7128 receiver.ToRegister(); | 7128 receiver.ToRegister(); |
7129 | 7129 |
7130 // Use a fresh temporary to load the elements without destroying | 7130 // Use a fresh temporary to load the elements without destroying |
7131 // the receiver, which is needed for the deferred slow case. | 7131 // the receiver, which is needed for the deferred slow case. |
7132 Result elements = allocator()->Allocate(); | 7132 Result elements = allocator()->Allocate(); |
7133 ASSERT(elements.is_valid()); | 7133 ASSERT(elements.is_valid()); |
7134 | 7134 |
| 7135 // Use a fresh temporary for the index and later the loaded |
| 7136 // value. |
| 7137 Result index = allocator()->Allocate(); |
| 7138 ASSERT(index.is_valid()); |
| 7139 |
7135 DeferredReferenceGetKeyedValue* deferred = | 7140 DeferredReferenceGetKeyedValue* deferred = |
7136 new DeferredReferenceGetKeyedValue(elements.reg(), | 7141 new DeferredReferenceGetKeyedValue(index.reg(), |
7137 receiver.reg(), | 7142 receiver.reg(), |
7138 key.reg(), | 7143 key.reg(), |
7139 is_global); | 7144 is_global); |
7140 | 7145 |
7141 // Check that the receiver is not a smi (only needed if this | 7146 // Check that the receiver is not a smi (only needed if this |
7142 // is not a load from the global context) and that it has the | 7147 // is not a load from the global context) and that it has the |
7143 // expected map. | 7148 // expected map. |
7144 if (!is_global) { | 7149 if (!is_global) { |
7145 __ JumpIfSmi(receiver.reg(), deferred->entry_label()); | 7150 __ JumpIfSmi(receiver.reg(), deferred->entry_label()); |
7146 } | 7151 } |
(...skipping 15 matching lines...)
7162 __ JumpIfNotPositiveSmi(key.reg(), deferred->entry_label()); | 7167 __ JumpIfNotPositiveSmi(key.reg(), deferred->entry_label()); |
7163 | 7168 |
7164 // Get the elements array from the receiver and check that it | 7169 // Get the elements array from the receiver and check that it |
7165 // is not a dictionary. | 7170 // is not a dictionary. |
7166 __ movq(elements.reg(), | 7171 __ movq(elements.reg(), |
7167 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); | 7172 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); |
7168 __ Cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset), | 7173 __ Cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset), |
7169 Factory::fixed_array_map()); | 7174 Factory::fixed_array_map()); |
7170 deferred->Branch(not_equal); | 7175 deferred->Branch(not_equal); |
7171 | 7176 |
7172 // Check that key is within bounds. | 7177 // Shift the key to get the actual index value and check that |
7173 __ SmiCompare(key.reg(), | 7178 // it is within bounds. |
7174 FieldOperand(elements.reg(), FixedArray::kLengthOffset)); | 7179 __ SmiToInteger32(index.reg(), key.reg()); |
| 7180 __ cmpl(index.reg(), |
| 7181 FieldOperand(elements.reg(), FixedArray::kLengthOffset)); |
7175 deferred->Branch(above_equal); | 7182 deferred->Branch(above_equal); |
7176 | 7183 |
7177 // The key register holds the smi-tagged key. Load the value and | 7184 // The index register holds the un-smi-tagged key. It has been |
7178 // check that it is not the hole value. | 7185 // zero-extended to 64 bits, so it can be used directly as an index in the |
7179 Result value = elements; | 7186 // operand below. |
7180 SmiIndex index = | 7187 // Load and check that the result is not the hole. We could |
7181 masm_->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2); | 7188 // reuse the index or elements register for the value. |
| 7189 // |
| 7190 // TODO(206): Consider whether it makes sense to try some |
| 7191 // heuristic about which register to reuse. For example, if |
| 7192 // one is rax, then we can reuse that one because the value |
| 7193 // coming from the deferred code will be in rax. |
| 7194 Result value = index; |
7182 __ movq(value.reg(), | 7195 __ movq(value.reg(), |
7183 FieldOperand(elements.reg(), | 7196 Operand(elements.reg(), |
7184 index.reg, | 7197 index.reg(), |
7185 index.scale, | 7198 times_pointer_size, |
7186 FixedArray::kHeaderSize)); | 7199 FixedArray::kHeaderSize - kHeapObjectTag)); |
| 7200 elements.Unuse(); |
| 7201 index.Unuse(); |
7187 __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex); | 7202 __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex); |
7188 deferred->Branch(equal); | 7203 deferred->Branch(equal); |
7189 __ IncrementCounter(&Counters::keyed_load_inline, 1); | 7204 __ IncrementCounter(&Counters::keyed_load_inline, 1); |
7190 | 7205 |
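For reference on the FieldOperand-to-Operand switch above: in this codebase FieldOperand(obj, off) is shorthand for Operand(obj, off - kHeapObjectTag), so once the index arrives in its own zero-extended register the tag adjustment has to be spelled out:

//   FieldOperand(obj, off)  ==  Operand(obj, off - kHeapObjectTag)
// so the element address computed by the movq above is
//   elements + index * kPointerSize + (FixedArray::kHeaderSize - kHeapObjectTag)
// with the scaled-index form carrying the untagged key from SmiToInteger32.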
7191 deferred->BindExit(); | 7206 deferred->BindExit(); |
7192 // Restore the receiver and key to the frame and push the | 7207 // Restore the receiver and key to the frame and push the |
7193 // result on top of it. | 7208 // result on top of it. |
7194 frame_->Push(&receiver); | 7209 frame_->Push(&receiver); |
7195 frame_->Push(&key); | 7210 frame_->Push(&key); |
7196 return value; | 7211 return value; |
(...skipping 278 matching lines...)
7475 key.reg()); | 7490 key.reg()); |
7476 deferred->Branch(below_equal); | 7491 deferred->Branch(below_equal); |
7477 | 7492 |
7478 // Get the elements array from the receiver and check that it | 7493 // Get the elements array from the receiver and check that it |
7479 // is a flat array (not a dictionary). | 7494 // is a flat array (not a dictionary). |
7480 __ movq(tmp.reg(), | 7495 __ movq(tmp.reg(), |
7481 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); | 7496 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); |
7482 | 7497 |
7483 // Check whether it is possible to omit the write barrier. If the | 7498 // Check whether it is possible to omit the write barrier. If the |
7484 // elements array is in new space or the value written is a smi we can | 7499 // elements array is in new space or the value written is a smi we can |
7485 // safely update the elements array without write barrier. | 7500 // safely update the elements array without updating the remembered set. |
7486 Label in_new_space; | 7501 Label in_new_space; |
7487 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space); | 7502 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space); |
7488 if (!value_is_constant) { | 7503 if (!value_is_constant) { |
7489 __ JumpIfNotSmi(value.reg(), deferred->entry_label()); | 7504 __ JumpIfNotSmi(value.reg(), deferred->entry_label()); |
7490 } | 7505 } |
7491 | 7506 |
7492 __ bind(&in_new_space); | 7507 __ bind(&in_new_space); |
7493 // Bind the deferred code patch site to be able to locate the | 7508 // Bind the deferred code patch site to be able to locate the |
7494 // fixed array map comparison. When debugging, we patch this | 7509 // fixed array map comparison. When debugging, we patch this |
7495 // comparison to always fail so that we will hit the IC call | 7510 // comparison to always fail so that we will hit the IC call |
7496 // in the deferred code which will allow the debugger to | 7511 // in the deferred code which will allow the debugger to |
7497 // break for fast case stores. | 7512 // break for fast case stores. |
7498 __ bind(deferred->patch_site()); | 7513 __ bind(deferred->patch_site()); |
7499 // Avoid using __ to ensure the distance from patch_site | 7514 // Avoid using __ to ensure the distance from patch_site |
7500 // to the map address is always the same. | 7515 // to the map address is always the same. |
7501 masm->movq(kScratchRegister, Factory::fixed_array_map(), | 7516 masm->movq(kScratchRegister, Factory::fixed_array_map(), |
7502 RelocInfo::EMBEDDED_OBJECT); | 7517 RelocInfo::EMBEDDED_OBJECT); |
7503 __ cmpq(FieldOperand(tmp.reg(), HeapObject::kMapOffset), | 7518 __ cmpq(FieldOperand(tmp.reg(), HeapObject::kMapOffset), |
7504 kScratchRegister); | 7519 kScratchRegister); |
7505 deferred->Branch(not_equal); | 7520 deferred->Branch(not_equal); |
7506 | 7521 |
7507 // Store the value. | 7522 // Store the value. |
7508 SmiIndex index = | 7523 SmiIndex index = |
7509 masm->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2); | 7524 masm->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2); |
7510 __ movq(FieldOperand(tmp.reg(), | 7525 __ movq(Operand(tmp.reg(), |
7511 index.reg, | 7526 index.reg, |
7512 index.scale, | 7527 index.scale, |
7513 FixedArray::kHeaderSize), | 7528 FixedArray::kHeaderSize - kHeapObjectTag), |
7514 value.reg()); | 7529 value.reg()); |
7515 __ IncrementCounter(&Counters::keyed_store_inline, 1); | 7530 __ IncrementCounter(&Counters::keyed_store_inline, 1); |
7516 | 7531 |
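SmiToIndex produces the (index.reg, index.scale) pair consumed by the store above. With the 32-bit-shifted smi representation, a single arithmetic shift both untags and scales; a standalone sketch, assuming kSmiShift == 32 and kPointerSizeLog2 == 3:

#include <cstdint>

// A smi holds value << 32 and the store wants a byte offset of value << 3,
// so shifting right by (32 - 3) does both at once, leaving scale == times_1.
int64_t SmiToByteIndex(int64_t smi_key) {
  return smi_key >> (32 - 3);  // == untagged value * kPointerSize
}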
7517 deferred->BindExit(); | 7532 deferred->BindExit(); |
7518 | 7533 |
7519 cgen_->frame()->Push(&receiver); | 7534 cgen_->frame()->Push(&receiver); |
7520 cgen_->frame()->Push(&key); | 7535 cgen_->frame()->Push(&key); |
7521 cgen_->frame()->Push(&value); | 7536 cgen_->frame()->Push(&value); |
7522 } else { | 7537 } else { |
7523 Result answer = cgen_->frame()->CallKeyedStoreIC(); | 7538 Result answer = cgen_->frame()->CallKeyedStoreIC(); |
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
7585 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 7600 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
7586 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, | 7601 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, |
7587 rax, rbx, rcx, &gc, TAG_OBJECT); | 7602 rax, rbx, rcx, &gc, TAG_OBJECT); |
7588 | 7603 |
7589 // Get the function from the stack. | 7604 // Get the function from the stack. |
7590 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 7605 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
7591 | 7606 |
7592 // Setup the object header. | 7607 // Setup the object header. |
7593 __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex); | 7608 __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex); |
7594 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 7609 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
7595 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | 7610 __ movl(FieldOperand(rax, Array::kLengthOffset), Immediate(length)); |
7596 | 7611 |
7597 // Setup the fixed slots. | 7612 // Setup the fixed slots. |
7598 __ xor_(rbx, rbx); // Set to NULL. | 7613 __ xor_(rbx, rbx); // Set to NULL. |
7599 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); | 7614 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); |
7600 __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax); | 7615 __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax); |
7601 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx); | 7616 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx); |
7602 __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx); | 7617 __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx); |
7603 | 7618 |
7604 // Copy the global object from the surrounding context. | 7619 // Copy the global object from the surrounding context. |
7605 __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 7620 __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
(...skipping 654 matching lines...)
8260 __ CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister); | 8275 __ CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister); |
8261 __ j(not_equal, &runtime); | 8276 __ j(not_equal, &runtime); |
8262 // Check that the JSArray is in fast case. | 8277 // Check that the JSArray is in fast case. |
8263 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset)); | 8278 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset)); |
8264 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset)); | 8279 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset)); |
8265 __ Cmp(rax, Factory::fixed_array_map()); | 8280 __ Cmp(rax, Factory::fixed_array_map()); |
8266 __ j(not_equal, &runtime); | 8281 __ j(not_equal, &runtime); |
8267 // Check that the last match info has space for the capture registers and the | 8282 // Check that the last match info has space for the capture registers and the |
8268 // additional information. Ensure no overflow in add. | 8283 // additional information. Ensure no overflow in add. |
8269 ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); | 8284 ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); |
8270 __ movq(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); | 8285 __ movl(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); |
8271 __ SmiToInteger32(rax, rax); | |
8272 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead)); | 8286 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead)); |
8273 __ cmpl(rdx, rax); | 8287 __ cmpl(rdx, rax); |
8274 __ j(greater, &runtime); | 8288 __ j(greater, &runtime); |
8275 | 8289 |
8276 // ecx: RegExp data (FixedArray) | 8290 // ecx: RegExp data (FixedArray) |
8277 // Check the representation and encoding of the subject string. | 8291 // Check the representation and encoding of the subject string. |
8278 Label seq_string, seq_two_byte_string, check_code; | 8292 Label seq_string, seq_two_byte_string, check_code; |
8279 const int kStringRepresentationEncodingMask = | 8293 const int kStringRepresentationEncodingMask = |
8280 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; | 8294 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; |
8281 __ movq(rax, Operand(rsp, kSubjectOffset)); | 8295 __ movq(rax, Operand(rsp, kSubjectOffset)); |
(...skipping 257 matching lines...)
8539 // Use of registers. Register result is used as a temporary. | 8553 // Use of registers. Register result is used as a temporary. |
8540 Register number_string_cache = result; | 8554 Register number_string_cache = result; |
8541 Register mask = scratch1; | 8555 Register mask = scratch1; |
8542 Register scratch = scratch2; | 8556 Register scratch = scratch2; |
8543 | 8557 |
8544 // Load the number string cache. | 8558 // Load the number string cache. |
8545 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex); | 8559 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex); |
8546 | 8560 |
8547 // Make the hash mask from the length of the number string cache. It | 8561 // Make the hash mask from the length of the number string cache. It |
8548 // contains two elements (number and string) for each cache entry. | 8562 // contains two elements (number and string) for each cache entry. |
8549 __ movq(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); | 8563 __ movl(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); |
8550 // Divide smi tagged length by two. | 8564 __ shrl(mask, Immediate(1)); // Divide length by two (length is not a smi). |
8551 __ PositiveSmiDivPowerOfTwoToInteger32(mask, mask, 1); | 8565 __ subl(mask, Immediate(1)); // Make mask. |
8552 __ subq(mask, Immediate(1)); // Make mask. | |
8553 | 8566 |
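A standalone model of the probe being set up above, following the comment's description of Heap::GetNumberStringCache (the pairwise [number, string] slot layout and power-of-two length are assumptions):

#include <cstdint>
#include <cstring>

uint32_t SmiHash(int32_t value) { return static_cast<uint32_t>(value); }

uint32_t DoubleHash(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));  // xor of upper and lower words
  return static_cast<uint32_t>(bits) ^ static_cast<uint32_t>(bits >> 32);
}

// length is the raw element count; each cache entry occupies two slots.
int NumberSlot(uint32_t hash, int length) {
  int mask = (length >> 1) - 1;  // the shrl/subl above; length assumed a power of two
  return static_cast<int>(hash & static_cast<uint32_t>(mask)) * 2;  // string slot is +1
}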
8554 // Calculate the entry in the number string cache. The hash value in the | 8567 // Calculate the entry in the number string cache. The hash value in the |
8555 // number string cache for smis is just the smi value, and the hash for | 8568 // number string cache for smis is just the smi value, and the hash for |
8556 // doubles is the xor of the upper and lower words. See | 8569 // doubles is the xor of the upper and lower words. See |
8557 // Heap::GetNumberStringCache. | 8570 // Heap::GetNumberStringCache. |
8558 Label is_smi; | 8571 Label is_smi; |
8559 Label load_result_from_cache; | 8572 Label load_result_from_cache; |
8560 if (!object_is_smi) { | 8573 if (!object_is_smi) { |
8561 __ JumpIfSmi(object, &is_smi); | 8574 __ JumpIfSmi(object, &is_smi); |
8562 __ CheckMap(object, Factory::heap_number_map(), not_found, true); | 8575 __ CheckMap(object, Factory::heap_number_map(), not_found, true); |
(...skipping 499 matching lines...)
9062 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 9075 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
9063 __ movq(FieldOperand(rax, JSObject::kHeaderSize + kPointerSize), rcx); | 9076 __ movq(FieldOperand(rax, JSObject::kHeaderSize + kPointerSize), rcx); |
9064 | 9077 |
9065 // If there are no actual arguments, we're done. | 9078 // If there are no actual arguments, we're done. |
9066 Label done; | 9079 Label done; |
9067 __ testq(rcx, rcx); | 9080 __ testq(rcx, rcx); |
9068 __ j(zero, &done); | 9081 __ j(zero, &done); |
9069 | 9082 |
9070 // Get the parameters pointer from the stack and untag the length. | 9083 // Get the parameters pointer from the stack and untag the length. |
9071 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); | 9084 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); |
| 9085 __ SmiToInteger32(rcx, rcx); |
9072 | 9086 |
9073 // Setup the elements pointer in the allocated arguments object and | 9087 // Setup the elements pointer in the allocated arguments object and |
9074 // initialize the header in the elements fixed array. | 9088 // initialize the header in the elements fixed array. |
9075 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize)); | 9089 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize)); |
9076 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); | 9090 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); |
9077 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); | 9091 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); |
9078 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); | 9092 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); |
9079 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); | 9093 __ movl(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); |
9080 __ SmiToInteger32(rcx, rcx); // Untag length for the loop below. | |
9081 | 9094 |
9082 // Copy the fixed array slots. | 9095 // Copy the fixed array slots. |
9083 Label loop; | 9096 Label loop; |
9084 __ bind(&loop); | 9097 __ bind(&loop); |
9085 __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize)); // Skip receiver. | 9098 __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize)); // Skip receiver. |
9086 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister); | 9099 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister); |
9087 __ addq(rdi, Immediate(kPointerSize)); | 9100 __ addq(rdi, Immediate(kPointerSize)); |
9088 __ subq(rdx, Immediate(kPointerSize)); | 9101 __ subq(rdx, Immediate(kPointerSize)); |
9089 __ decq(rcx); | 9102 __ decq(rcx); |
9090 __ j(not_zero, &loop); | 9103 __ j(not_zero, &loop); |
(...skipping 1750 matching lines...)
10841 __ movl(rcx, r8); | 10854 __ movl(rcx, r8); |
10842 __ and_(rcx, r9); | 10855 __ and_(rcx, r9); |
10843 ASSERT(kStringEncodingMask == kAsciiStringTag); | 10856 ASSERT(kStringEncodingMask == kAsciiStringTag); |
10844 __ testl(rcx, Immediate(kAsciiStringTag)); | 10857 __ testl(rcx, Immediate(kAsciiStringTag)); |
10845 __ j(zero, &non_ascii); | 10858 __ j(zero, &non_ascii); |
10846 // Allocate an ascii cons string. | 10859 // Allocate an ascii cons string. |
10847 __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime); | 10860 __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime); |
10848 __ bind(&allocated); | 10861 __ bind(&allocated); |
10849 // Fill the fields of the cons string. | 10862 // Fill the fields of the cons string. |
10850 __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx); | 10863 __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx); |
10851 __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset), | 10864 __ movl(FieldOperand(rcx, ConsString::kHashFieldOffset), |
10852 Immediate(String::kEmptyHashField)); | 10865 Immediate(String::kEmptyHashField)); |
10853 __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax); | 10866 __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax); |
10854 __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx); | 10867 __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx); |
10855 __ movq(rax, rcx); | 10868 __ movq(rax, rcx); |
10856 __ IncrementCounter(&Counters::string_add_native, 1); | 10869 __ IncrementCounter(&Counters::string_add_native, 1); |
10857 __ ret(2 * kPointerSize); | 10870 __ ret(2 * kPointerSize); |
10858 __ bind(&non_ascii); | 10871 __ bind(&non_ascii); |
10859 // Allocate a two byte cons string. | 10872 // Allocate a two byte cons string. |
10860 __ AllocateConsString(rcx, rdi, no_reg, &string_add_runtime); | 10873 __ AllocateConsString(rcx, rdi, no_reg, &string_add_runtime); |
10861 __ jmp(&allocated); | 10874 __ jmp(&allocated); |
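The four stores above are the whole cost of the concatenation; a comment sketch of the cons-string layout they imply (the lazy-flattening remark is general V8 behavior, not shown in this diff):

// ConsString fields written above:
//   kLengthOffset    <- combined length (rbx)
//   kHashFieldOffset <- String::kEmptyHashField: the hash is computed lazily
//   kFirstOffset     <- left half  (rax)
//   kSecondOffset    <- right half (rdx)
// String '+' is thus O(1) pointer work; producing contiguous characters is
// deferred to a later flatten when some caller actually needs them.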
(...skipping 717 matching lines...)
11579 } | 11592 } |
11580 | 11593 |
11581 #endif | 11594 #endif |
11582 | 11595 |
11583 | 11596 |
11584 #undef __ | 11597 #undef __ |
11585 | 11598 |
11586 } } // namespace v8::internal | 11599 } } // namespace v8::internal |
11587 | 11600 |
11588 #endif // V8_TARGET_ARCH_X64 | 11601 #endif // V8_TARGET_ARCH_X64 |