| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 4180 matching lines...) |
| 4191 | 4191 |
| 4192 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset)); | 4192 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset)); |
| 4193 // Get the bridge array held in the enumeration index field. | 4193 // Get the bridge array held in the enumeration index field. |
| 4194 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset)); | 4194 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset)); |
| 4195 // Get the cache from the bridge array. | 4195 // Get the cache from the bridge array. |
| 4196 __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 4196 __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
| 4197 | 4197 |
| 4198 frame_->EmitPush(eax); // <- slot 3 | 4198 frame_->EmitPush(eax); // <- slot 3 |
| 4199 frame_->EmitPush(edx); // <- slot 2 | 4199 frame_->EmitPush(edx); // <- slot 2 |
| 4200 __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset)); | 4200 __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset)); |
| 4201 __ SmiTag(eax); | |
| 4202 frame_->EmitPush(eax); // <- slot 1 | 4201 frame_->EmitPush(eax); // <- slot 1 |
| 4203 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 | 4202 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 |
| 4204 entry.Jump(); | 4203 entry.Jump(); |
| 4205 | 4204 |
| 4206 fixed_array.Bind(); | 4205 fixed_array.Bind(); |
| 4207 // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast) | 4206 // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast) |
| 4208 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3 | 4207 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3 |
| 4209 frame_->EmitPush(eax); // <- slot 2 | 4208 frame_->EmitPush(eax); // <- slot 2 |
| 4210 | 4209 |
| 4211 // Push the length of the array and the initial index onto the stack. | 4210 // Push the length of the array and the initial index onto the stack. |
| 4212 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset)); | 4211 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset)); |
| 4213 __ SmiTag(eax); | |
| 4214 frame_->EmitPush(eax); // <- slot 1 | 4212 frame_->EmitPush(eax); // <- slot 1 |
| 4215 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 | 4213 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 |
| 4216 | 4214 |
| 4217 // Condition. | 4215 // Condition. |
| 4218 entry.Bind(); | 4216 entry.Bind(); |
| 4219 // Grab the current frame's height for the break and continue | 4217 // Grab the current frame's height for the break and continue |
| 4220 // targets only after all the state is pushed on the frame. | 4218 // targets only after all the state is pushed on the frame. |
| 4221 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); | 4219 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 4222 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); | 4220 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 4223 | 4221 |
| (...skipping 2368 matching lines...) |
| 6592 | 6590 |
| 6593 // Fill out the elements FixedArray. | 6591 // Fill out the elements FixedArray. |
| 6594 // eax: JSArray. | 6592 // eax: JSArray. |
| 6595 // ebx: FixedArray. | 6593 // ebx: FixedArray. |
| 6596 // ecx: Number of elements in array, as smi. | 6594 // ecx: Number of elements in array, as smi. |
| 6597 | 6595 |
| 6598 // Set map. | 6596 // Set map. |
| 6599 __ mov(FieldOperand(ebx, HeapObject::kMapOffset), | 6597 __ mov(FieldOperand(ebx, HeapObject::kMapOffset), |
| 6600 Immediate(Factory::fixed_array_map())); | 6598 Immediate(Factory::fixed_array_map())); |
| 6601 // Set length. | 6599 // Set length. |
| 6602 __ SmiUntag(ecx); | |
| 6603 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx); | 6600 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx); |
| 6604 // Fill contents of fixed-array with the-hole. | 6601 // Fill contents of fixed-array with the-hole. |
| | 6602 __ SmiUntag(ecx); |
| 6605 __ mov(edx, Immediate(Factory::the_hole_value())); | 6603 __ mov(edx, Immediate(Factory::the_hole_value())); |
| 6606 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize)); | 6604 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize)); |
| 6607 // Fill fixed array elements with hole. | 6605 // Fill fixed array elements with hole. |
| 6608 // eax: JSArray. | 6606 // eax: JSArray. |
| 6609 // ecx: Number of elements to fill. | 6607 // ecx: Number of elements to fill. |
| 6610 // ebx: Start of elements in FixedArray. | 6608 // ebx: Start of elements in FixedArray. |
| 6611 // edx: the hole. | 6609 // edx: the hole. |
| 6612 Label loop; | 6610 Label loop; |
| 6613 __ test(ecx, Operand(ecx)); | 6611 __ test(ecx, Operand(ecx)); |
| 6614 __ bind(&loop); | 6612 __ bind(&loop); |
| (...skipping 83 matching lines...) |
| 6698 | 6696 |
| 6699 // Find a place to put the new cached value. | 6697 // Find a place to put the new cached value. |
| 6700 Label add_new_entry, update_cache; | 6698 Label add_new_entry, update_cache; |
| 6701 __ mov(ecx, Operand(esp, kPointerSize)); // restore the cache | 6699 __ mov(ecx, Operand(esp, kPointerSize)); // restore the cache |
| 6702 // Possible optimization: cache size is constant for the given cache | 6700 // Possible optimization: cache size is constant for the given cache |
| 6703 // so technically we could use a constant here. However, if we have | 6701 // so technically we could use a constant here. However, if we have |
| 6704 // a cache miss this optimization would hardly matter. | 6702 // a cache miss this optimization would hardly matter. |
| 6705 | 6703 |
| 6706 // Check if we could add a new entry to the cache. | 6704 // Check if we could add a new entry to the cache. |
| 6707 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset)); | 6705 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset)); |
| 6708 __ SmiTag(ebx); | |
| 6709 __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset)); | 6706 __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset)); |
| 6710 __ j(greater, &add_new_entry); | 6707 __ j(greater, &add_new_entry); |
| 6711 | 6708 |
| 6712 // Check if we could evict entry after finger. | 6709 // Check if we could evict entry after finger. |
| 6713 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset)); | 6710 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset)); |
| 6714 __ add(Operand(edx), Immediate(kEntrySizeSmi)); | 6711 __ add(Operand(edx), Immediate(kEntrySizeSmi)); |
| 6715 __ cmp(ebx, Operand(edx)); | 6712 __ cmp(ebx, Operand(edx)); |
| 6716 __ j(greater, &update_cache); | 6713 __ j(greater, &update_cache); |
| 6717 | 6714 |
| 6718 // Need to wrap over the cache. | 6715 // Need to wrap over the cache. |
| (...skipping 177 matching lines...) |
| 6896 __ mov(tmp2.reg(), Operand(index2.reg(), 0)); | 6893 __ mov(tmp2.reg(), Operand(index2.reg(), 0)); |
| 6897 __ mov(Operand(index2.reg(), 0), object.reg()); | 6894 __ mov(Operand(index2.reg(), 0), object.reg()); |
| 6898 __ mov(Operand(index1.reg(), 0), tmp2.reg()); | 6895 __ mov(Operand(index1.reg(), 0), tmp2.reg()); |
| 6899 | 6896 |
| 6900 Label done; | 6897 Label done; |
| 6901 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done); | 6898 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done); |
| 6902 // Possible optimization: do a check that both values are Smis | 6899 // Possible optimization: do a check that both values are Smis |
| 6903 // (or them and test against Smi mask.) | 6900 // (or them and test against Smi mask.) |
| 6904 | 6901 |
| 6905 __ mov(tmp2.reg(), tmp1.reg()); | 6902 __ mov(tmp2.reg(), tmp1.reg()); |
| 6906 RecordWriteStub recordWrite1(tmp2.reg(), index1.reg(), object.reg()); | 6903 __ RecordWriteHelper(tmp2.reg(), index1.reg(), object.reg()); |
| 6907 __ CallStub(&recordWrite1); | 6904 __ RecordWriteHelper(tmp1.reg(), index2.reg(), object.reg()); |
| 6908 | |
| 6909 RecordWriteStub recordWrite2(tmp1.reg(), index2.reg(), object.reg()); | |
| 6910 __ CallStub(&recordWrite2); | |
| 6911 | |
| 6912 __ bind(&done); | 6905 __ bind(&done); |
| 6913 | 6906 |
| 6914 deferred->BindExit(); | 6907 deferred->BindExit(); |
| 6915 frame_->Push(Factory::undefined_value()); | 6908 frame_->Push(Factory::undefined_value()); |
| 6916 } | 6909 } |
| 6917 | 6910 |
| 6918 | 6911 |
| 6919 void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) { | 6912 void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) { |
| 6920 Comment cmnt(masm_, "[ GenerateCallFunction"); | 6913 Comment cmnt(masm_, "[ GenerateCallFunction"); |
| 6921 | 6914 |
| (...skipping 1678 matching lines...) |
| 8600 Result key = frame_->Pop(); | 8593 Result key = frame_->Pop(); |
| 8601 Result receiver = frame_->Pop(); | 8594 Result receiver = frame_->Pop(); |
| 8602 key.ToRegister(); | 8595 key.ToRegister(); |
| 8603 receiver.ToRegister(); | 8596 receiver.ToRegister(); |
| 8604 | 8597 |
| 8605 // Use a fresh temporary to load the elements without destroying | 8598 // Use a fresh temporary to load the elements without destroying |
| 8606 // the receiver which is needed for the deferred slow case. | 8599 // the receiver which is needed for the deferred slow case. |
| 8607 Result elements = allocator()->Allocate(); | 8600 Result elements = allocator()->Allocate(); |
| 8608 ASSERT(elements.is_valid()); | 8601 ASSERT(elements.is_valid()); |
| 8609 | 8602 |
| 8610 // Use a fresh temporary for the index and later the loaded | 8603 result = elements; |
| 8611 // value. | |
| 8612 result = allocator()->Allocate(); | |
| 8613 ASSERT(result.is_valid()); | |
| 8614 | 8604 |
| 8615 DeferredReferenceGetKeyedValue* deferred = | 8605 DeferredReferenceGetKeyedValue* deferred = |
| 8616 new DeferredReferenceGetKeyedValue(result.reg(), | 8606 new DeferredReferenceGetKeyedValue(elements.reg(), |
| 8617 receiver.reg(), | 8607 receiver.reg(), |
| 8618 key.reg()); | 8608 key.reg()); |
| 8619 | 8609 |
| 8620 __ test(receiver.reg(), Immediate(kSmiTagMask)); | 8610 __ test(receiver.reg(), Immediate(kSmiTagMask)); |
| 8621 deferred->Branch(zero); | 8611 deferred->Branch(zero); |
| 8622 | 8612 |
| 8623 // Initially, use an invalid map. The map is patched in the IC | 8613 // Initially, use an invalid map. The map is patched in the IC |
| 8624 // initialization code. | 8614 // initialization code. |
| 8625 __ bind(deferred->patch_site()); | 8615 __ bind(deferred->patch_site()); |
| 8626 // Use masm-> here instead of the double underscore macro since extra | 8616 // Use masm-> here instead of the double underscore macro since extra |
| (...skipping 11 matching lines...) |
| 8638 } | 8628 } |
| 8639 | 8629 |
| 8640 // Get the elements array from the receiver and check that it | 8630 // Get the elements array from the receiver and check that it |
| 8641 // is not a dictionary. | 8631 // is not a dictionary. |
| 8642 __ mov(elements.reg(), | 8632 __ mov(elements.reg(), |
| 8643 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); | 8633 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); |
| 8644 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset), | 8634 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset), |
| 8645 Immediate(Factory::fixed_array_map())); | 8635 Immediate(Factory::fixed_array_map())); |
| 8646 deferred->Branch(not_equal); | 8636 deferred->Branch(not_equal); |
| 8647 | 8637 |
| 8648 // Shift the key to get the actual index value and check that | 8638 // Check that the key is within bounds. |
| 8649 // it is within bounds. Use unsigned comparison to handle negative keys. | 8639 __ cmp(key.reg(), |
| 8650 __ mov(result.reg(), key.reg()); | |
| 8651 __ SmiUntag(result.reg()); | |
| 8652 __ cmp(result.reg(), | |
| 8653 FieldOperand(elements.reg(), FixedArray::kLengthOffset)); | 8640 FieldOperand(elements.reg(), FixedArray::kLengthOffset)); |
| 8654 deferred->Branch(above_equal); | 8641 deferred->Branch(above_equal); |
| 8655 | 8642 |
| 8656 // Load and check that the result is not the hole. | 8643 // Load and check that the result is not the hole. |
| | 8644 ASSERT((kSmiTag == 0) && (kSmiTagSize == 1)); |
| 8657 __ mov(result.reg(), Operand(elements.reg(), | 8645 __ mov(result.reg(), Operand(elements.reg(), |
| 8658 result.reg(), | 8646 key.reg(), |
| 8659 times_4, | 8647 times_2, |
| 8660 FixedArray::kHeaderSize - kHeapObjectTag)); | 8648 FixedArray::kHeaderSize - kHeapObjectTag)); |
| 8661 elements.Unuse(); | |
| 8662 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value())); | 8649 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value())); |
| 8663 deferred->Branch(equal); | 8650 deferred->Branch(equal); |
| 8664 __ IncrementCounter(&Counters::keyed_load_inline, 1); | 8651 __ IncrementCounter(&Counters::keyed_load_inline, 1); |
| 8665 | 8652 |
| 8666 deferred->BindExit(); | 8653 deferred->BindExit(); |
| 8667 } else { | 8654 } else { |
| 8668 Comment cmnt(masm_, "[ Load from keyed Property"); | 8655 Comment cmnt(masm_, "[ Load from keyed Property"); |
| 8669 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET); | 8656 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET); |
| 8670 // Make sure that we do not have a test instruction after the | 8657 // Make sure that we do not have a test instruction after the |
| 8671 // call. A test instruction after the call is used to | 8658 // call. A test instruction after the call is used to |
| (...skipping 64 matching lines...) |
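The inline fast path above now compares the smi key directly against the smi-tagged FixedArray length and indexes the elements with a times_2 scale instead of untagging the key first. A minimal standalone sketch (not V8 source) of why that arithmetic works, assuming kSmiTag == 0, kSmiTagSize == 1 and 32-bit pointers, as the new ASSERT states:

```cpp
// Standalone sketch of the smi arithmetic used by the keyed-load fast path.
// Assumes kSmiTag == 0, kSmiTagSize == 1 and 4-byte pointers (ia32).
#include <cassert>
#include <cstdint>

namespace {
constexpr int kSmiTagSize = 1;
constexpr int kPointerSize = 4;  // ia32

int32_t SmiTag(int32_t value) { return value << kSmiTagSize; }
int32_t SmiUntag(int32_t smi) { return smi >> kSmiTagSize; }
}  // namespace

int main() {
  for (int32_t i = 0; i < 16; ++i) {
    const int32_t smi = SmiTag(i);
    assert(SmiUntag(smi) == i);
    // Operand(elements, key, times_2, offset) addresses elements + 2 * smi,
    // which equals elements + i * kPointerSize: the smi-tagged key can index
    // pointer-size elements without being untagged first, and it can be
    // compared directly against a smi-tagged length.
    assert(2 * smi == i * kPointerSize);
  }
  return 0;
}
```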
| 8736 FieldOperand(receiver.reg(), JSArray::kLengthOffset)); | 8723 FieldOperand(receiver.reg(), JSArray::kLengthOffset)); |
| 8737 deferred->Branch(above_equal); | 8724 deferred->Branch(above_equal); |
| 8738 | 8725 |
| 8739 // Get the elements array from the receiver and check that it is not a | 8726 // Get the elements array from the receiver and check that it is not a |
| 8740 // dictionary. | 8727 // dictionary. |
| 8741 __ mov(tmp.reg(), | 8728 __ mov(tmp.reg(), |
| 8742 FieldOperand(receiver.reg(), JSArray::kElementsOffset)); | 8729 FieldOperand(receiver.reg(), JSArray::kElementsOffset)); |
| 8743 | 8730 |
| 8744 // Check whether it is possible to omit the write barrier. If the elements | 8731 // Check whether it is possible to omit the write barrier. If the elements |
| 8745 // array is in new space or the value written is a smi, we can safely update | 8732 // array is in new space or the value written is a smi, we can safely update |
| 8746 // the elements array without updating the remembered set. | 8733 // the elements array without a write barrier. |
| 8747 Label in_new_space; | 8734 Label in_new_space; |
| 8748 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space); | 8735 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space); |
| 8749 if (!value_is_constant) { | 8736 if (!value_is_constant) { |
| 8750 __ test(result.reg(), Immediate(kSmiTagMask)); | 8737 __ test(result.reg(), Immediate(kSmiTagMask)); |
| 8751 deferred->Branch(not_zero); | 8738 deferred->Branch(not_zero); |
| 8752 } | 8739 } |
| 8753 | 8740 |
| 8754 __ bind(&in_new_space); | 8741 __ bind(&in_new_space); |
| 8755 // Bind the deferred code patch site to be able to locate the fixed | 8742 // Bind the deferred code patch site to be able to locate the fixed |
| 8756 // array map comparison. When debugging, we patch this comparison to | 8743 // array map comparison. When debugging, we patch this comparison to |
| (...skipping 251 matching lines...) |
| 9008 Label gc; | 8995 Label gc; |
| 9009 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 8996 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
| 9010 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, | 8997 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, |
| 9011 eax, ebx, ecx, &gc, TAG_OBJECT); | 8998 eax, ebx, ecx, &gc, TAG_OBJECT); |
| 9012 | 8999 |
| 9013 // Get the function from the stack. | 9000 // Get the function from the stack. |
| 9014 __ mov(ecx, Operand(esp, 1 * kPointerSize)); | 9001 __ mov(ecx, Operand(esp, 1 * kPointerSize)); |
| 9015 | 9002 |
| 9016 // Setup the object header. | 9003 // Setup the object header. |
| 9017 __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map()); | 9004 __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map()); |
| 9018 __ mov(FieldOperand(eax, Array::kLengthOffset), Immediate(length)); | 9005 __ mov(FieldOperand(eax, Context::kLengthOffset), |
| | 9006 Immediate(Smi::FromInt(length))); |
| 9019 | 9007 |
| 9020 // Setup the fixed slots. | 9008 // Setup the fixed slots. |
| 9021 __ xor_(ebx, Operand(ebx)); // Set to NULL. | 9009 __ xor_(ebx, Operand(ebx)); // Set to NULL. |
| 9022 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx); | 9010 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx); |
| 9023 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax); | 9011 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax); |
| 9024 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx); | 9012 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx); |
| 9025 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx); | 9013 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx); |
| 9026 | 9014 |
| 9027 // Copy the global object from the surrounding context. We go through the | 9015 // Copy the global object from the surrounding context. We go through the |
| 9028 // context in the function (ecx) to match the allocation behavior we have | 9016 // context in the function (ecx) to match the allocation behavior we have |
| (...skipping 1942 matching lines...) |
| 10971 // Get the length (smi tagged) and set that as an in-object property too. | 10959 // Get the length (smi tagged) and set that as an in-object property too. |
| 10972 ASSERT(Heap::arguments_length_index == 1); | 10960 ASSERT(Heap::arguments_length_index == 1); |
| 10973 __ mov(ecx, Operand(esp, 1 * kPointerSize)); | 10961 __ mov(ecx, Operand(esp, 1 * kPointerSize)); |
| 10974 __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx); | 10962 __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx); |
| 10975 | 10963 |
| 10976 // If there are no actual arguments, we're done. | 10964 // If there are no actual arguments, we're done. |
| 10977 Label done; | 10965 Label done; |
| 10978 __ test(ecx, Operand(ecx)); | 10966 __ test(ecx, Operand(ecx)); |
| 10979 __ j(zero, &done); | 10967 __ j(zero, &done); |
| 10980 | 10968 |
| 10981 // Get the parameters pointer from the stack and untag the length. | 10969 // Get the parameters pointer from the stack. |
| 10982 __ mov(edx, Operand(esp, 2 * kPointerSize)); | 10970 __ mov(edx, Operand(esp, 2 * kPointerSize)); |
| 10983 __ SmiUntag(ecx); | |
| 10984 | 10971 |
| 10985 // Setup the elements pointer in the allocated arguments object and | 10972 // Setup the elements pointer in the allocated arguments object and |
| 10986 // initialize the header in the elements fixed array. | 10973 // initialize the header in the elements fixed array. |
| 10987 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize)); | 10974 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize)); |
| 10988 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); | 10975 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); |
| 10989 __ mov(FieldOperand(edi, FixedArray::kMapOffset), | 10976 __ mov(FieldOperand(edi, FixedArray::kMapOffset), |
| 10990 Immediate(Factory::fixed_array_map())); | 10977 Immediate(Factory::fixed_array_map())); |
| 10991 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); | 10978 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); |
| | 10979 // Untag the length for the loop below. |
| | 10980 __ SmiUntag(ecx); |
| 10992 | 10981 |
| 10993 // Copy the fixed array slots. | 10982 // Copy the fixed array slots. |
| 10994 Label loop; | 10983 Label loop; |
| 10995 __ bind(&loop); | 10984 __ bind(&loop); |
| 10996 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. | 10985 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. |
| 10997 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); | 10986 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); |
| 10998 __ add(Operand(edi), Immediate(kPointerSize)); | 10987 __ add(Operand(edi), Immediate(kPointerSize)); |
| 10999 __ sub(Operand(edx), Immediate(kPointerSize)); | 10988 __ sub(Operand(edx), Immediate(kPointerSize)); |
| 11000 __ dec(ecx); | 10989 __ dec(ecx); |
| 11001 __ j(not_zero, &loop); | 10990 __ j(not_zero, &loop); |
| (...skipping 108 matching lines...) |
| 11110 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx); | 11099 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx); |
| 11111 __ j(not_equal, &runtime); | 11100 __ j(not_equal, &runtime); |
| 11112 // Check that the JSArray is in fast case. | 11101 // Check that the JSArray is in fast case. |
| 11113 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset)); | 11102 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset)); |
| 11114 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset)); | 11103 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset)); |
| 11115 __ cmp(eax, Factory::fixed_array_map()); | 11104 __ cmp(eax, Factory::fixed_array_map()); |
| 11116 __ j(not_equal, &runtime); | 11105 __ j(not_equal, &runtime); |
| 11117 // Check that the last match info has space for the capture registers and the | 11106 // Check that the last match info has space for the capture registers and the |
| 11118 // additional information. | 11107 // additional information. |
| 11119 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset)); | 11108 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset)); |
| | 11109 __ SmiUntag(eax); |
| 11120 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead)); | 11110 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead)); |
| 11121 __ cmp(edx, Operand(eax)); | 11111 __ cmp(edx, Operand(eax)); |
| 11122 __ j(greater, &runtime); | 11112 __ j(greater, &runtime); |
| 11123 | 11113 |
| 11124 // ecx: RegExp data (FixedArray) | 11114 // ecx: RegExp data (FixedArray) |
| 11125 // Check the representation and encoding of the subject string. | 11115 // Check the representation and encoding of the subject string. |
| 11126 Label seq_string, seq_two_byte_string, check_code; | 11116 Label seq_string, seq_two_byte_string, check_code; |
| 11127 const int kStringRepresentationEncodingMask = | 11117 const int kStringRepresentationEncodingMask = |
| 11128 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; | 11118 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; |
| 11129 __ mov(eax, Operand(esp, kSubjectOffset)); | 11119 __ mov(eax, Operand(esp, kSubjectOffset)); |
| (...skipping 223 matching lines...) |
| 11353 Register scratch = scratch2; | 11343 Register scratch = scratch2; |
| 11354 | 11344 |
| 11355 // Load the number string cache. | 11345 // Load the number string cache. |
| 11356 ExternalReference roots_address = ExternalReference::roots_address(); | 11346 ExternalReference roots_address = ExternalReference::roots_address(); |
| 11357 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex)); | 11347 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex)); |
| 11358 __ mov(number_string_cache, | 11348 __ mov(number_string_cache, |
| 11359 Operand::StaticArray(scratch, times_pointer_size, roots_address)); | 11349 Operand::StaticArray(scratch, times_pointer_size, roots_address)); |
| 11360 // Make the hash mask from the length of the number string cache. It | 11350 // Make the hash mask from the length of the number string cache. It |
| 11361 // contains two elements (number and string) for each cache entry. | 11351 // contains two elements (number and string) for each cache entry. |
| 11362 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); | 11352 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); |
| 11363 __ shr(mask, 1); // Divide length by two (length is not a smi). | 11353 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two. |
| 11364 __ sub(Operand(mask), Immediate(1)); // Make mask. | 11354 __ sub(Operand(mask), Immediate(1)); // Make mask. |
| 11365 | 11355 |
| 11366 // Calculate the entry in the number string cache. The hash value in the | 11356 // Calculate the entry in the number string cache. The hash value in the |
| 11367 // number string cache for smis is just the smi value, and the hash for | 11357 // number string cache for smis is just the smi value, and the hash for |
| 11368 // doubles is the xor of the upper and lower words. See | 11358 // doubles is the xor of the upper and lower words. See |
| 11369 // Heap::GetNumberStringCache. | 11359 // Heap::GetNumberStringCache. |
| 11370 Label smi_hash_calculated; | 11360 Label smi_hash_calculated; |
| 11371 Label load_result_from_cache; | 11361 Label load_result_from_cache; |
| 11372 if (object_is_smi) { | 11362 if (object_is_smi) { |
| 11373 __ mov(scratch, object); | 11363 __ mov(scratch, object); |
| (...skipping 70 matching lines...) |
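In the number-string-cache lookup above, the FixedArray length is now smi-tagged, so a single shift by kSmiTagSize + 1 both untags it and divides it by two (each cache entry spans two elements), and subtracting one yields the hash mask. A standalone sketch of that computation (not V8 source), with the cache capacity picked arbitrarily for illustration:

```cpp
// Standalone sketch of the hash-mask computation for the number string
// cache. Assumes kSmiTagSize == 1 and a hypothetical 64-entry cache.
#include <cassert>
#include <cstdint>

int main() {
  constexpr int kSmiTagSize = 1;
  constexpr int32_t kEntries = 64;  // assumed capacity, two elements each

  // FixedArray length as stored in the heap: element count, smi-tagged.
  const int32_t length_smi = (kEntries * 2) << kSmiTagSize;

  // __ shr(mask, kSmiTagSize + 1): untag and halve in one shift, then
  // subtract one to form the power-of-two mask.
  const int32_t mask = (length_smi >> (kSmiTagSize + 1)) - 1;
  assert(mask == kEntries - 1);

  // The hash is reduced with the mask; the entry index would then be doubled
  // to reach the (number, string) pair inside the FixedArray.
  const int32_t hash = 0x12345678;
  const int32_t entry = hash & mask;
  assert(entry >= 0 && entry < kEntries);
  return 0;
}
```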
| 11444 // Generate code to lookup number in the number string cache. | 11434 // Generate code to lookup number in the number string cache. |
| 11445 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime); | 11435 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime); |
| 11446 __ ret(1 * kPointerSize); | 11436 __ ret(1 * kPointerSize); |
| 11447 | 11437 |
| 11448 __ bind(&runtime); | 11438 __ bind(&runtime); |
| 11449 // Handle number to string in the runtime system if not found in the cache. | 11439 // Handle number to string in the runtime system if not found in the cache. |
| 11450 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); | 11440 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); |
| 11451 } | 11441 } |
| 11452 | 11442 |
| 11453 | 11443 |
| 11454 void RecordWriteStub::Generate(MacroAssembler* masm) { | |
| 11455 masm->RecordWriteHelper(object_, addr_, scratch_); | |
| 11456 masm->ret(0); | |
| 11457 } | |
| 11458 | |
| 11459 | |
| 11460 static int NegativeComparisonResult(Condition cc) { | 11444 static int NegativeComparisonResult(Condition cc) { |
| 11461 ASSERT(cc != equal); | 11445 ASSERT(cc != equal); |
| 11462 ASSERT((cc == less) || (cc == less_equal) | 11446 ASSERT((cc == less) || (cc == less_equal) |
| 11463 || (cc == greater) || (cc == greater_equal)); | 11447 || (cc == greater) || (cc == greater_equal)); |
| 11464 return (cc == greater || cc == greater_equal) ? LESS : GREATER; | 11448 return (cc == greater || cc == greater_equal) ? LESS : GREATER; |
| 11465 } | 11449 } |
| 11466 | 11450 |
| 11467 | 11451 |
| 11468 void CompareStub::Generate(MacroAssembler* masm) { | 11452 void CompareStub::Generate(MacroAssembler* masm) { |
| 11469 Label call_builtin, done; | 11453 Label call_builtin, done; |
| (...skipping 1814 matching lines...) |
| 13284 | 13268 |
| 13285 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 13269 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
| 13286 // tagged as a small integer. | 13270 // tagged as a small integer. |
| 13287 __ bind(&runtime); | 13271 __ bind(&runtime); |
| 13288 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 13272 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
| 13289 } | 13273 } |
| 13290 | 13274 |
| 13291 #undef __ | 13275 #undef __ |
| 13292 | 13276 |
| 13293 } } // namespace v8::internal | 13277 } } // namespace v8::internal |