Chromium Code Reviews

Side by Side Diff: src/ia32/codegen-ia32.cc

Issue 2114015: Cardmarking writebarrier. (Closed)
Patch Set: Created 10 years, 7 months ago
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 4183 matching lines...)
4194 4194
4195 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset)); 4195 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset));
4196 // Get the bridge array held in the enumeration index field. 4196 // Get the bridge array held in the enumeration index field.
4197 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset)); 4197 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
4198 // Get the cache from the bridge array. 4198 // Get the cache from the bridge array.
4199 __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset)); 4199 __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
4200 4200
4201 frame_->EmitPush(eax); // <- slot 3 4201 frame_->EmitPush(eax); // <- slot 3
4202 frame_->EmitPush(edx); // <- slot 2 4202 frame_->EmitPush(edx); // <- slot 2
4203 __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset)); 4203 __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
4204 __ SmiTag(eax);
4205 frame_->EmitPush(eax); // <- slot 1 4204 frame_->EmitPush(eax); // <- slot 1
4206 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 4205 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
4207 entry.Jump(); 4206 entry.Jump();
4208 4207
4209 fixed_array.Bind(); 4208 fixed_array.Bind();
4210 // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast) 4209 // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
4211 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3 4210 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3
4212 frame_->EmitPush(eax); // <- slot 2 4211 frame_->EmitPush(eax); // <- slot 2
4213 4212
4214 // Push the length of the array and the initial index onto the stack. 4213 // Push the length of the array and the initial index onto the stack.
4215 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset)); 4214 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
4216 __ SmiTag(eax);
4217 frame_->EmitPush(eax); // <- slot 1 4215 frame_->EmitPush(eax); // <- slot 1
4218 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 4216 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
4219 4217
4220 // Condition. 4218 // Condition.
4221 entry.Bind(); 4219 entry.Bind();
4222 // Grab the current frame's height for the break and continue 4220 // Grab the current frame's height for the break and continue
4223 // targets only after all the state is pushed on the frame. 4221 // targets only after all the state is pushed on the frame.
4224 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); 4222 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
4225 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); 4223 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
4226 4224
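In both enumeration setup paths above, the explicit __ SmiTag(eax) that used to follow the length load is dropped: FixedArray::kLengthOffset now holds a smi-tagged length, so it can be pushed into slot 1 as-is. The resulting pattern (registers as above):

  __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));  // length, already a smi
  frame_->EmitPush(eax);                          // <- slot 1 (loop bound, smi)
  frame_->EmitPush(Immediate(Smi::FromInt(0)));   // <- slot 0 (index, smi)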
(...skipping 2371 matching lines...)
6598 6596
6599 // Fill out the elements FixedArray. 6597 // Fill out the elements FixedArray.
6600 // eax: JSArray. 6598 // eax: JSArray.
6601 // ebx: FixedArray. 6599 // ebx: FixedArray.
6602 // ecx: Number of elements in array, as smi. 6600 // ecx: Number of elements in array, as smi.
6603 6601
6604 // Set map. 6602 // Set map.
6605 __ mov(FieldOperand(ebx, HeapObject::kMapOffset), 6603 __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
6606 Immediate(Factory::fixed_array_map())); 6604 Immediate(Factory::fixed_array_map()));
6607 // Set length. 6605 // Set length.
6608 __ SmiUntag(ecx);
6609 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx); 6606 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
6610 // Fill contents of fixed-array with the-hole. 6607 // Fill contents of fixed-array with the-hole.
6608 __ SmiUntag(ecx);
6611 __ mov(edx, Immediate(Factory::the_hole_value())); 6609 __ mov(edx, Immediate(Factory::the_hole_value()));
6612 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize)); 6610 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
6613 // Fill fixed array elements with hole. 6611 // Fill fixed array elements with hole.
6614 // eax: JSArray. 6612 // eax: JSArray.
6615 // ecx: Number of elements to fill. 6613 // ecx: Number of elements to fill.
6616 // ebx: Start of elements in FixedArray. 6614 // ebx: Start of elements in FixedArray.
6617 // edx: the hole. 6615 // edx: the hole.
6618 Label loop; 6616 Label loop;
6619 __ test(ecx, Operand(ecx)); 6617 __ test(ecx, Operand(ecx));
6620 __ bind(&loop); 6618 __ bind(&loop);
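In the chunk above the SmiUntag(ecx) moves below the length store: the smi-tagged element count is written to FixedArray::kLengthOffset, and ecx is untagged only once an integer loop counter is needed. The body of the fill loop is elided above; a minimal sketch of one way to write such a hole-fill loop, assuming ecx holds the untagged count, ebx the first element slot, and edx the hole value, is:

  Label fill, done;
  __ test(ecx, Operand(ecx));                      // nothing to do for zero elements
  __ j(zero, &done);
  __ bind(&fill);
  __ mov(Operand(ebx, 0), edx);                    // store the hole
  __ add(Operand(ebx), Immediate(kPointerSize));   // next slot
  __ dec(ecx);
  __ j(not_zero, &fill);
  __ bind(&done);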
(...skipping 83 matching lines...)
6704 6702
6705 // Find a place to put new cached value into. 6703 // Find a place to put new cached value into.
6706 Label add_new_entry, update_cache; 6704 Label add_new_entry, update_cache;
6707 __ mov(ecx, Operand(esp, kPointerSize)); // restore the cache 6705 __ mov(ecx, Operand(esp, kPointerSize)); // restore the cache
6708 // Possible optimization: cache size is constant for the given cache 6706 // Possible optimization: cache size is constant for the given cache
6709 // so technically we could use a constant here. However, if we have 6707 // so technically we could use a constant here. However, if we have
6710 // cache miss this optimization would hardly matter much. 6708 // cache miss this optimization would hardly matter much.
6711 6709
6712 // Check if we could add new entry to cache. 6710 // Check if we could add new entry to cache.
6713 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset)); 6711 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
6714 __ SmiTag(ebx);
6715 __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset)); 6712 __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
6716 __ j(greater, &add_new_entry); 6713 __ j(greater, &add_new_entry);
6717 6714
6718 // Check if we could evict entry after finger. 6715 // Check if we could evict entry after finger.
6719 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset)); 6716 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
6720 __ add(Operand(edx), Immediate(kEntrySizeSmi)); 6717 __ add(Operand(edx), Immediate(kEntrySizeSmi));
6721 __ cmp(ebx, Operand(edx)); 6718 __ cmp(ebx, Operand(edx));
6722 __ j(greater, &update_cache); 6719 __ j(greater, &update_cache);
6723 6720
6724 // Need to wrap over the cache. 6721 // Need to wrap over the cache.
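Above, the length loaded from FixedArray::kLengthOffset is no longer re-tagged before use: it is already a smi, as are the cache-size and finger fields it is compared against, so the comparisons and the finger increment work on smis directly (kEntrySizeSmi is assumed to be the smi-encoded entry size defined in an elided part of this function). With kSmiTag == 0 this is sound:

  // smi(a) + smi(b) == (a << kSmiTagSize) + (b << kSmiTagSize) == smi(a + b)
  __ add(Operand(edx), Immediate(kEntrySizeSmi));  // advance the finger by one entry, in smi units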
(...skipping 177 matching lines...)
6902 __ mov(tmp2.reg(), Operand(index2.reg(), 0)); 6899 __ mov(tmp2.reg(), Operand(index2.reg(), 0));
6903 __ mov(Operand(index2.reg(), 0), object.reg()); 6900 __ mov(Operand(index2.reg(), 0), object.reg());
6904 __ mov(Operand(index1.reg(), 0), tmp2.reg()); 6901 __ mov(Operand(index1.reg(), 0), tmp2.reg());
6905 6902
6906 Label done; 6903 Label done;
6907 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done); 6904 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
6908 // Possible optimization: do a check that both values are Smis 6905 // Possible optimization: do a check that both values are Smis
6909 // (or them and test against Smi mask.) 6906 // (or them and test against Smi mask.)
6910 6907
6911 __ mov(tmp2.reg(), tmp1.reg()); 6908 __ mov(tmp2.reg(), tmp1.reg());
6912 RecordWriteStub recordWrite1(tmp2.reg(), index1.reg(), object.reg()); 6909 __ RecordWriteHelper(tmp2.reg(), index1.reg(), object.reg());
6913 __ CallStub(&recordWrite1); 6910 __ RecordWriteHelper(tmp1.reg(), index2.reg(), object.reg());
6914
6915 RecordWriteStub recordWrite2(tmp1.reg(), index2.reg(), object.reg());
6916 __ CallStub(&recordWrite2);
6917
6918 __ bind(&done); 6911 __ bind(&done);
6919 6912
6920 deferred->BindExit(); 6913 deferred->BindExit();
6921 frame_->Push(Factory::undefined_value()); 6914 frame_->Push(Factory::undefined_value());
6922 } 6915 }
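The swap path above no longer builds two RecordWriteStub instances and calls them; the write barrier for the two element stores is emitted inline through RecordWriteHelper, in line with the card-marking write barrier this patch introduces. Condensed, the new barrier sequence guarded by the new-space check is:

  Label done;
  __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);  // objects in new space need no barrier
  __ mov(tmp2.reg(), tmp1.reg());                       // keep a copy; the helper presumably clobbers it
  __ RecordWriteHelper(tmp2.reg(), index1.reg(), object.reg());
  __ RecordWriteHelper(tmp1.reg(), index2.reg(), object.reg());
  __ bind(&done);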
6923 6916
6924 6917
6925 void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) { 6918 void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
6926 Comment cmnt(masm_, "[ GenerateCallFunction"); 6919 Comment cmnt(masm_, "[ GenerateCallFunction");
6927 6920
(...skipping 1678 matching lines...)
8606 Result key = frame_->Pop(); 8599 Result key = frame_->Pop();
8607 Result receiver = frame_->Pop(); 8600 Result receiver = frame_->Pop();
8608 key.ToRegister(); 8601 key.ToRegister();
8609 receiver.ToRegister(); 8602 receiver.ToRegister();
8610 8603
8611 // Use a fresh temporary to load the elements without destroying 8604 // Use a fresh temporary to load the elements without destroying
8612 // the receiver which is needed for the deferred slow case. 8605 // the receiver which is needed for the deferred slow case.
8613 Result elements = allocator()->Allocate(); 8606 Result elements = allocator()->Allocate();
8614 ASSERT(elements.is_valid()); 8607 ASSERT(elements.is_valid());
8615 8608
8616 // Use a fresh temporary for the index and later the loaded 8609 result = elements;
8617 // value.
8618 result = allocator()->Allocate();
8619 ASSERT(result.is_valid());
8620 8610
8621 DeferredReferenceGetKeyedValue* deferred = 8611 DeferredReferenceGetKeyedValue* deferred =
8622 new DeferredReferenceGetKeyedValue(result.reg(), 8612 new DeferredReferenceGetKeyedValue(elements.reg(),
8623 receiver.reg(), 8613 receiver.reg(),
8624 key.reg()); 8614 key.reg());
8625 8615
8626 __ test(receiver.reg(), Immediate(kSmiTagMask)); 8616 __ test(receiver.reg(), Immediate(kSmiTagMask));
8627 deferred->Branch(zero); 8617 deferred->Branch(zero);
8628 8618
8629 // Initially, use an invalid map. The map is patched in the IC 8619 // Initially, use an invalid map. The map is patched in the IC
8630 // initialization code. 8620 // initialization code.
8631 __ bind(deferred->patch_site()); 8621 __ bind(deferred->patch_site());
8632 // Use masm-> here instead of the double underscore macro since extra 8622 // Use masm-> here instead of the double underscore macro since extra
(...skipping 11 matching lines...)
8644 } 8634 }
8645 8635
8646 // Get the elements array from the receiver and check that it 8636 // Get the elements array from the receiver and check that it
8647 // is not a dictionary. 8637 // is not a dictionary.
8648 __ mov(elements.reg(), 8638 __ mov(elements.reg(),
8649 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); 8639 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
8650 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset), 8640 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
8651 Immediate(Factory::fixed_array_map())); 8641 Immediate(Factory::fixed_array_map()));
8652 deferred->Branch(not_equal); 8642 deferred->Branch(not_equal);
8653 8643
8654 // Shift the key to get the actual index value and check that 8644 // Check that the key is within bounds.
8655 // it is within bounds. Use unsigned comparison to handle negative keys. 8645 __ cmp(key.reg(),
8656 __ mov(result.reg(), key.reg());
8657 __ SmiUntag(result.reg());
8658 __ cmp(result.reg(),
8659 FieldOperand(elements.reg(), FixedArray::kLengthOffset)); 8646 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
8660 deferred->Branch(above_equal); 8647 deferred->Branch(above_equal);
8661 8648
8662 // Load and check that the result is not the hole. 8649 // Load and check that the result is not the hole.
8650 ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
8663 __ mov(result.reg(), Operand(elements.reg(), 8651 __ mov(result.reg(), Operand(elements.reg(),
8664 result.reg(), 8652 key.reg(),
8665 times_4, 8653 times_2,
8666 FixedArray::kHeaderSize - kHeapObjectTag)); 8654 FixedArray::kHeaderSize - kHeapObjectTag));
8667 elements.Unuse();
8668 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value())); 8655 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
8669 deferred->Branch(equal); 8656 deferred->Branch(equal);
8670 __ IncrementCounter(&Counters::keyed_load_inline, 1); 8657 __ IncrementCounter(&Counters::keyed_load_inline, 1);
8671 8658
8672 deferred->BindExit(); 8659 deferred->BindExit();
8673 } else { 8660 } else {
8674 Comment cmnt(masm_, "[ Load from keyed Property"); 8661 Comment cmnt(masm_, "[ Load from keyed Property");
8675 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET); 8662 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
8676 // Make sure that we do not have a test instruction after the 8663 // Make sure that we do not have a test instruction after the
8677 // call. A test instruction after the call is used to 8664 // call. A test instruction after the call is used to
(...skipping 64 matching lines...)
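In the inlined keyed-load fast path above, the key now stays smi-tagged throughout: the separate result register and the SmiUntag are gone (result reuses the elements register), the bounds check compares the smi key directly against the smi length, and the element is addressed with a times_2 scale instead of untagging and scaling by times_4, relying on kSmiTag == 0 and kSmiTagSize == 1 as the ASSERT notes. Schematically (&slow stands in for the deferred slow path used above):

  __ cmp(key.reg(), FieldOperand(elements.reg(), FixedArray::kLengthOffset));
  __ j(above_equal, &slow);          // unsigned compare also rejects negative keys
  __ mov(result.reg(), Operand(elements.reg(),
                               key.reg(),           // smi key == index << 1
                               times_2,             // so times_2 yields index * kPointerSize
                               FixedArray::kHeaderSize - kHeapObjectTag));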
8742 FieldOperand(receiver.reg(), JSArray::kLengthOffset)); 8729 FieldOperand(receiver.reg(), JSArray::kLengthOffset));
8743 deferred->Branch(above_equal); 8730 deferred->Branch(above_equal);
8744 8731
8745 // Get the elements array from the receiver and check that it is not a 8732 // Get the elements array from the receiver and check that it is not a
8746 // dictionary. 8733 // dictionary.
8747 __ mov(tmp.reg(), 8734 __ mov(tmp.reg(),
8748 FieldOperand(receiver.reg(), JSArray::kElementsOffset)); 8735 FieldOperand(receiver.reg(), JSArray::kElementsOffset));
8749 8736
8750 // Check whether it is possible to omit the write barrier. If the elements 8737 // Check whether it is possible to omit the write barrier. If the elements
8751 // array is in new space or the value written is a smi we can safely update 8738 // array is in new space or the value written is a smi we can safely update
8752 // the elements array without updating the remembered set. 8739 // the elements array without write barrier.
8753 Label in_new_space; 8740 Label in_new_space;
8754 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space); 8741 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
8755 if (!value_is_constant) { 8742 if (!value_is_constant) {
8756 __ test(result.reg(), Immediate(kSmiTagMask)); 8743 __ test(result.reg(), Immediate(kSmiTagMask));
8757 deferred->Branch(not_zero); 8744 deferred->Branch(not_zero);
8758 } 8745 }
8759 8746
8760 __ bind(&in_new_space); 8747 __ bind(&in_new_space);
8761 // Bind the deferred code patch site to be able to locate the fixed 8748 // Bind the deferred code patch site to be able to locate the fixed
8762 // array map comparison. When debugging, we patch this comparison to 8749 // array map comparison. When debugging, we patch this comparison to
(...skipping 251 matching lines...)
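The store path above keeps its structure, but the comment now speaks of omitting the write barrier rather than skipping a remembered-set update, matching the card-marking scheme: no barrier is needed when the elements array is in new space or when the stored value is a smi. The guard, condensed (with &slow standing in for the deferred branch, and the smi test emitted only when the value is not a known constant):

  Label in_new_space;
  __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);  // new-space target: no barrier
  __ test(result.reg(), Immediate(kSmiTagMask));               // smi values never need a barrier
  __ j(not_zero, &slow);
  __ bind(&in_new_space);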
9014 Label gc; 9001 Label gc;
9015 int length = slots_ + Context::MIN_CONTEXT_SLOTS; 9002 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
9016 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, 9003 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
9017 eax, ebx, ecx, &gc, TAG_OBJECT); 9004 eax, ebx, ecx, &gc, TAG_OBJECT);
9018 9005
9019 // Get the function from the stack. 9006 // Get the function from the stack.
9020 __ mov(ecx, Operand(esp, 1 * kPointerSize)); 9007 __ mov(ecx, Operand(esp, 1 * kPointerSize));
9021 9008
9022 // Setup the object header. 9009 // Setup the object header.
9023 __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map()); 9010 __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map());
9024 __ mov(FieldOperand(eax, Array::kLengthOffset), Immediate(length)); 9011 __ mov(FieldOperand(eax, Context::kLengthOffset),
9012 Immediate(Smi::FromInt(length)));
9025 9013
9026 // Setup the fixed slots. 9014 // Setup the fixed slots.
9027 __ xor_(ebx, Operand(ebx)); // Set to NULL. 9015 __ xor_(ebx, Operand(ebx)); // Set to NULL.
9028 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx); 9016 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
9029 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax); 9017 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
9030 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx); 9018 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
9031 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx); 9019 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);
9032 9020
9033 // Copy the global object from the surrounding context. We go through the 9021 // Copy the global object from the surrounding context. We go through the
9034 // context in the function (ecx) to match the allocation behavior we have 9022 // context in the function (ecx) to match the allocation behavior we have
(...skipping 1942 matching lines...)
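In the context-allocation stub above, the length is now stored through Context::kLengthOffset as a smi immediate instead of through Array::kLengthOffset as a raw integer, consistent with the smi-tagged length fields used elsewhere in this patch:

  __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map());
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(length)));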
10977 // Get the length (smi tagged) and set that as an in-object property too. 10965 // Get the length (smi tagged) and set that as an in-object property too.
10978 ASSERT(Heap::arguments_length_index == 1); 10966 ASSERT(Heap::arguments_length_index == 1);
10979 __ mov(ecx, Operand(esp, 1 * kPointerSize)); 10967 __ mov(ecx, Operand(esp, 1 * kPointerSize));
10980 __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx); 10968 __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx);
10981 10969
10982 // If there are no actual arguments, we're done. 10970 // If there are no actual arguments, we're done.
10983 Label done; 10971 Label done;
10984 __ test(ecx, Operand(ecx)); 10972 __ test(ecx, Operand(ecx));
10985 __ j(zero, &done); 10973 __ j(zero, &done);
10986 10974
10987 // Get the parameters pointer from the stack and untag the length. 10975 // Get the parameters pointer from the stack.
10988 __ mov(edx, Operand(esp, 2 * kPointerSize)); 10976 __ mov(edx, Operand(esp, 2 * kPointerSize));
10989 __ SmiUntag(ecx);
10990 10977
10991 // Setup the elements pointer in the allocated arguments object and 10978 // Setup the elements pointer in the allocated arguments object and
10992 // initialize the header in the elements fixed array. 10979 // initialize the header in the elements fixed array.
10993 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize)); 10980 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
10994 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); 10981 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
10995 __ mov(FieldOperand(edi, FixedArray::kMapOffset), 10982 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
10996 Immediate(Factory::fixed_array_map())); 10983 Immediate(Factory::fixed_array_map()));
10997 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); 10984 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
10985 // Untag the length for the loop below.
10986 __ SmiUntag(ecx);
10998 10987
10999 // Copy the fixed array slots. 10988 // Copy the fixed array slots.
11000 Label loop; 10989 Label loop;
11001 __ bind(&loop); 10990 __ bind(&loop);
11002 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. 10991 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver.
11003 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); 10992 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
11004 __ add(Operand(edi), Immediate(kPointerSize)); 10993 __ add(Operand(edi), Immediate(kPointerSize));
11005 __ sub(Operand(edx), Immediate(kPointerSize)); 10994 __ sub(Operand(edx), Immediate(kPointerSize));
11006 __ dec(ecx); 10995 __ dec(ecx);
11007 __ j(not_zero, &loop); 10996 __ j(not_zero, &loop);
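For the arguments object above, the length taken from the stack stays smi-tagged while it is stored as the in-object length property and as the elements array length; it is untagged only right before serving as the copy-loop counter:

  __ mov(ecx, Operand(esp, 1 * kPointerSize));                // length (smi)
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);  // store the smi length
  __ SmiUntag(ecx);                                           // integer count for the loop below
  // ... copy ecx parameter slots ...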
(...skipping 108 matching lines...)
11116 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx); 11105 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
11117 __ j(not_equal, &runtime); 11106 __ j(not_equal, &runtime);
11118 // Check that the JSArray is in fast case. 11107 // Check that the JSArray is in fast case.
11119 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset)); 11108 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
11120 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset)); 11109 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
11121 __ cmp(eax, Factory::fixed_array_map()); 11110 __ cmp(eax, Factory::fixed_array_map());
11122 __ j(not_equal, &runtime); 11111 __ j(not_equal, &runtime);
11123 // Check that the last match info has space for the capture registers and the 11112 // Check that the last match info has space for the capture registers and the
11124 // additional information. 11113 // additional information.
11125 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset)); 11114 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
11115 __ SmiUntag(eax);
11126 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead)); 11116 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
11127 __ cmp(edx, Operand(eax)); 11117 __ cmp(edx, Operand(eax));
11128 __ j(greater, &runtime); 11118 __ j(greater, &runtime);
11129 11119
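Because FixedArray::kLengthOffset now yields a smi, the capacity check above untags eax before comparing it with the required number of slots (edx, loaded in an elided chunk, presumably holds the capture-register count). Condensed:

  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));       // smi length
  __ SmiUntag(eax);                                                // integer length
  __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmp(edx, Operand(eax));
  __ j(greater, &runtime);                                         // not enough space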
11130 // ecx: RegExp data (FixedArray) 11120 // ecx: RegExp data (FixedArray)
11131 // Check the representation and encoding of the subject string. 11121 // Check the representation and encoding of the subject string.
11132 Label seq_string, seq_two_byte_string, check_code; 11122 Label seq_string, seq_two_byte_string, check_code;
11133 const int kStringRepresentationEncodingMask = 11123 const int kStringRepresentationEncodingMask =
11134 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; 11124 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
11135 __ mov(eax, Operand(esp, kSubjectOffset)); 11125 __ mov(eax, Operand(esp, kSubjectOffset));
(...skipping 223 matching lines...)
11359 Register scratch = scratch2; 11349 Register scratch = scratch2;
11360 11350
11361 // Load the number string cache. 11351 // Load the number string cache.
11362 ExternalReference roots_address = ExternalReference::roots_address(); 11352 ExternalReference roots_address = ExternalReference::roots_address();
11363 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex)); 11353 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
11364 __ mov(number_string_cache, 11354 __ mov(number_string_cache,
11365 Operand::StaticArray(scratch, times_pointer_size, roots_address)); 11355 Operand::StaticArray(scratch, times_pointer_size, roots_address));
11366 // Make the hash mask from the length of the number string cache. It 11356 // Make the hash mask from the length of the number string cache. It
11367 // contains two elements (number and string) for each cache entry. 11357 // contains two elements (number and string) for each cache entry.
11368 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); 11358 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
11369 __ shr(mask, 1); // Divide length by two (length is not a smi). 11359 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two.
11370 __ sub(Operand(mask), Immediate(1)); // Make mask. 11360 __ sub(Operand(mask), Immediate(1)); // Make mask.
11371 11361
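The hash mask above is now derived from a smi length: a single shift by kSmiTagSize + 1 both untags the length and divides it by two (each cache entry occupies two elements), before one is subtracted to form the mask. A worked example, assuming a cache of 64 entries:

  // 64 entries -> 128 elements -> smi-encoded length 256 (kSmiTagSize == 1)
  // 256 >> (kSmiTagSize + 1) == 64, and 64 - 1 == 63 == 0x3f, the mask
  __ shr(mask, kSmiTagSize + 1);        // untag and divide by two
  __ sub(Operand(mask), Immediate(1));  // make mask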
11372 // Calculate the entry in the number string cache. The hash value in the 11362 // Calculate the entry in the number string cache. The hash value in the
11373 // number string cache for smis is just the smi value, and the hash for 11363 // number string cache for smis is just the smi value, and the hash for
11374 // doubles is the xor of the upper and lower words. See 11364 // doubles is the xor of the upper and lower words. See
11375 // Heap::GetNumberStringCache. 11365 // Heap::GetNumberStringCache.
11376 Label smi_hash_calculated; 11366 Label smi_hash_calculated;
11377 Label load_result_from_cache; 11367 Label load_result_from_cache;
11378 if (object_is_smi) { 11368 if (object_is_smi) {
11379 __ mov(scratch, object); 11369 __ mov(scratch, object);
(...skipping 70 matching lines...)
11450 // Generate code to lookup number in the number string cache. 11440 // Generate code to lookup number in the number string cache.
11451 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime); 11441 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime);
11452 __ ret(1 * kPointerSize); 11442 __ ret(1 * kPointerSize);
11453 11443
11454 __ bind(&runtime); 11444 __ bind(&runtime);
11455 // Handle number to string in the runtime system if not found in the cache. 11445 // Handle number to string in the runtime system if not found in the cache.
11456 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); 11446 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
11457 } 11447 }
11458 11448
11459 11449
11460 void RecordWriteStub::Generate(MacroAssembler* masm) {
11461 masm->RecordWriteHelper(object_, addr_, scratch_);
11462 masm->ret(0);
11463 }
11464
11465
11466 static int NegativeComparisonResult(Condition cc) { 11450 static int NegativeComparisonResult(Condition cc) {
11467 ASSERT(cc != equal); 11451 ASSERT(cc != equal);
11468 ASSERT((cc == less) || (cc == less_equal) 11452 ASSERT((cc == less) || (cc == less_equal)
11469 || (cc == greater) || (cc == greater_equal)); 11453 || (cc == greater) || (cc == greater_equal));
11470 return (cc == greater || cc == greater_equal) ? LESS : GREATER; 11454 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
11471 } 11455 }
11472 11456
11473 11457
11474 void CompareStub::Generate(MacroAssembler* masm) { 11458 void CompareStub::Generate(MacroAssembler* masm) {
11475 Label call_builtin, done; 11459 Label call_builtin, done;
(...skipping 1816 matching lines...)
13292 // tagged as a small integer. 13276 // tagged as a small integer.
13293 __ bind(&runtime); 13277 __ bind(&runtime);
13294 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 13278 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
13295 } 13279 }
13296 13280
13297 #undef __ 13281 #undef __
13298 13282
13299 } } // namespace v8::internal 13283 } } // namespace v8::internal
13300 13284
13301 #endif // V8_TARGET_ARCH_IA32 13285 #endif // V8_TARGET_ARCH_IA32