Chromium Code Reviews

Side by Side Diff: src/ia32/codegen-ia32.cc

Issue 2274001: Revert r4715. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 10 years, 6 months ago
OLD | NEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 4180 matching lines...)
4191 4191
4192 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset)); 4192 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset));
4193 // Get the bridge array held in the enumeration index field. 4193 // Get the bridge array held in the enumeration index field.
4194 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset)); 4194 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
4195 // Get the cache from the bridge array. 4195 // Get the cache from the bridge array.
4196 __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset)); 4196 __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
4197 4197
4198 frame_->EmitPush(eax); // <- slot 3 4198 frame_->EmitPush(eax); // <- slot 3
4199 frame_->EmitPush(edx); // <- slot 2 4199 frame_->EmitPush(edx); // <- slot 2
4200 __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset)); 4200 __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
4201 __ SmiTag(eax);
4201 frame_->EmitPush(eax); // <- slot 1 4202 frame_->EmitPush(eax); // <- slot 1
4202 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 4203 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
4203 entry.Jump(); 4204 entry.Jump();
4204 4205
4205 fixed_array.Bind(); 4206 fixed_array.Bind();
4206 // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast) 4207 // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
4207 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3 4208 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3
4208 frame_->EmitPush(eax); // <- slot 2 4209 frame_->EmitPush(eax); // <- slot 2
4209 4210
4210 // Push the length of the array and the initial index onto the stack. 4211 // Push the length of the array and the initial index onto the stack.
4211 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset)); 4212 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
4213 __ SmiTag(eax);
4212 frame_->EmitPush(eax); // <- slot 1 4214 frame_->EmitPush(eax); // <- slot 1
4213 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 4215 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
4214 4216
4215 // Condition. 4217 // Condition.
4216 entry.Bind(); 4218 entry.Bind();
4217 // Grab the current frame's height for the break and continue 4219 // Grab the current frame's height for the break and continue
4218 // targets only after all the state is pushed on the frame. 4220 // targets only after all the state is pushed on the frame.
4219 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); 4221 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
4220 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); 4222 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
4221 4223
(...skipping 2369 matching lines...)
6591 6593
6592 // Fill out the elements FixedArray. 6594 // Fill out the elements FixedArray.
6593 // eax: JSArray. 6595 // eax: JSArray.
6594 // ebx: FixedArray. 6596 // ebx: FixedArray.
6595 // ecx: Number of elements in array, as smi. 6597 // ecx: Number of elements in array, as smi.
6596 6598
6597 // Set map. 6599 // Set map.
6598 __ mov(FieldOperand(ebx, HeapObject::kMapOffset), 6600 __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
6599 Immediate(Factory::fixed_array_map())); 6601 Immediate(Factory::fixed_array_map()));
6600 // Set length. 6602 // Set length.
6603 __ SmiUntag(ecx);
6601 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx); 6604 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
6602 // Fill contents of fixed-array with the-hole. 6605 // Fill contents of fixed-array with the-hole.
6603 __ SmiUntag(ecx);
6604 __ mov(edx, Immediate(Factory::the_hole_value())); 6606 __ mov(edx, Immediate(Factory::the_hole_value()));
6605 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize)); 6607 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
6606 // Fill fixed array elements with hole. 6608 // Fill fixed array elements with hole.
6607 // eax: JSArray. 6609 // eax: JSArray.
6608 // ecx: Number of elements to fill. 6610 // ecx: Number of elements to fill.
6609 // ebx: Start of elements in FixedArray. 6611 // ebx: Start of elements in FixedArray.
6610 // edx: the hole. 6612 // edx: the hole.
6611 Label loop; 6613 Label loop;
6612 __ test(ecx, Operand(ecx)); 6614 __ test(ecx, Operand(ecx));
6613 __ bind(&loop); 6615 __ bind(&loop);
(...skipping 83 matching lines...)
6697 6699
6698 // Find a place to put new cached value into. 6700 // Find a place to put new cached value into.
6699 Label add_new_entry, update_cache; 6701 Label add_new_entry, update_cache;
6700 __ mov(ecx, Operand(esp, kPointerSize)); // restore the cache 6702 __ mov(ecx, Operand(esp, kPointerSize)); // restore the cache
6701 // Possible optimization: cache size is constant for the given cache 6703 // Possible optimization: cache size is constant for the given cache
6702 // so technically we could use a constant here. However, if we have 6704 // so technically we could use a constant here. However, if we have
6703 // cache miss this optimization would hardly matter much. 6705 // cache miss this optimization would hardly matter much.
6704 6706
6705 // Check if we could add new entry to cache. 6707 // Check if we could add new entry to cache.
6706 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset)); 6708 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
6709 __ SmiTag(ebx);
6707 __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset)); 6710 __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
6708 __ j(greater, &add_new_entry); 6711 __ j(greater, &add_new_entry);
6709 6712
6710 // Check if we could evict entry after finger. 6713 // Check if we could evict entry after finger.
6711 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset)); 6714 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
6712 __ add(Operand(edx), Immediate(kEntrySizeSmi)); 6715 __ add(Operand(edx), Immediate(kEntrySizeSmi));
6713 __ cmp(ebx, Operand(edx)); 6716 __ cmp(ebx, Operand(edx));
6714 __ j(greater, &update_cache); 6717 __ j(greater, &update_cache);
6715 6718
6716 // Need to wrap over the cache. 6719 // Need to wrap over the cache.
(...skipping 177 matching lines...)
6894 __ mov(tmp2.reg(), Operand(index2.reg(), 0)); 6897 __ mov(tmp2.reg(), Operand(index2.reg(), 0));
6895 __ mov(Operand(index2.reg(), 0), object.reg()); 6898 __ mov(Operand(index2.reg(), 0), object.reg());
6896 __ mov(Operand(index1.reg(), 0), tmp2.reg()); 6899 __ mov(Operand(index1.reg(), 0), tmp2.reg());
6897 6900
6898 Label done; 6901 Label done;
6899 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done); 6902 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
6900 // Possible optimization: do a check that both values are Smis 6903 // Possible optimization: do a check that both values are Smis
6901 // (or them and test against Smi mask.) 6904 // (or them and test against Smi mask.)
6902 6905
6903 __ mov(tmp2.reg(), tmp1.reg()); 6906 __ mov(tmp2.reg(), tmp1.reg());
6904 __ RecordWriteHelper(tmp2.reg(), index1.reg(), object.reg()); 6907 RecordWriteStub recordWrite1(tmp2.reg(), index1.reg(), object.reg());
6905 __ RecordWriteHelper(tmp1.reg(), index2.reg(), object.reg()); 6908 __ CallStub(&recordWrite1);
6909
6910 RecordWriteStub recordWrite2(tmp1.reg(), index2.reg(), object.reg());
6911 __ CallStub(&recordWrite2);
6912
6906 __ bind(&done); 6913 __ bind(&done);
6907 6914
6908 deferred->BindExit(); 6915 deferred->BindExit();
6909 frame_->Push(Factory::undefined_value()); 6916 frame_->Push(Factory::undefined_value());
6910 } 6917 }
6911 6918
6912 6919
6913 void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) { 6920 void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
6914 Comment cmnt(masm_, "[ GenerateCallFunction"); 6921 Comment cmnt(masm_, "[ GenerateCallFunction");
6915 6922
(...skipping 1678 matching lines...)
8594 Result key = frame_->Pop(); 8601 Result key = frame_->Pop();
8595 Result receiver = frame_->Pop(); 8602 Result receiver = frame_->Pop();
8596 key.ToRegister(); 8603 key.ToRegister();
8597 receiver.ToRegister(); 8604 receiver.ToRegister();
8598 8605
8599 // Use a fresh temporary to load the elements without destroying 8606 // Use a fresh temporary to load the elements without destroying
8600 // the receiver which is needed for the deferred slow case. 8607 // the receiver which is needed for the deferred slow case.
8601 Result elements = allocator()->Allocate(); 8608 Result elements = allocator()->Allocate();
8602 ASSERT(elements.is_valid()); 8609 ASSERT(elements.is_valid());
8603 8610
8604 result = elements; 8611 // Use a fresh temporary for the index and later the loaded
8612 // value.
8613 result = allocator()->Allocate();
8614 ASSERT(result.is_valid());
8605 8615
8606 DeferredReferenceGetKeyedValue* deferred = 8616 DeferredReferenceGetKeyedValue* deferred =
8607 new DeferredReferenceGetKeyedValue(elements.reg(), 8617 new DeferredReferenceGetKeyedValue(result.reg(),
8608 receiver.reg(), 8618 receiver.reg(),
8609 key.reg()); 8619 key.reg());
8610 8620
8611 __ test(receiver.reg(), Immediate(kSmiTagMask)); 8621 __ test(receiver.reg(), Immediate(kSmiTagMask));
8612 deferred->Branch(zero); 8622 deferred->Branch(zero);
8613 8623
8614 // Initially, use an invalid map. The map is patched in the IC 8624 // Initially, use an invalid map. The map is patched in the IC
8615 // initialization code. 8625 // initialization code.
8616 __ bind(deferred->patch_site()); 8626 __ bind(deferred->patch_site());
8617 // Use masm-> here instead of the double underscore macro since extra 8627 // Use masm-> here instead of the double underscore macro since extra
(...skipping 11 matching lines...)
8629 } 8639 }
8630 8640
8631 // Get the elements array from the receiver and check that it 8641 // Get the elements array from the receiver and check that it
8632 // is not a dictionary. 8642 // is not a dictionary.
8633 __ mov(elements.reg(), 8643 __ mov(elements.reg(),
8634 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); 8644 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
8635 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset), 8645 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
8636 Immediate(Factory::fixed_array_map())); 8646 Immediate(Factory::fixed_array_map()));
8637 deferred->Branch(not_equal); 8647 deferred->Branch(not_equal);
8638 8648
8639 // Check that the key is within bounds. 8649 // Shift the key to get the actual index value and check that
8640 __ cmp(key.reg(), 8650 // it is within bounds. Use unsigned comparison to handle negative keys.
8651 __ mov(result.reg(), key.reg());
8652 __ SmiUntag(result.reg());
8653 __ cmp(result.reg(),
8641 FieldOperand(elements.reg(), FixedArray::kLengthOffset)); 8654 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
8642 deferred->Branch(above_equal); 8655 deferred->Branch(above_equal);
8643 8656
8644 // Load and check that the result is not the hole. 8657 // Load and check that the result is not the hole.
8645 ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
8646 __ mov(result.reg(), Operand(elements.reg(), 8658 __ mov(result.reg(), Operand(elements.reg(),
8647 key.reg(), 8659 result.reg(),
8648 times_2, 8660 times_4,
8649 FixedArray::kHeaderSize - kHeapObjectTag)); 8661 FixedArray::kHeaderSize - kHeapObjectTag));
8662 elements.Unuse();
8650 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value())); 8663 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
8651 deferred->Branch(equal); 8664 deferred->Branch(equal);
8652 __ IncrementCounter(&Counters::keyed_load_inline, 1); 8665 __ IncrementCounter(&Counters::keyed_load_inline, 1);
8653 8666
8654 deferred->BindExit(); 8667 deferred->BindExit();
8655 } else { 8668 } else {
8656 Comment cmnt(masm_, "[ Load from keyed Property"); 8669 Comment cmnt(masm_, "[ Load from keyed Property");
8657 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET); 8670 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
8658 // Make sure that we do not have a test instruction after the 8671 // Make sure that we do not have a test instruction after the
8659 // call. A test instruction after the call is used to 8672 // call. A test instruction after the call is used to
(...skipping 64 matching lines...)
8724 FieldOperand(receiver.reg(), JSArray::kLengthOffset)); 8737 FieldOperand(receiver.reg(), JSArray::kLengthOffset));
8725 deferred->Branch(above_equal); 8738 deferred->Branch(above_equal);
8726 8739
8727 // Get the elements array from the receiver and check that it is not a 8740 // Get the elements array from the receiver and check that it is not a
8728 // dictionary. 8741 // dictionary.
8729 __ mov(tmp.reg(), 8742 __ mov(tmp.reg(),
8730 FieldOperand(receiver.reg(), JSArray::kElementsOffset)); 8743 FieldOperand(receiver.reg(), JSArray::kElementsOffset));
8731 8744
8732 // Check whether it is possible to omit the write barrier. If the elements 8745 // Check whether it is possible to omit the write barrier. If the elements
8733 // array is in new space or the value written is a smi we can safely update 8746 // array is in new space or the value written is a smi we can safely update
8734 // the elements array without write barrier. 8747 // the elements array without updating the remembered set.
8735 Label in_new_space; 8748 Label in_new_space;
8736 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space); 8749 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
8737 if (!value_is_constant) { 8750 if (!value_is_constant) {
8738 __ test(result.reg(), Immediate(kSmiTagMask)); 8751 __ test(result.reg(), Immediate(kSmiTagMask));
8739 deferred->Branch(not_zero); 8752 deferred->Branch(not_zero);
8740 } 8753 }
8741 8754
8742 __ bind(&in_new_space); 8755 __ bind(&in_new_space);
8743 // Bind the deferred code patch site to be able to locate the fixed 8756 // Bind the deferred code patch site to be able to locate the fixed
8744 // array map comparison. When debugging, we patch this comparison to 8757 // array map comparison. When debugging, we patch this comparison to
(...skipping 249 matching lines...)
8994 Label gc; 9007 Label gc;
8995 int length = slots_ + Context::MIN_CONTEXT_SLOTS; 9008 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
8996 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, 9009 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
8997 eax, ebx, ecx, &gc, TAG_OBJECT); 9010 eax, ebx, ecx, &gc, TAG_OBJECT);
8998 9011
8999 // Get the function from the stack. 9012 // Get the function from the stack.
9000 __ mov(ecx, Operand(esp, 1 * kPointerSize)); 9013 __ mov(ecx, Operand(esp, 1 * kPointerSize));
9001 9014
9002 // Setup the object header. 9015 // Setup the object header.
9003 __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map()); 9016 __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map());
9004 __ mov(FieldOperand(eax, Context::kLengthOffset), 9017 __ mov(FieldOperand(eax, Array::kLengthOffset), Immediate(length));
9005 Immediate(Smi::FromInt(length)));
9006 9018
9007 // Setup the fixed slots. 9019 // Setup the fixed slots.
9008 __ xor_(ebx, Operand(ebx)); // Set to NULL. 9020 __ xor_(ebx, Operand(ebx)); // Set to NULL.
9009 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx); 9021 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
9010 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax); 9022 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
9011 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx); 9023 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
9012 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx); 9024 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);
9013 9025
9014 // Copy the global object from the surrounding context. We go through the 9026 // Copy the global object from the surrounding context. We go through the
9015 // context in the function (ecx) to match the allocation behavior we have 9027 // context in the function (ecx) to match the allocation behavior we have
(...skipping 1942 matching lines...)
10958 // Get the length (smi tagged) and set that as an in-object property too. 10970 // Get the length (smi tagged) and set that as an in-object property too.
10959 ASSERT(Heap::arguments_length_index == 1); 10971 ASSERT(Heap::arguments_length_index == 1);
10960 __ mov(ecx, Operand(esp, 1 * kPointerSize)); 10972 __ mov(ecx, Operand(esp, 1 * kPointerSize));
10961 __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx); 10973 __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx);
10962 10974
10963 // If there are no actual arguments, we're done. 10975 // If there are no actual arguments, we're done.
10964 Label done; 10976 Label done;
10965 __ test(ecx, Operand(ecx)); 10977 __ test(ecx, Operand(ecx));
10966 __ j(zero, &done); 10978 __ j(zero, &done);
10967 10979
10968 // Get the parameters pointer from the stack. 10980 // Get the parameters pointer from the stack and untag the length.
10969 __ mov(edx, Operand(esp, 2 * kPointerSize)); 10981 __ mov(edx, Operand(esp, 2 * kPointerSize));
10982 __ SmiUntag(ecx);
10970 10983
10971 // Setup the elements pointer in the allocated arguments object and 10984 // Setup the elements pointer in the allocated arguments object and
10972 // initialize the header in the elements fixed array. 10985 // initialize the header in the elements fixed array.
10973 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize)); 10986 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
10974 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); 10987 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
10975 __ mov(FieldOperand(edi, FixedArray::kMapOffset), 10988 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
10976 Immediate(Factory::fixed_array_map())); 10989 Immediate(Factory::fixed_array_map()));
10977 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); 10990 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
10978 // Untag the length for the loop below.
10979 __ SmiUntag(ecx);
10980 10991
10981 // Copy the fixed array slots. 10992 // Copy the fixed array slots.
10982 Label loop; 10993 Label loop;
10983 __ bind(&loop); 10994 __ bind(&loop);
10984 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. 10995 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver.
10985 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); 10996 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
10986 __ add(Operand(edi), Immediate(kPointerSize)); 10997 __ add(Operand(edi), Immediate(kPointerSize));
10987 __ sub(Operand(edx), Immediate(kPointerSize)); 10998 __ sub(Operand(edx), Immediate(kPointerSize));
10988 __ dec(ecx); 10999 __ dec(ecx);
10989 __ j(not_zero, &loop); 11000 __ j(not_zero, &loop);
(...skipping 108 matching lines...)
11098 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx); 11109 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
11099 __ j(not_equal, &runtime); 11110 __ j(not_equal, &runtime);
11100 // Check that the JSArray is in fast case. 11111 // Check that the JSArray is in fast case.
11101 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset)); 11112 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
11102 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset)); 11113 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
11103 __ cmp(eax, Factory::fixed_array_map()); 11114 __ cmp(eax, Factory::fixed_array_map());
11104 __ j(not_equal, &runtime); 11115 __ j(not_equal, &runtime);
11105 // Check that the last match info has space for the capture registers and the 11116 // Check that the last match info has space for the capture registers and the
11106 // additional information. 11117 // additional information.
11107 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset)); 11118 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
11108 __ SmiUntag(eax);
11109 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead)); 11119 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
11110 __ cmp(edx, Operand(eax)); 11120 __ cmp(edx, Operand(eax));
11111 __ j(greater, &runtime); 11121 __ j(greater, &runtime);
11112 11122
11113 // ecx: RegExp data (FixedArray) 11123 // ecx: RegExp data (FixedArray)
11114 // Check the representation and encoding of the subject string. 11124 // Check the representation and encoding of the subject string.
11115 Label seq_string, seq_two_byte_string, check_code; 11125 Label seq_string, seq_two_byte_string, check_code;
11116 const int kStringRepresentationEncodingMask = 11126 const int kStringRepresentationEncodingMask =
11117 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; 11127 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
11118 __ mov(eax, Operand(esp, kSubjectOffset)); 11128 __ mov(eax, Operand(esp, kSubjectOffset));
(...skipping 223 matching lines...)
11342 Register scratch = scratch2; 11352 Register scratch = scratch2;
11343 11353
11344 // Load the number string cache. 11354 // Load the number string cache.
11345 ExternalReference roots_address = ExternalReference::roots_address(); 11355 ExternalReference roots_address = ExternalReference::roots_address();
11346 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex)); 11356 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
11347 __ mov(number_string_cache, 11357 __ mov(number_string_cache,
11348 Operand::StaticArray(scratch, times_pointer_size, roots_address)); 11358 Operand::StaticArray(scratch, times_pointer_size, roots_address));
11349 // Make the hash mask from the length of the number string cache. It 11359 // Make the hash mask from the length of the number string cache. It
11350 // contains two elements (number and string) for each cache entry. 11360 // contains two elements (number and string) for each cache entry.
11351 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); 11361 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
11352 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two. 11362 __ shr(mask, 1); // Divide length by two (length is not a smi).
11353 __ sub(Operand(mask), Immediate(1)); // Make mask. 11363 __ sub(Operand(mask), Immediate(1)); // Make mask.
11354 11364
11355 // Calculate the entry in the number string cache. The hash value in the 11365 // Calculate the entry in the number string cache. The hash value in the
11356 // number string cache for smis is just the smi value, and the hash for 11366 // number string cache for smis is just the smi value, and the hash for
11357 // doubles is the xor of the upper and lower words. See 11367 // doubles is the xor of the upper and lower words. See
11358 // Heap::GetNumberStringCache. 11368 // Heap::GetNumberStringCache.
11359 Label smi_hash_calculated; 11369 Label smi_hash_calculated;
11360 Label load_result_from_cache; 11370 Label load_result_from_cache;
11361 if (object_is_smi) { 11371 if (object_is_smi) {
11362 __ mov(scratch, object); 11372 __ mov(scratch, object);
(...skipping 70 matching lines...)
11433 // Generate code to lookup number in the number string cache. 11443 // Generate code to lookup number in the number string cache.
11434 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime); 11444 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime);
11435 __ ret(1 * kPointerSize); 11445 __ ret(1 * kPointerSize);
11436 11446
11437 __ bind(&runtime); 11447 __ bind(&runtime);
11438 // Handle number to string in the runtime system if not found in the cache. 11448 // Handle number to string in the runtime system if not found in the cache.
11439 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); 11449 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
11440 } 11450 }
11441 11451
11442 11452
11453 void RecordWriteStub::Generate(MacroAssembler* masm) {
11454 masm->RecordWriteHelper(object_, addr_, scratch_);
11455 masm->ret(0);
11456 }
11457
11458
11443 static int NegativeComparisonResult(Condition cc) { 11459 static int NegativeComparisonResult(Condition cc) {
11444 ASSERT(cc != equal); 11460 ASSERT(cc != equal);
11445 ASSERT((cc == less) || (cc == less_equal) 11461 ASSERT((cc == less) || (cc == less_equal)
11446 || (cc == greater) || (cc == greater_equal)); 11462 || (cc == greater) || (cc == greater_equal));
11447 return (cc == greater || cc == greater_equal) ? LESS : GREATER; 11463 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
11448 } 11464 }
11449 11465
11450 11466
11451 void CompareStub::Generate(MacroAssembler* masm) { 11467 void CompareStub::Generate(MacroAssembler* masm) {
11452 Label call_builtin, done; 11468 Label call_builtin, done;
(...skipping 1816 matching lines...)
13269 // tagged as a small integer. 13285 // tagged as a small integer.
13270 __ bind(&runtime); 13286 __ bind(&runtime);
13271 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 13287 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
13272 } 13288 }
13273 13289
13274 #undef __ 13290 #undef __
13275 13291
13276 } } // namespace v8::internal 13292 } } // namespace v8::internal
13277 13293
13278 #endif // V8_TARGET_ARCH_IA32 13294 #endif // V8_TARGET_ARCH_IA32
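
For orientation, the new (patched) side of the hunks above applies two recurring changes: FixedArray::kLengthOffset is treated as an untagged integer again, so sites that need a smi length tag it explicitly and sites that previously untagged it no longer do, and the element-swap helper records writes through the new RecordWriteStub instead of calling RecordWriteHelper inline. The following is a condensed, annotated excerpt of those patterns, lifted from the hunks above; it is a sketch for reading the diff, not additional code, and it only compiles inside the V8 ia32 code generator (MacroAssembler, VirtualFrame, and the '__' masm macro come from the surrounding file).

// Pattern 1: the FixedArray length is a raw int again, so tag it before
// pushing it as a smi (from the hunk near old line 4200).
__ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
__ SmiTag(eax);
frame_->EmitPush(eax);  // <- slot 1

// Pattern 2: the element-swap helper goes through the new RecordWriteStub
// rather than calling RecordWriteHelper directly (from the hunk near old
// line 6904).
RecordWriteStub recordWrite1(tmp2.reg(), index1.reg(), object.reg());
__ CallStub(&recordWrite1);

// The stub itself simply wraps the helper:
void RecordWriteStub::Generate(MacroAssembler* masm) {
  masm->RecordWriteHelper(object_, addr_, scratch_);
  masm->ret(0);
}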