| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 4183 matching lines...) |
| 4194 | 4194 |
| 4195 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset)); | 4195 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset)); |
| 4196 // Get the bridge array held in the enumeration index field. | 4196 // Get the bridge array held in the enumeration index field. |
| 4197 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset)); | 4197 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset)); |
| 4198 // Get the cache from the bridge array. | 4198 // Get the cache from the bridge array. |
| 4199 __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 4199 __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
| 4200 | 4200 |
| 4201 frame_->EmitPush(eax); // <- slot 3 | 4201 frame_->EmitPush(eax); // <- slot 3 |
| 4202 frame_->EmitPush(edx); // <- slot 2 | 4202 frame_->EmitPush(edx); // <- slot 2 |
| 4203 __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset)); | 4203 __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset)); |
| | 4204 __ SmiTag(eax); |
| 4204 frame_->EmitPush(eax); // <- slot 1 | 4205 frame_->EmitPush(eax); // <- slot 1 |
| 4205 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 | 4206 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 |
| 4206 entry.Jump(); | 4207 entry.Jump(); |
| 4207 | 4208 |
| 4208 fixed_array.Bind(); | 4209 fixed_array.Bind(); |
| 4209 // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast) | 4210 // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast) |
| 4210 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3 | 4211 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3 |
| 4211 frame_->EmitPush(eax); // <- slot 2 | 4212 frame_->EmitPush(eax); // <- slot 2 |
| 4212 | 4213 |
| 4213 // Push the length of the array and the initial index onto the stack. | 4214 // Push the length of the array and the initial index onto the stack. |
| 4214 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset)); | 4215 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset)); |
| | 4216 __ SmiTag(eax); |
| 4215 frame_->EmitPush(eax); // <- slot 1 | 4217 frame_->EmitPush(eax); // <- slot 1 |
| 4216 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 | 4218 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 |
| 4217 | 4219 |
| 4218 // Condition. | 4220 // Condition. |
| 4219 entry.Bind(); | 4221 entry.Bind(); |
| 4220 // Grab the current frame's height for the break and continue | 4222 // Grab the current frame's height for the break and continue |
| 4221 // targets only after all the state is pushed on the frame. | 4223 // targets only after all the state is pushed on the frame. |
| 4222 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); | 4224 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 4223 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); | 4225 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 4224 | 4226 |
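Note on the hunk above: the added SmiTag(eax) re-tags the value loaded from FixedArray::kLengthOffset before it is pushed as for-in frame state (slot 1). Together with the later comment "Divide length by two (length is not a smi)", this suggests the CL changes FixedArray's length field to an untagged integer, so read sites must re-tag wherever a smi is expected. A minimal sketch of the tag/untag relationship assumed here (ia32 layout with kSmiTag == 0 and kSmiTagSize == 1, as the old keyed-load code asserted):

    // Illustrative helpers only; the generated code applies the SmiTag/SmiUntag
    // macro-assembler operations directly to registers.
    #include <cstdint>
    static inline int32_t TagAsSmi(int32_t value) { return value << 1; }  // SmiTag: shl reg, 1
    static inline int32_t UntagSmi(int32_t smi)   { return smi >> 1; }    // SmiUntag: sar reg, 1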
| (...skipping 1931 matching lines...) |
| 6156 Result map = allocator()->Allocate(); | 6158 Result map = allocator()->Allocate(); |
| 6157 ASSERT(map.is_valid()); | 6159 ASSERT(map.is_valid()); |
| 6158 __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset)); | 6160 __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset)); |
| 6159 // Undetectable objects behave like undefined when tested with typeof. | 6161 // Undetectable objects behave like undefined when tested with typeof. |
| 6160 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kBitFieldOffset)); | 6162 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kBitFieldOffset)); |
| 6161 __ test(map.reg(), Immediate(1 << Map::kIsUndetectable)); | 6163 __ test(map.reg(), Immediate(1 << Map::kIsUndetectable)); |
| 6162 destination()->false_target()->Branch(not_zero); | 6164 destination()->false_target()->Branch(not_zero); |
| 6163 __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset)); | 6165 __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset)); |
| 6164 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset)); | 6166 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset)); |
| 6165 __ cmp(map.reg(), FIRST_JS_OBJECT_TYPE); | 6167 __ cmp(map.reg(), FIRST_JS_OBJECT_TYPE); |
| 6166 destination()->false_target()->Branch(below); | 6168 destination()->false_target()->Branch(less); |
| 6167 __ cmp(map.reg(), LAST_JS_OBJECT_TYPE); | 6169 __ cmp(map.reg(), LAST_JS_OBJECT_TYPE); |
| 6168 obj.Unuse(); | 6170 obj.Unuse(); |
| 6169 map.Unuse(); | 6171 map.Unuse(); |
| 6170 destination()->Split(below_equal); | 6172 destination()->Split(less_equal); |
| 6171 } | 6173 } |
| 6172 | 6174 |
| 6173 | 6175 |
| 6174 void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) { | 6176 void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) { |
| 6175 // This generates a fast version of: | 6177 // This generates a fast version of: |
| 6176 // (%_ClassOf(arg) === 'Function') | 6178 // (%_ClassOf(arg) === 'Function') |
| 6177 ASSERT(args->length() == 1); | 6179 ASSERT(args->length() == 1); |
| 6178 Load(args->at(0)); | 6180 Load(args->at(0)); |
| 6179 Result obj = frame_->Pop(); | 6181 Result obj = frame_->Pop(); |
| 6180 obj.ToRegister(); | 6182 obj.ToRegister(); |
| (...skipping 92 matching lines...) |
| 6273 // If the object is a smi, we return null. | 6275 // If the object is a smi, we return null. |
| 6274 __ test(obj.reg(), Immediate(kSmiTagMask)); | 6276 __ test(obj.reg(), Immediate(kSmiTagMask)); |
| 6275 null.Branch(zero); | 6277 null.Branch(zero); |
| 6276 | 6278 |
| 6277 // Check that the object is a JS object but take special care of JS | 6279 // Check that the object is a JS object but take special care of JS |
| 6278 // functions to make sure they have 'Function' as their class. | 6280 // functions to make sure they have 'Function' as their class. |
| 6279 { Result tmp = allocator()->Allocate(); | 6281 { Result tmp = allocator()->Allocate(); |
| 6280 __ mov(obj.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset)); | 6282 __ mov(obj.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset)); |
| 6281 __ movzx_b(tmp.reg(), FieldOperand(obj.reg(), Map::kInstanceTypeOffset)); | 6283 __ movzx_b(tmp.reg(), FieldOperand(obj.reg(), Map::kInstanceTypeOffset)); |
| 6282 __ cmp(tmp.reg(), FIRST_JS_OBJECT_TYPE); | 6284 __ cmp(tmp.reg(), FIRST_JS_OBJECT_TYPE); |
| 6283 null.Branch(below); | 6285 null.Branch(less); |
| 6284 | 6286 |
| 6285 // As long as JS_FUNCTION_TYPE is the last instance type and it is | 6287 // As long as JS_FUNCTION_TYPE is the last instance type and it is |
| 6286 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for | 6288 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for |
| 6287 // LAST_JS_OBJECT_TYPE. | 6289 // LAST_JS_OBJECT_TYPE. |
| 6288 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); | 6290 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); |
| 6289 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); | 6291 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); |
| 6290 __ cmp(tmp.reg(), JS_FUNCTION_TYPE); | 6292 __ cmp(tmp.reg(), JS_FUNCTION_TYPE); |
| 6291 function.Branch(equal); | 6293 function.Branch(equal); |
| 6292 } | 6294 } |
| 6293 | 6295 |
| (...skipping 302 matching lines...) |
| 6596 | 6598 |
| 6597 // Fill out the elements FixedArray. | 6599 // Fill out the elements FixedArray. |
| 6598 // eax: JSArray. | 6600 // eax: JSArray. |
| 6599 // ebx: FixedArray. | 6601 // ebx: FixedArray. |
| 6600 // ecx: Number of elements in array, as smi. | 6602 // ecx: Number of elements in array, as smi. |
| 6601 | 6603 |
| 6602 // Set map. | 6604 // Set map. |
| 6603 __ mov(FieldOperand(ebx, HeapObject::kMapOffset), | 6605 __ mov(FieldOperand(ebx, HeapObject::kMapOffset), |
| 6604 Immediate(Factory::fixed_array_map())); | 6606 Immediate(Factory::fixed_array_map())); |
| 6605 // Set length. | 6607 // Set length. |
| | 6608 __ SmiUntag(ecx); |
| 6606 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx); | 6609 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx); |
| 6607 // Fill contents of fixed-array with the-hole. | 6610 // Fill contents of fixed-array with the-hole. |
| 6608 __ SmiUntag(ecx); | |
| 6609 __ mov(edx, Immediate(Factory::the_hole_value())); | 6611 __ mov(edx, Immediate(Factory::the_hole_value())); |
| 6610 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize)); | 6612 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize)); |
| 6611 // Fill fixed array elements with hole. | 6613 // Fill fixed array elements with hole. |
| 6612 // eax: JSArray. | 6614 // eax: JSArray. |
| 6613 // ecx: Number of elements to fill. | 6615 // ecx: Number of elements to fill. |
| 6614 // ebx: Start of elements in FixedArray. | 6616 // ebx: Start of elements in FixedArray. |
| 6615 // edx: the hole. | 6617 // edx: the hole. |
| 6616 Label loop; | 6618 Label loop; |
| 6617 __ test(ecx, Operand(ecx)); | 6619 __ test(ecx, Operand(ecx)); |
| 6618 __ bind(&loop); | 6620 __ bind(&loop); |
| (...skipping 83 matching lines...) |
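Note on the hole-filling hunk above: SmiUntag(ecx) now runs before the length store instead of after it, so the untagged element count is what lands in FixedArray::kLengthOffset, and the same untagged count drives the fill loop. A rough C++ rendering of what the generated loop does (illustrative only; the loop body itself is elided by the diff view):

    // count    - untagged element count (ecx)
    // elements - address of the first element slot (ebx after the lea)
    // the_hole - the-hole sentinel (edx)
    static void FillWithHole(void** elements, int count, void* the_hole) {
      for (int i = 0; i < count; i++) {
        elements[i] = the_hole;  // every slot starts out holding the hole
      }
    }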
| 6702 | 6704 |
| 6703 // Find a place to put new cached value into. | 6705 // Find a place to put new cached value into. |
| 6704 Label add_new_entry, update_cache; | 6706 Label add_new_entry, update_cache; |
| 6705 __ mov(ecx, Operand(esp, kPointerSize)); // restore the cache | 6707 __ mov(ecx, Operand(esp, kPointerSize)); // restore the cache |
| 6706 // Possible optimization: the cache size is constant for a given cache, | 6708 // Possible optimization: the cache size is constant for a given cache, |
| 6707 // so technically we could use a constant here. However, on a cache | 6709 // so technically we could use a constant here. However, on a cache |
| 6708 // miss this optimization would hardly matter. | 6710 // miss this optimization would hardly matter. |
| 6709 | 6711 |
| 6710 // Check if we could add new entry to cache. | 6712 // Check if we could add new entry to cache. |
| 6711 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset)); | 6713 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset)); |
| | 6714 __ SmiTag(ebx); |
| 6712 __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset)); | 6715 __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset)); |
| 6713 __ j(greater, &add_new_entry); | 6716 __ j(greater, &add_new_entry); |
| 6714 | 6717 |
| 6715 // Check if we could evict entry after finger. | 6718 // Check if we could evict entry after finger. |
| 6716 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset)); | 6719 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset)); |
| 6717 __ add(Operand(edx), Immediate(kEntrySizeSmi)); | 6720 __ add(Operand(edx), Immediate(kEntrySizeSmi)); |
| 6718 __ cmp(ebx, Operand(edx)); | 6721 __ cmp(ebx, Operand(edx)); |
| 6719 __ j(greater, &update_cache); | 6722 __ j(greater, &update_cache); |
| 6720 | 6723 |
| 6721 // Need to wrap over the cache. | 6724 // Need to wrap over the cache. |
| (...skipping 140 matching lines...) |
| 6862 frame_->Spill(index2.reg()); | 6865 frame_->Spill(index2.reg()); |
| 6863 | 6866 |
| 6864 DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(), | 6867 DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(), |
| 6865 index1.reg(), | 6868 index1.reg(), |
| 6866 index2.reg()); | 6869 index2.reg()); |
| 6867 | 6870 |
| 6868 // Fetch the map and check if array is in fast case. | 6871 // Fetch the map and check if array is in fast case. |
| 6869 // Check that object doesn't require security checks and | 6872 // Check that object doesn't require security checks and |
| 6870 // has no indexed interceptor. | 6873 // has no indexed interceptor. |
| 6871 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg()); | 6874 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg()); |
| 6872 deferred->Branch(below); | 6875 deferred->Branch(less); |
| 6873 __ movzx_b(tmp1.reg(), FieldOperand(tmp1.reg(), Map::kBitFieldOffset)); | 6876 __ movzx_b(tmp1.reg(), FieldOperand(tmp1.reg(), Map::kBitFieldOffset)); |
| 6874 __ test(tmp1.reg(), Immediate(KeyedLoadIC::kSlowCaseBitFieldMask)); | 6877 __ test(tmp1.reg(), Immediate(KeyedLoadIC::kSlowCaseBitFieldMask)); |
| 6875 deferred->Branch(not_zero); | 6878 deferred->Branch(not_zero); |
| 6876 | 6879 |
| 6877 // Check the object's elements are in fast case. | 6880 // Check the object's elements are in fast case. |
| 6878 __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset)); | 6881 __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset)); |
| 6879 __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset), | 6882 __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset), |
| 6880 Immediate(Factory::fixed_array_map())); | 6883 Immediate(Factory::fixed_array_map())); |
| 6881 deferred->Branch(not_equal); | 6884 deferred->Branch(not_equal); |
| 6882 | 6885 |
| (...skipping 16 matching lines...) |
| 6899 __ mov(tmp2.reg(), Operand(index2.reg(), 0)); | 6902 __ mov(tmp2.reg(), Operand(index2.reg(), 0)); |
| 6900 __ mov(Operand(index2.reg(), 0), object.reg()); | 6903 __ mov(Operand(index2.reg(), 0), object.reg()); |
| 6901 __ mov(Operand(index1.reg(), 0), tmp2.reg()); | 6904 __ mov(Operand(index1.reg(), 0), tmp2.reg()); |
| 6902 | 6905 |
| 6903 Label done; | 6906 Label done; |
| 6904 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done); | 6907 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done); |
| 6905 // Possible optimization: do a check that both values are Smis | 6908 // Possible optimization: do a check that both values are Smis |
| 6906 // (bitwise OR them and test the result against the smi mask.) | 6909 // (bitwise OR them and test the result against the smi mask.) |
| 6907 | 6910 |
| 6908 __ mov(tmp2.reg(), tmp1.reg()); | 6911 __ mov(tmp2.reg(), tmp1.reg()); |
| 6909 __ RecordWriteHelper(tmp2.reg(), index1.reg(), object.reg()); | 6912 RecordWriteStub recordWrite1(tmp2.reg(), index1.reg(), object.reg()); |
| 6910 __ RecordWriteHelper(tmp1.reg(), index2.reg(), object.reg()); | 6913 __ CallStub(&recordWrite1); |
| | 6914 |
| | 6915 RecordWriteStub recordWrite2(tmp1.reg(), index2.reg(), object.reg()); |
| | 6916 __ CallStub(&recordWrite2); |
| | 6917 |
| 6911 __ bind(&done); | 6918 __ bind(&done); |
| 6912 | 6919 |
| 6913 deferred->BindExit(); | 6920 deferred->BindExit(); |
| 6914 frame_->Push(Factory::undefined_value()); | 6921 frame_->Push(Factory::undefined_value()); |
| 6915 } | 6922 } |
| 6916 | 6923 |
| 6917 | 6924 |
| 6918 void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) { | 6925 void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) { |
| 6919 Comment cmnt(masm_, "[ GenerateCallFunction"); | 6926 Comment cmnt(masm_, "[ GenerateCallFunction"); |
| 6920 | 6927 |
| (...skipping 1257 matching lines...) |
| 8178 __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg()); | 8185 __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg()); |
| 8179 destination()->false_target()->Branch(equal); | 8186 destination()->false_target()->Branch(equal); |
| 8180 | 8187 |
| 8181 // It can be an undetectable object. | 8188 // It can be an undetectable object. |
| 8182 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kBitFieldOffset)); | 8189 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kBitFieldOffset)); |
| 8183 __ test(map.reg(), Immediate(1 << Map::kIsUndetectable)); | 8190 __ test(map.reg(), Immediate(1 << Map::kIsUndetectable)); |
| 8184 destination()->false_target()->Branch(not_zero); | 8191 destination()->false_target()->Branch(not_zero); |
| 8185 __ mov(map.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset)); | 8192 __ mov(map.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset)); |
| 8186 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset)); | 8193 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset)); |
| 8187 __ cmp(map.reg(), FIRST_JS_OBJECT_TYPE); | 8194 __ cmp(map.reg(), FIRST_JS_OBJECT_TYPE); |
| 8188 destination()->false_target()->Branch(below); | 8195 destination()->false_target()->Branch(less); |
| 8189 __ cmp(map.reg(), LAST_JS_OBJECT_TYPE); | 8196 __ cmp(map.reg(), LAST_JS_OBJECT_TYPE); |
| 8190 answer.Unuse(); | 8197 answer.Unuse(); |
| 8191 map.Unuse(); | 8198 map.Unuse(); |
| 8192 destination()->Split(below_equal); | 8199 destination()->Split(less_equal); |
| 8193 } else { | 8200 } else { |
| 8194 // Uncommon case: typeof testing against a string literal that is | 8201 // Uncommon case: typeof testing against a string literal that is |
| 8195 // never returned from the typeof operator. | 8202 // never returned from the typeof operator. |
| 8196 answer.Unuse(); | 8203 answer.Unuse(); |
| 8197 destination()->Goto(false); | 8204 destination()->Goto(false); |
| 8198 } | 8205 } |
| 8199 return; | 8206 return; |
| 8200 } else if (op == Token::LT && | 8207 } else if (op == Token::LT && |
| 8201 right->AsLiteral() != NULL && | 8208 right->AsLiteral() != NULL && |
| 8202 right->AsLiteral()->handle()->IsHeapNumber()) { | 8209 right->AsLiteral()->handle()->IsHeapNumber()) { |
| (...skipping 396 matching lines...) |
| 8599 Result key = frame_->Pop(); | 8606 Result key = frame_->Pop(); |
| 8600 Result receiver = frame_->Pop(); | 8607 Result receiver = frame_->Pop(); |
| 8601 key.ToRegister(); | 8608 key.ToRegister(); |
| 8602 receiver.ToRegister(); | 8609 receiver.ToRegister(); |
| 8603 | 8610 |
| 8604 // Use a fresh temporary to load the elements without destroying | 8611 // Use a fresh temporary to load the elements without destroying |
| 8605 // the receiver which is needed for the deferred slow case. | 8612 // the receiver which is needed for the deferred slow case. |
| 8606 Result elements = allocator()->Allocate(); | 8613 Result elements = allocator()->Allocate(); |
| 8607 ASSERT(elements.is_valid()); | 8614 ASSERT(elements.is_valid()); |
| 8608 | 8615 |
| 8609 result = elements; | 8616 // Use a fresh temporary for the index and later the loaded |
| | 8617 // value. |
| | 8618 result = allocator()->Allocate(); |
| | 8619 ASSERT(result.is_valid()); |
| 8610 | 8620 |
| 8611 DeferredReferenceGetKeyedValue* deferred = | 8621 DeferredReferenceGetKeyedValue* deferred = |
| 8612 new DeferredReferenceGetKeyedValue(elements.reg(), | 8622 new DeferredReferenceGetKeyedValue(result.reg(), |
| 8613 receiver.reg(), | 8623 receiver.reg(), |
| 8614 key.reg()); | 8624 key.reg()); |
| 8615 | 8625 |
| 8616 __ test(receiver.reg(), Immediate(kSmiTagMask)); | 8626 __ test(receiver.reg(), Immediate(kSmiTagMask)); |
| 8617 deferred->Branch(zero); | 8627 deferred->Branch(zero); |
| 8618 | 8628 |
| 8619 // Initially, use an invalid map. The map is patched in the IC | 8629 // Initially, use an invalid map. The map is patched in the IC |
| 8620 // initialization code. | 8630 // initialization code. |
| 8621 __ bind(deferred->patch_site()); | 8631 __ bind(deferred->patch_site()); |
| 8622 // Use masm-> here instead of the double underscore macro since extra | 8632 // Use masm-> here instead of the double underscore macro since extra |
| (...skipping 11 matching lines...) |
| 8634 } | 8644 } |
| 8635 | 8645 |
| 8636 // Get the elements array from the receiver and check that it | 8646 // Get the elements array from the receiver and check that it |
| 8637 // is not a dictionary. | 8647 // is not a dictionary. |
| 8638 __ mov(elements.reg(), | 8648 __ mov(elements.reg(), |
| 8639 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); | 8649 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); |
| 8640 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset), | 8650 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset), |
| 8641 Immediate(Factory::fixed_array_map())); | 8651 Immediate(Factory::fixed_array_map())); |
| 8642 deferred->Branch(not_equal); | 8652 deferred->Branch(not_equal); |
| 8643 | 8653 |
| 8644 // Check that the key is within bounds. | 8654 // Shift the key to get the actual index value and check that |
| 8645 __ cmp(key.reg(), | 8655 // it is within bounds. Use unsigned comparison to handle negative keys. |
| | 8656 __ mov(result.reg(), key.reg()); |
| | 8657 __ SmiUntag(result.reg()); |
| | 8658 __ cmp(result.reg(), |
| 8646 FieldOperand(elements.reg(), FixedArray::kLengthOffset)); | 8659 FieldOperand(elements.reg(), FixedArray::kLengthOffset)); |
| 8647 deferred->Branch(above_equal); | 8660 deferred->Branch(above_equal); |
| 8648 | 8661 |
| 8649 // Load and check that the result is not the hole. | 8662 // Load and check that the result is not the hole. |
| 8650 ASSERT((kSmiTag == 0) && (kSmiTagSize == 1)); | |
| 8651 __ mov(result.reg(), Operand(elements.reg(), | 8663 __ mov(result.reg(), Operand(elements.reg(), |
| 8652 key.reg(), | 8664 result.reg(), |
| 8653 times_2, | 8665 times_4, |
| 8654 FixedArray::kHeaderSize - kHeapObjectTag)); | 8666 FixedArray::kHeaderSize - kHeapObjectTag)); |
| | 8667 elements.Unuse(); |
| 8655 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value())); | 8668 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value())); |
| 8656 deferred->Branch(equal); | 8669 deferred->Branch(equal); |
| 8657 __ IncrementCounter(&Counters::keyed_load_inline, 1); | 8670 __ IncrementCounter(&Counters::keyed_load_inline, 1); |
| 8658 | 8671 |
| 8659 deferred->BindExit(); | 8672 deferred->BindExit(); |
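Note: the inlined keyed-load fast path above now untags the key into the result register, bounds-checks it against the untagged FixedArray length with an unsigned comparison, and scales the element load by times_4 instead of times_2. A hedged C++ paraphrase (names are illustrative, not actual V8 helpers; returning nullptr stands for falling back to the deferred slow case):

    static void* FastKeyedLoad(void** elements, int elements_length,
                               int key_smi, void* the_hole) {
      int index = key_smi >> 1;                        // SmiUntag(key)
      if (static_cast<unsigned>(index) >=
          static_cast<unsigned>(elements_length)) {    // above_equal: unsigned compare
        return nullptr;                                // also catches negative keys
      }
      void* value = elements[index];                   // times_4 scaled load
      if (value == the_hole) return nullptr;           // hole means "take the slow path"
      return value;
    }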
| 8660 } else { | 8673 } else { |
| 8661 Comment cmnt(masm_, "[ Load from keyed Property"); | 8674 Comment cmnt(masm_, "[ Load from keyed Property"); |
| 8662 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET); | 8675 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET); |
| 8663 // Make sure that we do not have a test instruction after the | 8676 // Make sure that we do not have a test instruction after the |
| 8664 // call. A test instruction after the call is used to | 8677 // call. A test instruction after the call is used to |
| (...skipping 64 matching lines...) |
| 8729 FieldOperand(receiver.reg(), JSArray::kLengthOffset)); | 8742 FieldOperand(receiver.reg(), JSArray::kLengthOffset)); |
| 8730 deferred->Branch(above_equal); | 8743 deferred->Branch(above_equal); |
| 8731 | 8744 |
| 8732 // Get the elements array from the receiver and check that it is not a | 8745 // Get the elements array from the receiver and check that it is not a |
| 8733 // dictionary. | 8746 // dictionary. |
| 8734 __ mov(tmp.reg(), | 8747 __ mov(tmp.reg(), |
| 8735 FieldOperand(receiver.reg(), JSArray::kElementsOffset)); | 8748 FieldOperand(receiver.reg(), JSArray::kElementsOffset)); |
| 8736 | 8749 |
| 8737 // Check whether it is possible to omit the write barrier. If the elements | 8750 // Check whether it is possible to omit the write barrier. If the elements |
| 8738 // array is in new space or the value written is a smi we can safely update | 8751 // array is in new space or the value written is a smi we can safely update |
| 8739 // the elements array without write barrier. | 8752 // the elements array without updating the remembered set. |
| 8740 Label in_new_space; | 8753 Label in_new_space; |
| 8741 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space); | 8754 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space); |
| 8742 if (!value_is_constant) { | 8755 if (!value_is_constant) { |
| 8743 __ test(result.reg(), Immediate(kSmiTagMask)); | 8756 __ test(result.reg(), Immediate(kSmiTagMask)); |
| 8744 deferred->Branch(not_zero); | 8757 deferred->Branch(not_zero); |
| 8745 } | 8758 } |
| 8746 | 8759 |
| 8747 __ bind(&in_new_space); | 8760 __ bind(&in_new_space); |
| 8748 // Bind the deferred code patch site to be able to locate the fixed | 8761 // Bind the deferred code patch site to be able to locate the fixed |
| 8749 // array map comparison. When debugging, we patch this comparison to | 8762 // array map comparison. When debugging, we patch this comparison to |
| (...skipping 251 matching lines...) |
| 9001 Label gc; | 9014 Label gc; |
| 9002 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 9015 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
| 9003 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, | 9016 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, |
| 9004 eax, ebx, ecx, &gc, TAG_OBJECT); | 9017 eax, ebx, ecx, &gc, TAG_OBJECT); |
| 9005 | 9018 |
| 9006 // Get the function from the stack. | 9019 // Get the function from the stack. |
| 9007 __ mov(ecx, Operand(esp, 1 * kPointerSize)); | 9020 __ mov(ecx, Operand(esp, 1 * kPointerSize)); |
| 9008 | 9021 |
| 9009 // Setup the object header. | 9022 // Setup the object header. |
| 9010 __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map()); | 9023 __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map()); |
| 9011 __ mov(FieldOperand(eax, Context::kLengthOffset), | 9024 __ mov(FieldOperand(eax, Array::kLengthOffset), Immediate(length)); |
| 9012 Immediate(Smi::FromInt(length))); | |
| 9013 | 9025 |
| 9014 // Setup the fixed slots. | 9026 // Setup the fixed slots. |
| 9015 __ xor_(ebx, Operand(ebx)); // Set to NULL. | 9027 __ xor_(ebx, Operand(ebx)); // Set to NULL. |
| 9016 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx); | 9028 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx); |
| 9017 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax); | 9029 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax); |
| 9018 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx); | 9030 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx); |
| 9019 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx); | 9031 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx); |
| 9020 | 9032 |
| 9021 // Copy the global object from the surrounding context. We go through the | 9033 // Copy the global object from the surrounding context. We go through the |
| 9022 // context in the function (ecx) to match the allocation behavior we have | 9034 // context in the function (ecx) to match the allocation behavior we have |
| (...skipping 1942 matching lines...) |
| 10965 // Get the length (smi tagged) and set that as an in-object property too. | 10977 // Get the length (smi tagged) and set that as an in-object property too. |
| 10966 ASSERT(Heap::arguments_length_index == 1); | 10978 ASSERT(Heap::arguments_length_index == 1); |
| 10967 __ mov(ecx, Operand(esp, 1 * kPointerSize)); | 10979 __ mov(ecx, Operand(esp, 1 * kPointerSize)); |
| 10968 __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx); | 10980 __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx); |
| 10969 | 10981 |
| 10970 // If there are no actual arguments, we're done. | 10982 // If there are no actual arguments, we're done. |
| 10971 Label done; | 10983 Label done; |
| 10972 __ test(ecx, Operand(ecx)); | 10984 __ test(ecx, Operand(ecx)); |
| 10973 __ j(zero, &done); | 10985 __ j(zero, &done); |
| 10974 | 10986 |
| 10975 // Get the parameters pointer from the stack. | 10987 // Get the parameters pointer from the stack and untag the length. |
| 10976 __ mov(edx, Operand(esp, 2 * kPointerSize)); | 10988 __ mov(edx, Operand(esp, 2 * kPointerSize)); |
| | 10989 __ SmiUntag(ecx); |
| 10977 | 10990 |
| 10978 // Setup the elements pointer in the allocated arguments object and | 10991 // Setup the elements pointer in the allocated arguments object and |
| 10979 // initialize the header in the elements fixed array. | 10992 // initialize the header in the elements fixed array. |
| 10980 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize)); | 10993 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize)); |
| 10981 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); | 10994 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); |
| 10982 __ mov(FieldOperand(edi, FixedArray::kMapOffset), | 10995 __ mov(FieldOperand(edi, FixedArray::kMapOffset), |
| 10983 Immediate(Factory::fixed_array_map())); | 10996 Immediate(Factory::fixed_array_map())); |
| 10984 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); | 10997 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); |
| 10985 // Untag the length for the loop below. | |
| 10986 __ SmiUntag(ecx); | |
| 10987 | 10998 |
| 10988 // Copy the fixed array slots. | 10999 // Copy the fixed array slots. |
| 10989 Label loop; | 11000 Label loop; |
| 10990 __ bind(&loop); | 11001 __ bind(&loop); |
| 10991 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. | 11002 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. |
| 10992 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); | 11003 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); |
| 10993 __ add(Operand(edi), Immediate(kPointerSize)); | 11004 __ add(Operand(edi), Immediate(kPointerSize)); |
| 10994 __ sub(Operand(edx), Immediate(kPointerSize)); | 11005 __ sub(Operand(edx), Immediate(kPointerSize)); |
| 10995 __ dec(ecx); | 11006 __ dec(ecx); |
| 10996 __ j(not_zero, &loop); | 11007 __ j(not_zero, &loop); |
| (...skipping 108 matching lines...) |
| 11105 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx); | 11116 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx); |
| 11106 __ j(not_equal, &runtime); | 11117 __ j(not_equal, &runtime); |
| 11107 // Check that the JSArray is in fast case. | 11118 // Check that the JSArray is in fast case. |
| 11108 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset)); | 11119 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset)); |
| 11109 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset)); | 11120 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset)); |
| 11110 __ cmp(eax, Factory::fixed_array_map()); | 11121 __ cmp(eax, Factory::fixed_array_map()); |
| 11111 __ j(not_equal, &runtime); | 11122 __ j(not_equal, &runtime); |
| 11112 // Check that the last match info has space for the capture registers and the | 11123 // Check that the last match info has space for the capture registers and the |
| 11113 // additional information. | 11124 // additional information. |
| 11114 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset)); | 11125 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset)); |
| 11115 __ SmiUntag(eax); | |
| 11116 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead)); | 11126 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead)); |
| 11117 __ cmp(edx, Operand(eax)); | 11127 __ cmp(edx, Operand(eax)); |
| 11118 __ j(greater, &runtime); | 11128 __ j(greater, &runtime); |
| 11119 | 11129 |
| 11120 // ecx: RegExp data (FixedArray) | 11130 // ecx: RegExp data (FixedArray) |
| 11121 // Check the representation and encoding of the subject string. | 11131 // Check the representation and encoding of the subject string. |
| 11122 Label seq_string, seq_two_byte_string, check_code; | 11132 Label seq_string, seq_two_byte_string, check_code; |
| 11123 const int kStringRepresentationEncodingMask = | 11133 const int kStringRepresentationEncodingMask = |
| 11124 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; | 11134 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask; |
| 11125 __ mov(eax, Operand(esp, kSubjectOffset)); | 11135 __ mov(eax, Operand(esp, kSubjectOffset)); |
| (...skipping 223 matching lines...) |
| 11349 Register scratch = scratch2; | 11359 Register scratch = scratch2; |
| 11350 | 11360 |
| 11351 // Load the number string cache. | 11361 // Load the number string cache. |
| 11352 ExternalReference roots_address = ExternalReference::roots_address(); | 11362 ExternalReference roots_address = ExternalReference::roots_address(); |
| 11353 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex)); | 11363 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex)); |
| 11354 __ mov(number_string_cache, | 11364 __ mov(number_string_cache, |
| 11355 Operand::StaticArray(scratch, times_pointer_size, roots_address)); | 11365 Operand::StaticArray(scratch, times_pointer_size, roots_address)); |
| 11356 // Make the hash mask from the length of the number string cache. It | 11366 // Make the hash mask from the length of the number string cache. It |
| 11357 // contains two elements (number and string) for each cache entry. | 11367 // contains two elements (number and string) for each cache entry. |
| 11358 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); | 11368 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); |
| 11359 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two. | 11369 __ shr(mask, 1); // Divide length by two (length is not a smi). |
| 11360 __ sub(Operand(mask), Immediate(1)); // Make mask. | 11370 __ sub(Operand(mask), Immediate(1)); // Make mask. |
| 11361 | 11371 |
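With the cache length no longer a smi, the mask above is simply (length / 2) - 1. A worked example (the cache size is assumed for illustration, not taken from this CL):

    // Returns the cache entry index for a smi key, following the description
    // in Heap::GetNumberStringCache referenced below.
    static int NumberStringCacheEntry(int length, int smi_value) {
      int mask = (length >> 1) - 1;  // e.g. 128 slots: 64 (number, string) pairs, mask 63
      return smi_value & mask;       // for smis the hash is the smi value itself
    }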
| 11362 // Calculate the entry in the number string cache. The hash value in the | 11372 // Calculate the entry in the number string cache. The hash value in the |
| 11363 // number string cache for smis is just the smi value, and the hash for | 11373 // number string cache for smis is just the smi value, and the hash for |
| 11364 // doubles is the xor of the upper and lower words. See | 11374 // doubles is the xor of the upper and lower words. See |
| 11365 // Heap::GetNumberStringCache. | 11375 // Heap::GetNumberStringCache. |
| 11366 Label smi_hash_calculated; | 11376 Label smi_hash_calculated; |
| 11367 Label load_result_from_cache; | 11377 Label load_result_from_cache; |
| 11368 if (object_is_smi) { | 11378 if (object_is_smi) { |
| 11369 __ mov(scratch, object); | 11379 __ mov(scratch, object); |
| (...skipping 70 matching lines...) |
| 11440 // Generate code to lookup number in the number string cache. | 11450 // Generate code to lookup number in the number string cache. |
| 11441 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime); | 11451 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime); |
| 11442 __ ret(1 * kPointerSize); | 11452 __ ret(1 * kPointerSize); |
| 11443 | 11453 |
| 11444 __ bind(&runtime); | 11454 __ bind(&runtime); |
| 11445 // Handle number to string in the runtime system if not found in the cache. | 11455 // Handle number to string in the runtime system if not found in the cache. |
| 11446 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); | 11456 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); |
| 11447 } | 11457 } |
| 11448 | 11458 |
| 11449 | 11459 |
| | 11460 void RecordWriteStub::Generate(MacroAssembler* masm) { |
| | 11461 masm->RecordWriteHelper(object_, addr_, scratch_); |
| | 11462 masm->ret(0); |
| | 11463 } |
| | 11464 |
| | 11465 |
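RecordWriteStub::Generate above simply wraps MacroAssembler::RecordWriteHelper behind a stub call, matching the two CallStub sites added in the swap-elements hunk earlier in this diff. The stub's declaration is not part of the hunks shown; a minimal sketch consistent with those call sites and with the member names used in Generate (hypothetical, for orientation only):

    class RecordWriteStub : public CodeStub {  // CodeStub base is assumed
     public:
      RecordWriteStub(Register object, Register addr, Register scratch)
          : object_(object), addr_(addr), scratch_(scratch) { }
      void Generate(MacroAssembler* masm);  // emits RecordWriteHelper + ret(0)
     private:
      Register object_;   // object whose field was written (the elements array)
      Register addr_;     // address of the written slot
      Register scratch_;  // scratch register
      // CodeStub bookkeeping (MajorKey/MinorKey, etc.) omitted from this sketch.
    };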
| 11450 static int NegativeComparisonResult(Condition cc) { | 11466 static int NegativeComparisonResult(Condition cc) { |
| 11451 ASSERT(cc != equal); | 11467 ASSERT(cc != equal); |
| 11452 ASSERT((cc == less) || (cc == less_equal) | 11468 ASSERT((cc == less) || (cc == less_equal) |
| 11453 || (cc == greater) || (cc == greater_equal)); | 11469 || (cc == greater) || (cc == greater_equal)); |
| 11454 return (cc == greater || cc == greater_equal) ? LESS : GREATER; | 11470 return (cc == greater || cc == greater_equal) ? LESS : GREATER; |
| 11455 } | 11471 } |
| 11456 | 11472 |
| 11457 | 11473 |
| 11458 void CompareStub::Generate(MacroAssembler* masm) { | 11474 void CompareStub::Generate(MacroAssembler* masm) { |
| 11459 Label call_builtin, done; | 11475 Label call_builtin, done; |
| (...skipping 119 matching lines...) |
| 11579 // There is no test for undetectability in strict equality. | 11595 // There is no test for undetectability in strict equality. |
| 11580 | 11596 |
| 11581 // Get the type of the first operand. | 11597 // Get the type of the first operand. |
| 11582 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); | 11598 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); |
| 11583 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); | 11599 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); |
| 11584 | 11600 |
| 11585 // If the first object is a JS object, we have done pointer comparison. | 11601 // If the first object is a JS object, we have done pointer comparison. |
| 11586 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); | 11602 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); |
| 11587 Label first_non_object; | 11603 Label first_non_object; |
| 11588 __ cmp(ecx, FIRST_JS_OBJECT_TYPE); | 11604 __ cmp(ecx, FIRST_JS_OBJECT_TYPE); |
| 11589 __ j(below, &first_non_object); | 11605 __ j(less, &first_non_object); |
| 11590 | 11606 |
| 11591 // Return non-zero (eax is not zero) | 11607 // Return non-zero (eax is not zero) |
| 11592 Label return_not_equal; | 11608 Label return_not_equal; |
| 11593 ASSERT(kHeapObjectTag != 0); | 11609 ASSERT(kHeapObjectTag != 0); |
| 11594 __ bind(&return_not_equal); | 11610 __ bind(&return_not_equal); |
| 11595 __ ret(0); | 11611 __ ret(0); |
| 11596 | 11612 |
| 11597 __ bind(&first_non_object); | 11613 __ bind(&first_non_object); |
| 11598 // Check for oddballs: true, false, null, undefined. | 11614 // Check for oddballs: true, false, null, undefined. |
| 11599 __ cmp(ecx, ODDBALL_TYPE); | 11615 __ cmp(ecx, ODDBALL_TYPE); |
| 11600 __ j(equal, &return_not_equal); | 11616 __ j(equal, &return_not_equal); |
| 11601 | 11617 |
| 11602 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); | 11618 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); |
| 11603 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); | 11619 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); |
| 11604 | 11620 |
| 11605 __ cmp(ecx, FIRST_JS_OBJECT_TYPE); | 11621 __ cmp(ecx, FIRST_JS_OBJECT_TYPE); |
| 11606 __ j(above_equal, &return_not_equal); | 11622 __ j(greater_equal, &return_not_equal); |
| 11607 | 11623 |
| 11608 // Check for oddballs: true, false, null, undefined. | 11624 // Check for oddballs: true, false, null, undefined. |
| 11609 __ cmp(ecx, ODDBALL_TYPE); | 11625 __ cmp(ecx, ODDBALL_TYPE); |
| 11610 __ j(equal, &return_not_equal); | 11626 __ j(equal, &return_not_equal); |
| 11611 | 11627 |
| 11612 // Fall through to the general case. | 11628 // Fall through to the general case. |
| 11613 } | 11629 } |
| 11614 __ bind(&slow); | 11630 __ bind(&slow); |
| 11615 } | 11631 } |
| 11616 | 11632 |
| (...skipping 627 matching lines...) |
| 12244 // Get the object - go slow case if it's a smi. | 12260 // Get the object - go slow case if it's a smi. |
| 12245 Label slow; | 12261 Label slow; |
| 12246 __ mov(eax, Operand(esp, 2 * kPointerSize)); // 2 ~ return address, function | 12262 __ mov(eax, Operand(esp, 2 * kPointerSize)); // 2 ~ return address, function |
| 12247 __ test(eax, Immediate(kSmiTagMask)); | 12263 __ test(eax, Immediate(kSmiTagMask)); |
| 12248 __ j(zero, &slow, not_taken); | 12264 __ j(zero, &slow, not_taken); |
| 12249 | 12265 |
| 12250 // Check that the left hand is a JS object. | 12266 // Check that the left hand is a JS object. |
| 12251 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); // eax - object map | 12267 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); // eax - object map |
| 12252 __ movzx_b(ecx, FieldOperand(eax, Map::kInstanceTypeOffset)); // ecx - type | 12268 __ movzx_b(ecx, FieldOperand(eax, Map::kInstanceTypeOffset)); // ecx - type |
| 12253 __ cmp(ecx, FIRST_JS_OBJECT_TYPE); | 12269 __ cmp(ecx, FIRST_JS_OBJECT_TYPE); |
| 12254 __ j(below, &slow, not_taken); | 12270 __ j(less, &slow, not_taken); |
| 12255 __ cmp(ecx, LAST_JS_OBJECT_TYPE); | 12271 __ cmp(ecx, LAST_JS_OBJECT_TYPE); |
| 12256 __ j(above, &slow, not_taken); | 12272 __ j(greater, &slow, not_taken); |
| 12257 | 12273 |
| 12258 // Get the prototype of the function. | 12274 // Get the prototype of the function. |
| 12259 __ mov(edx, Operand(esp, 1 * kPointerSize)); // 1 ~ return address | 12275 __ mov(edx, Operand(esp, 1 * kPointerSize)); // 1 ~ return address |
| 12260 // edx is function, eax is map. | 12276 // edx is function, eax is map. |
| 12261 | 12277 |
| 12262 // Look up the function and the map in the instanceof cache. | 12278 // Look up the function and the map in the instanceof cache. |
| 12263 Label miss; | 12279 Label miss; |
| 12264 ExternalReference roots_address = ExternalReference::roots_address(); | 12280 ExternalReference roots_address = ExternalReference::roots_address(); |
| 12265 __ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); | 12281 __ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); |
| 12266 __ cmp(edx, Operand::StaticArray(ecx, times_pointer_size, roots_address)); | 12282 __ cmp(edx, Operand::StaticArray(ecx, times_pointer_size, roots_address)); |
| 12267 __ j(not_equal, &miss); | 12283 __ j(not_equal, &miss); |
| 12268 __ mov(ecx, Immediate(Heap::kInstanceofCacheMapRootIndex)); | 12284 __ mov(ecx, Immediate(Heap::kInstanceofCacheMapRootIndex)); |
| 12269 __ cmp(eax, Operand::StaticArray(ecx, times_pointer_size, roots_address)); | 12285 __ cmp(eax, Operand::StaticArray(ecx, times_pointer_size, roots_address)); |
| 12270 __ j(not_equal, &miss); | 12286 __ j(not_equal, &miss); |
| 12271 __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); | 12287 __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); |
| 12272 __ mov(eax, Operand::StaticArray(ecx, times_pointer_size, roots_address)); | 12288 __ mov(eax, Operand::StaticArray(ecx, times_pointer_size, roots_address)); |
| 12273 __ ret(2 * kPointerSize); | 12289 __ ret(2 * kPointerSize); |
| 12274 | 12290 |
| 12275 __ bind(&miss); | 12291 __ bind(&miss); |
| 12276 __ TryGetFunctionPrototype(edx, ebx, ecx, &slow); | 12292 __ TryGetFunctionPrototype(edx, ebx, ecx, &slow); |
| 12277 | 12293 |
| 12278 // Check that the function prototype is a JS object. | 12294 // Check that the function prototype is a JS object. |
| 12279 __ test(ebx, Immediate(kSmiTagMask)); | 12295 __ test(ebx, Immediate(kSmiTagMask)); |
| 12280 __ j(zero, &slow, not_taken); | 12296 __ j(zero, &slow, not_taken); |
| 12281 __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset)); | 12297 __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset)); |
| 12282 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); | 12298 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); |
| 12283 __ cmp(ecx, FIRST_JS_OBJECT_TYPE); | 12299 __ cmp(ecx, FIRST_JS_OBJECT_TYPE); |
| 12284 __ j(below, &slow, not_taken); | 12300 __ j(less, &slow, not_taken); |
| 12285 __ cmp(ecx, LAST_JS_OBJECT_TYPE); | 12301 __ cmp(ecx, LAST_JS_OBJECT_TYPE); |
| 12286 __ j(above, &slow, not_taken); | 12302 __ j(greater, &slow, not_taken); |
| 12287 | 12303 |
| 12288 // Register mapping: | 12304 // Register mapping: |
| 12289 // eax is object map. | 12305 // eax is object map. |
| 12290 // edx is function. | 12306 // edx is function. |
| 12291 // ebx is function prototype. | 12307 // ebx is function prototype. |
| 12292 __ mov(ecx, Immediate(Heap::kInstanceofCacheMapRootIndex)); | 12308 __ mov(ecx, Immediate(Heap::kInstanceofCacheMapRootIndex)); |
| 12293 __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax); | 12309 __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax); |
| 12294 __ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); | 12310 __ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); |
| 12295 __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), edx); | 12311 __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), edx); |
| 12296 | 12312 |
| (...skipping 979 matching lines...) |
| 13276 // tagged as a small integer. | 13292 // tagged as a small integer. |
| 13277 __ bind(&runtime); | 13293 __ bind(&runtime); |
| 13278 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 13294 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
| 13279 } | 13295 } |
| 13280 | 13296 |
| 13281 #undef __ | 13297 #undef __ |
| 13282 | 13298 |
| 13283 } } // namespace v8::internal | 13299 } } // namespace v8::internal |
| 13284 | 13300 |
| 13285 #endif // V8_TARGET_ARCH_IA32 | 13301 #endif // V8_TARGET_ARCH_IA32 |