OLD | NEW |
---|---|
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5566 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
5577 frame_->EmitPop(index2); | 5577 frame_->EmitPop(index2); |
5578 frame_->EmitPop(index1); | 5578 frame_->EmitPop(index1); |
5579 frame_->EmitPop(object); | 5579 frame_->EmitPop(object); |
5580 | 5580 |
5581 DeferredSwapElements* deferred = | 5581 DeferredSwapElements* deferred = |
5582 new DeferredSwapElements(object, index1, index2); | 5582 new DeferredSwapElements(object, index1, index2); |
5583 | 5583 |
5584 // Fetch the map and check if array is in fast case. | 5584 // Fetch the map and check if array is in fast case. |
5585 // Check that object doesn't require security checks and | 5585 // Check that object doesn't require security checks and |
5586 // has no indexed interceptor. | 5586 // has no indexed interceptor. |
5587 __ CompareObjectType(object, tmp1, tmp2, FIRST_JS_OBJECT_TYPE); | 5587 __ CompareObjectType(object, tmp1, tmp2, JS_ARRAY_TYPE); |
5588 deferred->Branch(lt); | 5588 deferred->Branch(ne); |
5589 __ ldrb(tmp2, FieldMemOperand(tmp1, Map::kBitFieldOffset)); | 5589 __ ldrb(tmp2, FieldMemOperand(tmp1, Map::kBitFieldOffset)); |
5590 __ tst(tmp2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask)); | 5590 __ tst(tmp2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask)); |
5591 deferred->Branch(ne); | 5591 deferred->Branch(ne); |
5592 | 5592 |
5593 // Check the object's elements are in fast case and writable. | 5593 // Check the object's elements are in fast case and writable. |
5594 __ ldr(tmp1, FieldMemOperand(object, JSObject::kElementsOffset)); | 5594 __ ldr(tmp1, FieldMemOperand(object, JSObject::kElementsOffset)); |
5595 __ ldr(tmp2, FieldMemOperand(tmp1, HeapObject::kMapOffset)); | 5595 __ ldr(tmp2, FieldMemOperand(tmp1, HeapObject::kMapOffset)); |
5596 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); | 5596 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); |
5597 __ cmp(tmp2, ip); | 5597 __ cmp(tmp2, ip); |
5598 deferred->Branch(ne); | 5598 deferred->Branch(ne); |
(...skipping 1533 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
7132 Register scratch1 = VirtualFrame::scratch0(); | 7132 Register scratch1 = VirtualFrame::scratch0(); |
7133 Register scratch2 = VirtualFrame::scratch1(); | 7133 Register scratch2 = VirtualFrame::scratch1(); |
7134 Register scratch3 = r3; | 7134 Register scratch3 = r3; |
7135 | 7135 |
7136 // Counter will be decremented in the deferred code. Placed here to avoid | 7136 // Counter will be decremented in the deferred code. Placed here to avoid |
7137 // having it in the instruction stream below where patching will occur. | 7137 // having it in the instruction stream below where patching will occur. |
7138 __ IncrementCounter(&Counters::keyed_store_inline, 1, | 7138 __ IncrementCounter(&Counters::keyed_store_inline, 1, |
7139 scratch1, scratch2); | 7139 scratch1, scratch2); |
7140 | 7140 |
7141 | 7141 |
7142 | |
7143 // Load the value, key and receiver from the stack. | 7142 // Load the value, key and receiver from the stack. |
7144 bool value_is_harmless = frame_->KnownSmiAt(0); | 7143 bool value_is_harmless = frame_->KnownSmiAt(0); |
7145 if (wb_info == NEVER_NEWSPACE) value_is_harmless = true; | 7144 if (wb_info == NEVER_NEWSPACE) value_is_harmless = true; |
7146 bool key_is_smi = frame_->KnownSmiAt(1); | 7145 bool key_is_smi = frame_->KnownSmiAt(1); |
7147 Register value = frame_->PopToRegister(); | 7146 Register value = frame_->PopToRegister(); |
7148 Register key = frame_->PopToRegister(value); | 7147 Register key = frame_->PopToRegister(value); |
7149 VirtualFrame::SpilledScope spilled(frame_); | 7148 VirtualFrame::SpilledScope spilled(frame_); |
7150 Register receiver = r2; | 7149 Register receiver = r2; |
7151 frame_->EmitPop(receiver); | 7150 frame_->EmitPop(receiver); |
7152 | 7151 |
(...skipping 27 matching lines...) Expand all Loading... | |
7180 } | 7179 } |
7181 | 7180 |
7182 // Check that the receiver is a heap object. | 7181 // Check that the receiver is a heap object. |
7183 __ tst(receiver, Operand(kSmiTagMask)); | 7182 __ tst(receiver, Operand(kSmiTagMask)); |
7184 deferred->Branch(eq); | 7183 deferred->Branch(eq); |
7185 | 7184 |
7186 // Check that the receiver is a JSArray. | 7185 // Check that the receiver is a JSArray. |
7187 __ CompareObjectType(receiver, scratch1, scratch1, JS_ARRAY_TYPE); | 7186 __ CompareObjectType(receiver, scratch1, scratch1, JS_ARRAY_TYPE); |
7188 deferred->Branch(ne); | 7187 deferred->Branch(ne); |
7189 | 7188 |
7190 // Check that the key is within bounds. Both the key and the length of | |
7191 // the JSArray are smis. Use unsigned comparison to handle negative keys. | |
7192 __ ldr(scratch1, FieldMemOperand(receiver, JSArray::kLengthOffset)); | |
7193 __ cmp(scratch1, key); | |
7194 deferred->Branch(ls); // Unsigned less equal. | |
7195 | |
7196 // Get the elements array from the receiver. | 7189 // Get the elements array from the receiver. |
7197 __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 7190 __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
7198 if (!value_is_harmless && wb_info != LIKELY_SMI) { | 7191 if (!value_is_harmless && wb_info != LIKELY_SMI) { |
7199 Label ok; | 7192 Label ok; |
7200 __ and_(scratch2, scratch1, Operand(ExternalReference::new_space_mask())); | 7193 __ and_(scratch2, scratch1, Operand(ExternalReference::new_space_mask())); |
7201 __ cmp(scratch2, Operand(ExternalReference::new_space_start())); | 7194 __ cmp(scratch2, Operand(ExternalReference::new_space_start())); |
7202 __ tst(value, Operand(kSmiTagMask), ne); | 7195 __ tst(value, Operand(kSmiTagMask), ne); |
7203 deferred->Branch(ne); | 7196 deferred->Branch(ne); |
7204 #ifdef DEBUG | 7197 #ifdef DEBUG |
7205 we_remembered_the_write_barrier = true; | 7198 we_remembered_the_write_barrier = true; |
7206 #endif | 7199 #endif |
7207 } | 7200 } |
7208 // Check that the elements array is not a dictionary. | 7201 // Check that the elements array is not a dictionary. |
7209 __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset)); | 7202 __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset)); |
7203 | |
7204 // Check that the key is within bounds. Both the key and the length of | |
7205 // the JSArray are smis. Use unsigned comparison to handle negative keys. | |
7206 __ ldr(scratch1, FieldMemOperand(receiver, JSArray::kLengthOffset)); | |
7207 __ cmp(scratch1, key); | |
7208 deferred->Branch(ls); // Unsigned less equal. | |
7209 | |
Lasse Reichstein
2011/03/15 09:07:01
Add to the comment that we know the length is a smi.
Lasse Reichstein
2011/03/15 09:07:01
This code now clobbers scratch1, which is assumed to still hold the elements array.
Erik Corry
2011/03/15 10:00:50
Nice catch. The tests missed this because this wh
Erik Corry
2011/03/15 10:00:50
Done.
| |
7210 // The following instructions are the part of the inlined store keyed | 7210 // The following instructions are the part of the inlined store keyed |
7211 // property code which can be patched. Therefore the exact number of | 7211 // property code which can be patched. Therefore the exact number of |
7212 // instructions generated need to be fixed, so the constant pool is blocked | 7212 // instructions generated need to be fixed, so the constant pool is blocked |
7213 // while generating this code. | 7213 // while generating this code. |
7214 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 7214 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
7215 #ifdef DEBUG | 7215 #ifdef DEBUG |
7216 Label check_inlined_codesize; | 7216 Label check_inlined_codesize; |
7217 masm_->bind(&check_inlined_codesize); | 7217 masm_->bind(&check_inlined_codesize); |
7218 #endif | 7218 #endif |
7219 | 7219 |
(...skipping 183 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
7403 BinaryOpIC::GetName(runtime_operands_type_)); | 7403 BinaryOpIC::GetName(runtime_operands_type_)); |
7404 return name_; | 7404 return name_; |
7405 } | 7405 } |
7406 | 7406 |
7407 | 7407 |
7408 #undef __ | 7408 #undef __ |
7409 | 7409 |
7410 } } // namespace v8::internal | 7410 } } // namespace v8::internal |
7411 | 7411 |
7412 #endif // V8_TARGET_ARCH_ARM | 7412 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |