OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 192 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
203 __ And(at, scratch1, Operand(kTypeAndReadOnlyMask)); | 203 __ And(at, scratch1, Operand(kTypeAndReadOnlyMask)); |
204 __ Branch(miss, ne, at, Operand(zero_reg)); | 204 __ Branch(miss, ne, at, Operand(zero_reg)); |
205 | 205 |
206 // Store the value at the masked, scaled index and return. | 206 // Store the value at the masked, scaled index and return. |
207 const int kValueOffset = kElementsStartOffset + kPointerSize; | 207 const int kValueOffset = kElementsStartOffset + kPointerSize; |
208 __ Addu(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag)); | 208 __ Addu(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag)); |
209 __ sw(value, MemOperand(scratch2)); | 209 __ sw(value, MemOperand(scratch2)); |
210 | 210 |
211 // Update the write barrier. Make sure not to clobber the value. | 211 // Update the write barrier. Make sure not to clobber the value. |
212 __ mov(scratch1, value); | 212 __ mov(scratch1, value); |
213 __ RecordWrite(elements, scratch2, scratch1); | 213 __ RecordWrite( |
| 214 elements, scratch2, scratch1, kRAHasNotBeenSaved, kDontSaveFPRegs); |
214 } | 215 } |
215 | 216 |
216 | 217 |
217 void LoadIC::GenerateArrayLength(MacroAssembler* masm) { | 218 void LoadIC::GenerateArrayLength(MacroAssembler* masm) { |
218 // ----------- S t a t e ------------- | 219 // ----------- S t a t e ------------- |
219 // -- a2 : name | 220 // -- a2 : name |
220 // -- ra : return address | 221 // -- ra : return address |
221 // -- a0 : receiver | 222 // -- a0 : receiver |
222 // -- sp[0] : receiver | 223 // -- sp[0] : receiver |
223 // ----------------------------------- | 224 // ----------------------------------- |
(...skipping 673 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
897 // ---------- S t a t e -------------- | 898 // ---------- S t a t e -------------- |
898 // -- a0 : value | 899 // -- a0 : value |
899 // -- a1 : key | 900 // -- a1 : key |
900 // -- a2 : receiver | 901 // -- a2 : receiver |
901 // -- lr : return address | 902 // -- lr : return address |
902 // ----------------------------------- | 903 // ----------------------------------- |
903 Label slow, notin; | 904 Label slow, notin; |
904 MemOperand mapped_location = | 905 MemOperand mapped_location = |
905 GenerateMappedArgumentsLookup(masm, a2, a1, a3, t0, t1, &notin, &slow); | 906 GenerateMappedArgumentsLookup(masm, a2, a1, a3, t0, t1, &notin, &slow); |
906 __ sw(a0, mapped_location); | 907 __ sw(a0, mapped_location); |
907 // Verify mapped_location MemOperand is register, with no offset. | 908 __ Addu(t2, a3, t1); |
908 ASSERT_EQ(mapped_location.offset(), 0); | 909 __ mov(t5, a0); |
909 __ RecordWrite(a3, mapped_location.rm(), t5); | 910 __ RecordWrite(a3, t2, t5, kRAHasNotBeenSaved, kDontSaveFPRegs); |
910 __ Ret(USE_DELAY_SLOT); | 911 __ Ret(USE_DELAY_SLOT); |
911 __ mov(v0, a0); // (In delay slot) return the value stored in v0. | 912 __ mov(v0, a0); // (In delay slot) return the value stored in v0. |
912 __ bind(&notin); | 913 __ bind(&notin); |
913 // The unmapped lookup expects that the parameter map is in a3. | 914 // The unmapped lookup expects that the parameter map is in a3. |
914 MemOperand unmapped_location = | 915 MemOperand unmapped_location = |
915 GenerateUnmappedArgumentsLookup(masm, a1, a3, t0, &slow); | 916 GenerateUnmappedArgumentsLookup(masm, a1, a3, t0, &slow); |
916 __ sw(a0, unmapped_location); | 917 __ sw(a0, unmapped_location); |
917 ASSERT_EQ(unmapped_location.offset(), 0); | 918 __ Addu(t2, a3, t0); |
918 __ RecordWrite(a3, unmapped_location.rm(), t5); | 919 __ mov(t5, a0); |
| 920 __ RecordWrite(a3, t2, t5, kRAHasNotBeenSaved, kDontSaveFPRegs); |
919 __ Ret(USE_DELAY_SLOT); | 921 __ Ret(USE_DELAY_SLOT); |
920 __ mov(v0, a0); // (In delay slot) return the value stored in v0. | 922 __ mov(v0, a0); // (In delay slot) return the value stored in v0. |
921 __ bind(&slow); | 923 __ bind(&slow); |
922 GenerateMiss(masm, false); | 924 GenerateMiss(masm, false); |
923 } | 925 } |
924 | 926 |
925 | 927 |
926 void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm, | 928 void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm, |
927 int argc) { | 929 int argc) { |
928 // ----------- S t a t e ------------- | 930 // ----------- S t a t e ------------- |
(...skipping 354 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1283 __ lw(t3, FieldMemOperand(elements, HeapObject::kMapOffset)); | 1285 __ lw(t3, FieldMemOperand(elements, HeapObject::kMapOffset)); |
1284 __ LoadRoot(t0, Heap::kFixedArrayMapRootIndex); | 1286 __ LoadRoot(t0, Heap::kFixedArrayMapRootIndex); |
1285 __ Branch(&slow, ne, t3, Operand(t0)); | 1287 __ Branch(&slow, ne, t3, Operand(t0)); |
1286 | 1288 |
1287 // Check the key against the length in the array. | 1289 // Check the key against the length in the array. |
1288 __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 1290 __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
1289 __ Branch(&extra, hs, key, Operand(t0)); | 1291 __ Branch(&extra, hs, key, Operand(t0)); |
1290 // Fall through to fast case. | 1292 // Fall through to fast case. |
1291 | 1293 |
1292 __ bind(&fast); | 1294 __ bind(&fast); |
| 1295 Register scratch_value = t0; |
| 1296 Register address = t1; |
1293 // Fast case, store the value to the elements backing store. | 1297 // Fast case, store the value to the elements backing store. |
1294 __ Addu(t4, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 1298 __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
1295 __ sll(t1, key, kPointerSizeLog2 - kSmiTagSize); | 1299 __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize); |
1296 __ Addu(t4, t4, Operand(t1)); | 1300 __ Addu(address, address, scratch_value); |
1297 __ sw(value, MemOperand(t4)); | 1301 __ sw(value, MemOperand(address)); |
1298 // Skip write barrier if the written value is a smi. | 1302 // Skip write barrier if the written value is a smi. |
1299 __ JumpIfSmi(value, &exit); | 1303 __ JumpIfSmi(value, &exit); |
1300 | 1304 |
1301 // Update write barrier for the elements array address. | 1305 // Update write barrier for the elements array address. |
1302 __ Subu(t3, t4, Operand(elements)); | 1306 __ mov(scratch_value, value); // Preserve the value which is returned. |
1303 | 1307 __ RecordWrite(elements, |
1304 __ RecordWrite(elements, Operand(t3), t4, t5); | 1308 address, |
| 1309 scratch_value, |
| 1310 kRAHasNotBeenSaved, |
| 1311 kDontSaveFPRegs, |
| 1312 EMIT_REMEMBERED_SET, |
| 1313 OMIT_SMI_CHECK); |
1305 __ bind(&exit); | 1314 __ bind(&exit); |
1306 | 1315 |
1307 __ mov(v0, a0); // Return the value written. | 1316 __ mov(v0, a0); // Return the value written. |
1308 __ Ret(); | 1317 __ Ret(); |
1309 } | 1318 } |
1310 | 1319 |
1311 | 1320 |
1312 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) { | 1321 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) { |
1313 // ---------- S t a t e -------------- | 1322 // ---------- S t a t e -------------- |
1314 // -- ra : return address | 1323 // -- ra : return address |
(...skipping 314 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1629 Register reg = Register::from_code(Assembler::GetRs(instr_at_patch)); | 1638 Register reg = Register::from_code(Assembler::GetRs(instr_at_patch)); |
1630 patcher.masm()->andi(at, reg, kSmiTagMask); | 1639 patcher.masm()->andi(at, reg, kSmiTagMask); |
1631 patcher.ChangeBranchCondition(eq); | 1640 patcher.ChangeBranchCondition(eq); |
1632 } | 1641 } |
1633 } | 1642 } |
1634 | 1643 |
1635 | 1644 |
1636 } } // namespace v8::internal | 1645 } } // namespace v8::internal |
1637 | 1646 |
1638 #endif // V8_TARGET_ARCH_MIPS | 1647 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |