OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 291 matching lines...)
302 | 302 |
303 __ bind(&check_optimized); | 303 __ bind(&check_optimized); |
304 | 304 |
305 __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1, t2, t3); | 305 __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1, t2, t3); |
306 | 306 |
307 // a2 holds native context, a1 points to fixed array of 3-element entries | 307 // a2 holds native context, a1 points to fixed array of 3-element entries |
308 // (native context, optimized code, literals). | 308 // (native context, optimized code, literals). |
309 // The optimized code map must never be empty, so check the first elements. | 309 // The optimized code map must never be empty, so check the first elements. |
310 Label install_optimized; | 310 Label install_optimized; |
311 // Speculatively move code object into t0. | 311 // Speculatively move code object into t0. |
312 __ lw(t0, FieldMemOperand(a1, FixedArray::kHeaderSize + kPointerSize)); | 312 __ lw(t0, FieldMemOperand(a1, SharedFunctionInfo::kFirstCodeSlot)); |
313 __ lw(t1, FieldMemOperand(a1, FixedArray::kHeaderSize)); | 313 __ lw(t1, FieldMemOperand(a1, SharedFunctionInfo::kFirstContextSlot)); |
314 __ Branch(&install_optimized, eq, a2, Operand(t1)); | 314 __ Branch(&install_optimized, eq, a2, Operand(t1)); |
315 | 315 |
316 // Iterate through the rest of map backwards. t0 holds an index as a Smi. | 316 // Iterate through the rest of map backwards. t0 holds an index as a Smi. |
317 Label loop; | 317 Label loop; |
318 __ lw(t0, FieldMemOperand(a1, FixedArray::kLengthOffset)); | 318 __ lw(t0, FieldMemOperand(a1, FixedArray::kLengthOffset)); |
319 __ bind(&loop); | 319 __ bind(&loop); |
320 // Do not double check first entry. | 320 // Do not double check first entry. |
321 | |
322 __ Branch(&install_unoptimized, eq, t0, | 321 __ Branch(&install_unoptimized, eq, t0, |
323 Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength))); | 322 Operand(Smi::FromInt(SharedFunctionInfo::kSecondEntryIndex))); |
324 __ Subu(t0, t0, Operand( | 323 __ Subu(t0, t0, Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength))); |
325 Smi::FromInt(SharedFunctionInfo::kEntryLength))); // Skip an entry. | |
326 __ Addu(t1, a1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 324 __ Addu(t1, a1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
327 __ sll(at, t0, kPointerSizeLog2 - kSmiTagSize); | 325 __ sll(at, t0, kPointerSizeLog2 - kSmiTagSize); |
328 __ Addu(t1, t1, Operand(at)); | 326 __ Addu(t1, t1, Operand(at)); |
329 __ lw(t1, MemOperand(t1)); | 327 __ lw(t1, MemOperand(t1)); |
330 __ Branch(&loop, ne, a2, Operand(t1)); | 328 __ Branch(&loop, ne, a2, Operand(t1)); |
331 // Hit: fetch the optimized code. | 329 // Hit: fetch the optimized code. |
332 __ Addu(t1, a1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 330 __ Addu(t1, a1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
333 __ sll(at, t0, kPointerSizeLog2 - kSmiTagSize); | 331 __ sll(at, t0, kPointerSizeLog2 - kSmiTagSize); |
334 __ Addu(t1, t1, Operand(at)); | 332 __ Addu(t1, t1, Operand(at)); |
335 __ Addu(t1, t1, Operand(kPointerSize)); | 333 __ Addu(t1, t1, Operand(kPointerSize)); |
(...skipping 7459 matching lines...)
7795 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); | 7793 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); |
7796 } | 7794 } |
7797 } | 7795 } |
7798 | 7796 |
7799 | 7797 |
7800 #undef __ | 7798 #undef __ |
7801 | 7799 |
7802 } } // namespace v8::internal | 7800 } } // namespace v8::internal |
7803 | 7801 |
7804 #endif // V8_TARGET_ARCH_MIPS | 7802 #endif // V8_TARGET_ARCH_MIPS |
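The hunk at new lines 311-323 replaces hand-computed FixedArray offsets with named SharedFunctionInfo constants. As a minimal sketch only, the definitions below show values consistent with the 3-element entry layout (native context, optimized code, literals) described in the comment at new lines 307-309 and with the byte offsets the old code computed inline; the authoritative definitions live on SharedFunctionInfo in src/objects.h, and the assumption here is that entries start at element 0 of the code map.

// Sketch, assuming the optimized code map is a FixedArray of
// (native context, optimized code, literals) triples starting at element 0.
// Values are chosen to match the offsets the old code used; not authoritative.
static const int kEntryLength = 3;                  // elements per code-map entry
static const int kSecondEntryIndex = kEntryLength;  // Smi index where entry #1 begins
static const int kFirstContextSlot =                // entry #0: native context
    FixedArray::kHeaderSize;
static const int kFirstCodeSlot =                   // entry #0: optimized code
    FixedArray::kHeaderSize + kPointerSize;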