| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 289 matching lines...) |
| 300 | 300 |
| 301 __ bind(&check_optimized); | 301 __ bind(&check_optimized); |
| 302 | 302 |
| 303 __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1, r6, r7); | 303 __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1, r6, r7); |
| 304 | 304 |
| 305 // r2 holds native context, r1 points to fixed array of 3-element entries | 305 // r2 holds native context, r1 points to fixed array of 3-element entries |
| 306 // (native context, optimized code, literals). | 306 // (native context, optimized code, literals). |
| 307 // The optimized code map must never be empty, so check the first elements. | 307 // The optimized code map must never be empty, so check the first elements. |
| 308 Label install_optimized; | 308 Label install_optimized; |
| 309 // Speculatively move code object into r4. | 309 // Speculatively move code object into r4. |
| 310 __ ldr(r4, FieldMemOperand(r1, FixedArray::kHeaderSize + kPointerSize)); | 310 __ ldr(r4, FieldMemOperand(r1, SharedFunctionInfo::kFirstCodeSlot)); |
| 311 __ ldr(r5, FieldMemOperand(r1, FixedArray::kHeaderSize)); | 311 __ ldr(r5, FieldMemOperand(r1, SharedFunctionInfo::kFirstContextSlot)); |
| 312 __ cmp(r2, r5); | 312 __ cmp(r2, r5); |
| 313 __ b(eq, &install_optimized); | 313 __ b(eq, &install_optimized); |
| 314 | 314 |
| 315 // Iterate through the rest of map backwards. r4 holds an index as a Smi. | 315 // Iterate through the rest of map backwards. r4 holds an index as a Smi. |
| 316 Label loop; | 316 Label loop; |
| 317 __ ldr(r4, FieldMemOperand(r1, FixedArray::kLengthOffset)); | 317 __ ldr(r4, FieldMemOperand(r1, FixedArray::kLengthOffset)); |
| 318 __ bind(&loop); | 318 __ bind(&loop); |
| 319 // Do not double check first entry. | 319 // Do not double check first entry. |
| 320 | 320 __ cmp(r4, Operand(Smi::FromInt(SharedFunctionInfo::kSecondEntryIndex))); |
| 321 __ cmp(r4, Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength))); | |
| 322 __ b(eq, &install_unoptimized); | 321 __ b(eq, &install_unoptimized); |
| 323 __ sub(r4, r4, Operand( | 322 __ sub(r4, r4, Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength))); |
| 324 Smi::FromInt(SharedFunctionInfo::kEntryLength))); // Skip an entry. | |
| 325 __ add(r5, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 323 __ add(r5, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 326 __ add(r5, r5, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize)); | 324 __ add(r5, r5, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 327 __ ldr(r5, MemOperand(r5)); | 325 __ ldr(r5, MemOperand(r5)); |
| 328 __ cmp(r2, r5); | 326 __ cmp(r2, r5); |
| 329 __ b(ne, &loop); | 327 __ b(ne, &loop); |
| 330 // Hit: fetch the optimized code. | 328 // Hit: fetch the optimized code. |
| 331 __ add(r5, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 329 __ add(r5, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 332 __ add(r5, r5, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize)); | 330 __ add(r5, r5, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 333 __ add(r5, r5, Operand(kPointerSize)); | 331 __ add(r5, r5, Operand(kPointerSize)); |
| 334 __ ldr(r4, MemOperand(r5)); | 332 __ ldr(r4, MemOperand(r5)); |
| (...skipping 7070 matching lines...) |
| 7405 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); | 7403 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); |
| 7406 } | 7404 } |
| 7407 } | 7405 } |
| 7408 | 7406 |
| 7409 | 7407 |
| 7410 #undef __ | 7408 #undef __ |
| 7411 | 7409 |
| 7412 } } // namespace v8::internal | 7410 } } // namespace v8::internal |
| 7413 | 7411 |
| 7414 #endif // V8_TARGET_ARCH_ARM | 7412 #endif // V8_TARGET_ARCH_ARM |
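
Note on the change at new lines 310, 311 and 320: the raw FixedArray offsets and the kEntryLength sentinel are replaced with the named SharedFunctionInfo constants kFirstContextSlot, kFirstCodeSlot and kSecondEntryIndex. The lookup itself is unchanged: the optimized code map is a flat array of (native context, optimized code, literals) entries, the first entry is checked speculatively, and the remaining entries are walked backwards until only the already-checked first entry is left. The following is a rough standalone C++ model of that loop, not V8 code; the concrete values kEntryLength = 3 and kSecondEntryIndex = kEntryLength are placeholder assumptions standing in for the real SharedFunctionInfo layout constants.

// Minimal sketch (assumptions as noted above) of the optimized code map
// lookup performed by the stub: search backwards through
// (native context, optimized code, literals) triples for a matching context.
#include <cstdio>
#include <vector>

namespace sketch {

constexpr int kEntryLength = 3;                  // assumed slots per entry
constexpr int kSecondEntryIndex = kEntryLength;  // assumed first slot after entry 0

// Returns the code slot paired with `context`, or -1 if no entry matches.
int FindOptimizedCode(const std::vector<int>& map, int context) {
  // The map is never empty, so check the first entry up front, mirroring the
  // speculative kFirstContextSlot / kFirstCodeSlot loads in the stub.
  if (map[0] == context) return map[1];

  // Walk the remaining entries backwards, starting from the array length and
  // stepping one entry (kEntryLength slots) at a time; bail out once the
  // index reaches the second entry, since entry 0 was already checked.
  for (int index = static_cast<int>(map.size()); index != kSecondEntryIndex;) {
    index -= kEntryLength;
    if (map[index] == context) return map[index + 1];  // hit: fetch the code
  }
  return -1;  // miss: fall through to the unoptimized closure path
}

}  // namespace sketch

int main() {
  // Two entries: contexts 10 and 20 paired with code handles 11 and 21.
  std::vector<int> map = {10, 11, 12, 20, 21, 22};
  std::printf("%d\n", sketch::FindOptimizedCode(map, 20));  // prints 21
  std::printf("%d\n", sketch::FindOptimizedCode(map, 99));  // prints -1
  return 0;
}

The hit and miss return values correspond to the install_optimized and install_unoptimized paths in the stub; the sketch uses plain ints where the assembly manipulates Smi-tagged indices, which is why the real code shifts by kPointerSizeLog2 - kSmiTagSize when forming addresses.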