OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM | 5 #if V8_TARGET_ARCH_ARM |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 2430 matching lines...) |
2441 __ mov(r3, r1); | 2441 __ mov(r3, r1); |
2442 ArrayConstructorStub stub(masm->isolate(), arg_count()); | 2442 ArrayConstructorStub stub(masm->isolate(), arg_count()); |
2443 __ TailCallStub(&stub); | 2443 __ TailCallStub(&stub); |
2444 } | 2444 } |
2445 | 2445 |
2446 | 2446 |
2447 void CallICStub::Generate(MacroAssembler* masm) { | 2447 void CallICStub::Generate(MacroAssembler* masm) { |
2448 // r1 - function | 2448 // r1 - function |
2449 // r3 - slot id (Smi) | 2449 // r3 - slot id (Smi) |
2450 // r2 - vector | 2450 // r2 - vector |
2451 const int with_types_offset = | |
2452 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | |
2453 const int generic_offset = | |
2454 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); | |
2455 Label extra_checks_or_miss, call, call_function; | 2451 Label extra_checks_or_miss, call, call_function; |
2456 int argc = arg_count(); | 2452 int argc = arg_count(); |
2457 ParameterCount actual(argc); | 2453 ParameterCount actual(argc); |
2458 | 2454 |
2459 // The checks. First, does r1 match the recorded monomorphic target? | 2455 // The checks. First, does r1 match the recorded monomorphic target? |
2460 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2456 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2461 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); | 2457 __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
2462 | 2458 |
2463 // We don't know that we have a weak cell. We might have a private symbol | 2459 // We don't know that we have a weak cell. We might have a private symbol |
2464 // or an AllocationSite, but the memory is safe to examine. | 2460 // or an AllocationSite, but the memory is safe to examine. |
(...skipping 55 matching lines...) |
2520 __ b(eq, &uninitialized); | 2516 __ b(eq, &uninitialized); |
2521 | 2517 |
2522 // We are going megamorphic. If the feedback is a JSFunction, it is fine | 2518 // We are going megamorphic. If the feedback is a JSFunction, it is fine |
2523 // to handle it here. More complex cases are dealt with in the runtime. | 2519 // to handle it here. More complex cases are dealt with in the runtime. |
2524 __ AssertNotSmi(r4); | 2520 __ AssertNotSmi(r4); |
2525 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE); | 2521 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE); |
2526 __ b(ne, &miss); | 2522 __ b(ne, &miss); |
2527 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2523 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2528 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); | 2524 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); |
2529 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); | 2525 __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize)); |
2530 // We have to update statistics for runtime profiling. | |
2531 __ ldr(r4, FieldMemOperand(r2, with_types_offset)); | |
2532 __ sub(r4, r4, Operand(Smi::FromInt(1))); | |
2533 __ str(r4, FieldMemOperand(r2, with_types_offset)); | |
2534 __ ldr(r4, FieldMemOperand(r2, generic_offset)); | |
2535 __ add(r4, r4, Operand(Smi::FromInt(1))); | |
2536 __ str(r4, FieldMemOperand(r2, generic_offset)); | |
2537 | 2526 |
2538 __ bind(&call); | 2527 __ bind(&call); |
2539 __ mov(r0, Operand(argc)); | 2528 __ mov(r0, Operand(argc)); |
2540 __ Jump(masm->isolate()->builtins()->Call(convert_mode()), | 2529 __ Jump(masm->isolate()->builtins()->Call(convert_mode()), |
2541 RelocInfo::CODE_TARGET); | 2530 RelocInfo::CODE_TARGET); |
2542 | 2531 |
2543 __ bind(&uninitialized); | 2532 __ bind(&uninitialized); |
2544 | 2533 |
2545 // We are going monomorphic, provided we actually have a JSFunction. | 2534 // We are going monomorphic, provided we actually have a JSFunction. |
2546 __ JumpIfSmi(r1, &miss); | 2535 __ JumpIfSmi(r1, &miss); |
2547 | 2536 |
2548 // Go to the miss case if we do not have a function. | 2537 // Go to the miss case if we do not have a function. |
2549 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); | 2538 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); |
2550 __ b(ne, &miss); | 2539 __ b(ne, &miss); |
2551 | 2540 |
2552 // Make sure the function is not the Array() function, which requires special | 2541 // Make sure the function is not the Array() function, which requires special |
2553 // behavior on MISS. | 2542 // behavior on MISS. |
2554 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r4); | 2543 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r4); |
2555 __ cmp(r1, r4); | 2544 __ cmp(r1, r4); |
2556 __ b(eq, &miss); | 2545 __ b(eq, &miss); |
2557 | 2546 |
2558 // Make sure the function belongs to the same native context. | 2547 // Make sure the function belongs to the same native context. |
2559 __ ldr(r4, FieldMemOperand(r1, JSFunction::kContextOffset)); | 2548 __ ldr(r4, FieldMemOperand(r1, JSFunction::kContextOffset)); |
2560 __ ldr(r4, ContextMemOperand(r4, Context::NATIVE_CONTEXT_INDEX)); | 2549 __ ldr(r4, ContextMemOperand(r4, Context::NATIVE_CONTEXT_INDEX)); |
2561 __ ldr(ip, NativeContextMemOperand()); | 2550 __ ldr(ip, NativeContextMemOperand()); |
2562 __ cmp(r4, ip); | 2551 __ cmp(r4, ip); |
2563 __ b(ne, &miss); | 2552 __ b(ne, &miss); |
2564 | 2553 |
2565 // Update stats. | |
2566 __ ldr(r4, FieldMemOperand(r2, with_types_offset)); | |
2567 __ add(r4, r4, Operand(Smi::FromInt(1))); | |
2568 __ str(r4, FieldMemOperand(r2, with_types_offset)); | |
2569 | |
2570 // Initialize the call counter. | 2554 // Initialize the call counter. |
2571 __ Move(r5, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); | 2555 __ Move(r5, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); |
2572 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); | 2556 __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3)); |
2573 __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize)); | 2557 __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize)); |
2574 | 2558 |
2575 // Store the function. Use a stub since we need a frame for allocation. | 2559 // Store the function. Use a stub since we need a frame for allocation. |
2576 // r2 - vector | 2560 // r2 - vector |
2577 // r3 - slot | 2561 // r3 - slot |
2578 // r1 - function | 2562 // r1 - function |
2579 { | 2563 { |
(...skipping 2802 matching lines...) |
5382 MemOperand(fp, 6 * kPointerSize), NULL); | 5366 MemOperand(fp, 6 * kPointerSize), NULL); |
5383 } | 5367 } |
5384 | 5368 |
5385 | 5369 |
5386 #undef __ | 5370 #undef __ |
5387 | 5371 |
5388 } // namespace internal | 5372 } // namespace internal |
5389 } // namespace v8 | 5373 } // namespace v8 |
5390 | 5374 |
5391 #endif // V8_TARGET_ARCH_ARM | 5375 #endif // V8_TARGET_ARCH_ARM |