Chromium Code Reviews

Diff: src/ppc/code-stubs-ppc.cc

Issue 1423713014: PPC: [runtime] Fix ES6 9.2.1 [[Call]] when encountering a classConstructor. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Rebase (created 5 years, 1 month ago)
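
The change ports to PPC the runtime fix for ES6 section 9.2.1 [[Call]]: a class constructor is only reachable through [[Construct]] (a `new` expression or a super call), and a plain call must throw a TypeError before the receiver is coerced or the body is entered. The stubs below implement that decision by testing the class-constructor bits of the function's compiler hints. As a minimal sketch of the rule being encoded (not V8 code; the struct, field and function names here are hypothetical stand-ins):

    // Illustrative sketch of the ES6 9.2.1 [[Call]] step the stubs implement.
    struct SharedInfoBits {
      bool is_class_constructor;  // derived from the compiler-hints bit field
    };

    enum class CallOutcome { kInvoke, kThrowTypeError };

    inline CallOutcome DecideCall(const SharedInfoBits& shared) {
      // A class constructor may only be reached via [[Construct]]; a plain
      // [[Call]] must raise a TypeError before touching the receiver.
      return shared.is_class_constructor ? CallOutcome::kThrowTypeError
                                         : CallOutcome::kInvoke;
    }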
@@ -1,10 +1,10 @@
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_PPC

 #include "src/base/bits.h"
 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
(...skipping 1412 matching lines...)
@@ -1423,27 +1423,21 @@
   __ lbz(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset));
   __ TestBit(scratch, Map::kHasNonInstancePrototype, r0);
   __ bne(&slow_case, cr0);

   // Ensure that {function} is not bound.
   Register const shared_info = scratch;
   __ LoadP(shared_info,
            FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
   __ lwz(scratch, FieldMemOperand(shared_info,
                                   SharedFunctionInfo::kCompilerHintsOffset));
-  __ TestBit(scratch,
-#if V8_TARGET_ARCH_PPC64
-             SharedFunctionInfo::kBoundFunction,
-#else
-             SharedFunctionInfo::kBoundFunction + kSmiTagSize,
-#endif
-             r0);
+  __ TestBit(scratch, SharedFunctionInfo::kBoundBit, r0);
   __ bne(&slow_case, cr0);

   // Get the "prototype" (or initial map) of the {function}.
   __ LoadP(function_prototype,
            FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
   __ AssertNotSmi(function_prototype);

   // Resolve the prototype if the {function} has an initial map. Afterwards the
   // {function_prototype} will be either the JSReceiver prototype object or the
   // hole value, which means that no instances of the {function} were created so
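
In the hunk above, the removed #if V8_TARGET_ARCH_PPC64 block had to shift the bit index by kSmiTagSize on 32-bit targets, because the compiler-hints field is read through a tagged (Smi) word there; the single SharedFunctionInfo::kBoundBit constant that replaces it presumably folds that platform adjustment into its definition, so the stub can test one well-known bit position on every target. A hedged C++ sketch of the test the TestBit/bne pair performs on the already-loaded hints word (the bit position is illustrative, not the real kBoundBit value):

    #include <cstdint>

    constexpr int kBoundBit = 4;  // hypothetical position of the "bound" flag

    // True when the function is a bound function; the stub branches to
    // slow_case in that case.
    inline bool IsBoundFunction(uint32_t compiler_hints) {
      return (compiler_hints & (1u << kBoundBit)) != 0;
    }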
(...skipping 1079 matching lines...)
@@ -2529,32 +2523,20 @@

   __ bind(&not_array_function);

   CreateWeakCellStub weak_cell_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
   __ bind(&done);
 }


 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
+  // ----------- S t a t e -------------
+  // -- r4 : the function to call
+  // -- r6 : the function's shared function info
+  // -----------------------------------
   // Do not transform the receiver for strict mode functions and natives.
-  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
   __ lwz(r7, FieldMemOperand(r6, SharedFunctionInfo::kCompilerHintsOffset));
-  __ TestBit(r7,
-#if V8_TARGET_ARCH_PPC64
-             SharedFunctionInfo::kStrictModeFunction,
-#else
-             SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
-#endif
-             r0);
-  __ bne(cont, cr0);
-
-  // Do not transform the receiver for native.
-  __ TestBit(r7,
-#if V8_TARGET_ARCH_PPC64
-             SharedFunctionInfo::kNative,
-#else
-             SharedFunctionInfo::kNative + kSmiTagSize,
-#endif
-             r0);
+  __ andi(r0, r7, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
+                          (1 << SharedFunctionInfo::kNativeBit)));
   __ bne(cont, cr0);
 }
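
The rewritten EmitContinueIfStrictOrNative now documents its register contract (r4 holds the callee, r6 its SharedFunctionInfo, which the caller is expected to load) and collapses the two separate TestBit checks into one andi against the OR of both masks: if either the strict-mode bit or the native bit is set, the receiver is left untransformed. A hedged equivalent of the combined test in plain C++ (bit positions are hypothetical):

    #include <cstdint>

    constexpr int kStrictModeBit = 2;  // hypothetical positions
    constexpr int kNativeBit = 3;

    // One AND against the union of two masks replaces two single-bit
    // tests and two conditional branches.
    inline bool SkipReceiverTransform(uint32_t compiler_hints) {
      const uint32_t mask = (1u << kStrictModeBit) | (1u << kNativeBit);
      return (compiler_hints & mask) != 0;  // taken -> skip ToObject wrapping
    }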
@@ -2561,23 +2543,41 @@


 static void EmitSlowCase(MacroAssembler* masm, int argc) {
   __ mov(r3, Operand(argc));
   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
 }


 static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
   // Wrap the receiver and patch it back onto the stack.
   {
     FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
     __ push(r4);
     __ mr(r3, r6);
     ToObjectStub stub(masm->isolate());
     __ CallStub(&stub);
     __ pop(r4);
   }
   __ StoreP(r3, MemOperand(sp, argc * kPointerSize), r0);
   __ b(cont);
 }


+static void EmitClassConstructorCallCheck(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  // -- r4 : the function to call
+  // -- r6 : the function's shared function info
+  // -----------------------------------
+  // ClassConstructor Check: ES6 section 9.2.1 [[Call]]
+  Label non_class_constructor;
+  // Check whether the current function is a classConstructor.
+  __ lwz(r7, FieldMemOperand(r6, SharedFunctionInfo::kCompilerHintsOffset));
+  __ TestBitMask(r7, SharedFunctionInfo::kClassConstructorBits, r0);
+  __ beq(&non_class_constructor, cr0);
+  // If we call a classConstructor Function throw a TypeError
+  // indirectly via the CallFunction builtin.
+  __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
+  __ bind(&non_class_constructor);
+}
+
+
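
The new EmitClassConstructorCallCheck helper assumes its caller has already loaded the function's SharedFunctionInfo into r6. It tests SharedFunctionInfo::kClassConstructorBits, which by its name is a multi-bit mask over the function-kind bits of the compiler hints, and when any of those bits is set it tail-calls the generic CallFunction builtin, which (as the comment in the diff says) throws the TypeError, so the throw sequence is not duplicated in every stub. A hedged sketch of the mask test (the mask value is hypothetical):

    #include <cstdint>

    // Any set bit inside the mask marks the function as a (base or derived)
    // class constructor, so a plain [[Call]] must throw.
    constexpr uint32_t kClassConstructorBits = 0x3u << 6;  // hypothetical mask

    inline bool IsClassConstructor(uint32_t compiler_hints) {
      return (compiler_hints & kClassConstructorBits) != 0;
    }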
@@ -2584,25 +2584,28 @@
 static void CallFunctionNoFeedback(MacroAssembler* masm, int argc,
                                    bool needs_checks, bool call_as_method) {
   // r4 : the function to call
   Label slow, wrap, cont;

   if (needs_checks) {
     // Check that the function is really a JavaScript function.
     // r4: pushed function (to be verified)
     __ JumpIfSmi(r4, &slow);

     // Goto slow case if we do not have a function.
     __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE);
     __ bne(&slow);
   }

+  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
+  EmitClassConstructorCallCheck(masm);
+
   // Fast-case: Invoke the function now.
   // r4: pushed function
   ParameterCount actual(argc);

   if (call_as_method) {
     if (needs_checks) {
       EmitContinueIfStrictOrNative(masm, &cont);
     }

     // Compute the receiver in sloppy mode.
(...skipping 153 matching lines...)
@@ -2762,20 +2765,24 @@
   // convincing us that we have a monomorphic JSFunction.
   __ JumpIfSmi(r4, &extra_checks_or_miss);

   // Increment the call count for monomorphic function calls.
   const int count_offset = FixedArray::kHeaderSize + kPointerSize;
   __ LoadP(r6, FieldMemOperand(r9, count_offset));
   __ AddSmiLiteral(r6, r6, Smi::FromInt(CallICNexus::kCallCountIncrement), r0);
   __ StoreP(r6, FieldMemOperand(r9, count_offset), r0);

   __ bind(&have_js_function);
+
+  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
+  EmitClassConstructorCallCheck(masm);
+
   if (CallAsMethod()) {
     EmitContinueIfStrictOrNative(masm, &cont);
     // Compute the receiver in sloppy mode.
     __ LoadP(r6, MemOperand(sp, argc * kPointerSize), r0);

     __ JumpIfSmi(r6, &wrap);
     __ CompareObjectType(r6, r7, r7, FIRST_SPEC_OBJECT_TYPE);
     __ blt(&wrap);

     __ bind(&cont);
(...skipping 3081 matching lines...)
@@ -5863,10 +5870,10 @@
                               kStackUnwindSpace, NULL,
                               MemOperand(fp, 6 * kPointerSize), NULL);
 }


 #undef __
 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_PPC