OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 1685 matching lines...)
1696 __ Allocate(r11, r3, r11, r7, &runtime, TAG_OBJECT); | 1696 __ Allocate(r11, r3, r11, r7, &runtime, TAG_OBJECT); |
1697 | 1697 |
1698 // r3 = address of new object(s) (tagged) | 1698 // r3 = address of new object(s) (tagged) |
1699 // r5 = argument count (smi-tagged) | 1699 // r5 = argument count (smi-tagged) |
1700 // Get the arguments boilerplate from the current native context into r7. | 1700 // Get the arguments boilerplate from the current native context into r7. |
1701 const int kNormalOffset = | 1701 const int kNormalOffset = |
1702 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX); | 1702 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX); |
1703 const int kAliasedOffset = | 1703 const int kAliasedOffset = |
1704 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX); | 1704 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX); |
1705 | 1705 |
1706 __ LoadP(r7, | 1706 __ LoadP(r7, NativeContextMemOperand()); |
1707 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | |
1708 __ LoadP(r7, FieldMemOperand(r7, JSGlobalObject::kNativeContextOffset)); | |
1709 __ cmpi(r9, Operand::Zero()); | 1707 __ cmpi(r9, Operand::Zero()); |
1710 if (CpuFeatures::IsSupported(ISELECT)) { | 1708 if (CpuFeatures::IsSupported(ISELECT)) { |
1711 __ LoadP(r11, MemOperand(r7, kNormalOffset)); | 1709 __ LoadP(r11, MemOperand(r7, kNormalOffset)); |
1712 __ LoadP(r7, MemOperand(r7, kAliasedOffset)); | 1710 __ LoadP(r7, MemOperand(r7, kAliasedOffset)); |
1713 __ isel(eq, r7, r11, r7); | 1711 __ isel(eq, r7, r11, r7); |
1714 } else { | 1712 } else { |
1715 Label skip4, skip5; | 1713 Label skip4, skip5; |
1716 __ bne(&skip4); | 1714 __ bne(&skip4); |
1717 __ LoadP(r7, MemOperand(r7, kNormalOffset)); | 1715 __ LoadP(r7, MemOperand(r7, kNormalOffset)); |
1718 __ b(&skip5); | 1716 __ b(&skip5); |
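Note: the NEW side replaces the two dependent loads (global object out of cp, then the native context out of the global object) with a single NativeContextMemOperand() read; the ISELECT path then picks between the normal and aliased maps without a branch. A minimal sketch of what the new operand helpers plausibly reduce to, inferred from the call sites in this CL rather than from the macro-assembler source itself:

    // Assumed shape: with NATIVE_CONTEXT_INDEX reachable from any context,
    // the native context is one slot read away from cp.
    inline MemOperand ContextMemOperand(Register context, int index) {
      return MemOperand(context, Context::SlotOffset(index));
    }

    inline MemOperand NativeContextMemOperand() {
      return ContextMemOperand(cp, Context::NATIVE_CONTEXT_INDEX);
    }

Net effect in this hunk: one load instead of two before the map selection.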
(...skipping 194 matching lines...)
1913 __ beq(&add_arguments_object, cr0); | 1911 __ beq(&add_arguments_object, cr0); |
1914 __ addi(r11, r11, Operand(FixedArray::kHeaderSize / kPointerSize)); | 1912 __ addi(r11, r11, Operand(FixedArray::kHeaderSize / kPointerSize)); |
1915 __ bind(&add_arguments_object); | 1913 __ bind(&add_arguments_object); |
1916 __ addi(r11, r11, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize)); | 1914 __ addi(r11, r11, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize)); |
1917 | 1915 |
1918 // Do the allocation of both objects in one go. | 1916 // Do the allocation of both objects in one go. |
1919 __ Allocate(r11, r3, r7, r8, &runtime, | 1917 __ Allocate(r11, r3, r7, r8, &runtime, |
1920 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)); | 1918 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)); |
1921 | 1919 |
1922 // Get the arguments boilerplate from the current native context. | 1920 // Get the arguments boilerplate from the current native context. |
1923 __ LoadP(r7, | 1921 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, r7); |
1924 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | |
1925 __ LoadP(r7, FieldMemOperand(r7, JSGlobalObject::kNativeContextOffset)); | |
1926 __ LoadP( | |
1927 r7, | |
1928 MemOperand(r7, Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX))); | |
1929 | 1922 |
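Note: three chained LoadP instructions collapse into one LoadNativeContextSlot call here. The helper's likely shape, inferred from the sequence it replaces (a hedged sketch; the actual definition lives in the PPC macro-assembler, outside this diff):

    // Assumed implementation, mirroring the OLD sequence minus the global
    // object hop: fetch the native context, then the requested slot.
    void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
      LoadP(dst, NativeContextMemOperand());      // dst = native context
      LoadP(dst, ContextMemOperand(dst, index));  // dst = native_context[index]
    }

Two dependent loads instead of three for every boilerplate-map fetch.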
1930 __ StoreP(r7, FieldMemOperand(r3, JSObject::kMapOffset), r0); | 1923 __ StoreP(r7, FieldMemOperand(r3, JSObject::kMapOffset), r0); |
1931 __ LoadRoot(r8, Heap::kEmptyFixedArrayRootIndex); | 1924 __ LoadRoot(r8, Heap::kEmptyFixedArrayRootIndex); |
1932 __ StoreP(r8, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0); | 1925 __ StoreP(r8, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0); |
1933 __ StoreP(r8, FieldMemOperand(r3, JSObject::kElementsOffset), r0); | 1926 __ StoreP(r8, FieldMemOperand(r3, JSObject::kElementsOffset), r0); |
1934 | 1927 |
1935 // Get the length (smi tagged) and set that as an in-object property too. | 1928 // Get the length (smi tagged) and set that as an in-object property too. |
1936 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 1929 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
1937 __ AssertSmi(r5); | 1930 __ AssertSmi(r5); |
1938 __ StoreP(r5, | 1931 __ StoreP(r5, |
(...skipping 529 matching lines...)
2468 | 2461 |
2469 __ bind(&check_allocation_site); | 2462 __ bind(&check_allocation_site); |
2470 // If we came here, we need to see if we are the array function. | 2463 // If we came here, we need to see if we are the array function. |
2471 // If we didn't have a matching function, and we didn't find the megamorphic | 2464 // If we didn't have a matching function, and we didn't find the megamorphic |
2472 // sentinel, then we have in the slot either some other function or an | 2465 // sentinel, then we have in the slot either some other function or an |
2473 // AllocationSite. | 2466 // AllocationSite. |
2474 __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex); | 2467 __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex); |
2475 __ bne(&miss); | 2468 __ bne(&miss); |
2476 | 2469 |
2477 // Make sure the function is the Array() function. | 2470 // Make sure the function is the Array() function. |
2478 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8); | 2471 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); |
2479 __ cmp(r4, r8); | 2472 __ cmp(r4, r8); |
2480 __ bne(&megamorphic); | 2473 __ bne(&megamorphic); |
2481 __ b(&done); | 2474 __ b(&done); |
2482 | 2475 |
2483 __ bind(&miss); | 2476 __ bind(&miss); |
2484 | 2477 |
2485 // A monomorphic miss (i.e., the cache is not uninitialized) goes | 2478 // A monomorphic miss (i.e., the cache is not uninitialized) goes |
2486 // megamorphic. | 2479 // megamorphic. |
2487 __ CompareRoot(r8, Heap::kuninitialized_symbolRootIndex); | 2480 __ CompareRoot(r8, Heap::kuninitialized_symbolRootIndex); |
2488 __ beq(&initialize); | 2481 __ beq(&initialize); |
2489 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 2482 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
2490 // write-barrier is needed. | 2483 // write-barrier is needed. |
2491 __ bind(&megamorphic); | 2484 __ bind(&megamorphic); |
2492 __ SmiToPtrArrayOffset(r8, r6); | 2485 __ SmiToPtrArrayOffset(r8, r6); |
2493 __ add(r8, r5, r8); | 2486 __ add(r8, r5, r8); |
2494 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); | 2487 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); |
2495 __ StoreP(ip, FieldMemOperand(r8, FixedArray::kHeaderSize), r0); | 2488 __ StoreP(ip, FieldMemOperand(r8, FixedArray::kHeaderSize), r0); |
2496 __ jmp(&done); | 2489 __ jmp(&done); |
2497 | 2490 |
2498 // An uninitialized cache is patched with the function. | 2491 // An uninitialized cache is patched with the function. |
2499 __ bind(&initialize); | 2492 __ bind(&initialize); |
2500 | 2493 |
2501 // Make sure the function is the Array() function. | 2494 // Make sure the function is the Array() function. |
2502 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8); | 2495 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); |
2503 __ cmp(r4, r8); | 2496 __ cmp(r4, r8); |
2504 __ bne(¬_array_function); | 2497 __ bne(¬_array_function); |
2505 | 2498 |
2506 // The target function is the Array constructor; create an | 2499 // The target function is the Array constructor; create an |
2507 // AllocationSite if we don't already have it, and store it in the | 2500 // AllocationSite if we don't already have it, and store it in the |
2508 // slot. | 2501 // slot. |
2509 CreateAllocationSiteStub create_stub(masm->isolate()); | 2502 CreateAllocationSiteStub create_stub(masm->isolate()); |
2510 CallStubInRecordCallTarget(masm, &create_stub); | 2503 CallStubInRecordCallTarget(masm, &create_stub); |
2511 __ b(&done); | 2504 __ b(&done); |
2512 | 2505 |
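Note: this hunk walks the feedback-slot state machine for call targets: an uninitialized slot is patched with the function (or, for Array(), an AllocationSite), a monomorphic miss goes megamorphic, and megamorphic is terminal. A toy model of the transitions with plain enums standing in for the real heap objects (illustration only):

    enum class SlotState { kUninitialized, kMonomorphic, kMegamorphic };

    // Simplified: the real slot treats "same function" and
    // "AllocationSite + Array()" both as monomorphic hits.
    SlotState NextState(SlotState current, bool same_target) {
      switch (current) {
        case SlotState::kUninitialized:
          return SlotState::kMonomorphic;  // patch in function/AllocationSite
        case SlotState::kMonomorphic:
          return same_target ? SlotState::kMonomorphic
                             : SlotState::kMegamorphic;  // monomorphic miss
        case SlotState::kMegamorphic:
          return SlotState::kMegamorphic;  // sentinel is never rewritten
      }
      return current;
    }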
(...skipping 52 matching lines...)
2565 __ mr(r6, r4); | 2558 __ mr(r6, r4); |
2566 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 2559 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
2567 } | 2560 } |
2568 | 2561 |
2569 | 2562 |
2570 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { | 2563 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { |
2571 // r4 - function | 2564 // r4 - function |
2572 // r6 - slot id | 2565 // r6 - slot id |
2573 // r5 - vector | 2566 // r5 - vector |
2574 // r7 - allocation site (loaded from vector[slot]) | 2567 // r7 - allocation site (loaded from vector[slot]) |
2575 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8); | 2568 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); |
2576 __ cmp(r4, r8); | 2569 __ cmp(r4, r8); |
2577 __ bne(miss); | 2570 __ bne(miss); |
2578 | 2571 |
2579 __ mov(r3, Operand(arg_count())); | 2572 __ mov(r3, Operand(arg_count())); |
2580 | 2573 |
2581 // Increment the call count for monomorphic function calls. | 2574 // Increment the call count for monomorphic function calls. |
2582 const int count_offset = FixedArray::kHeaderSize + kPointerSize; | 2575 const int count_offset = FixedArray::kHeaderSize + kPointerSize; |
2583 __ SmiToPtrArrayOffset(r8, r6); | 2576 __ SmiToPtrArrayOffset(r8, r6); |
2584 __ add(r5, r5, r8); | 2577 __ add(r5, r5, r8); |
2585 __ LoadP(r6, FieldMemOperand(r5, count_offset)); | 2578 __ LoadP(r6, FieldMemOperand(r5, count_offset)); |
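Note: count_offset = FixedArray::kHeaderSize + kPointerSize encodes the vector layout this stub assumes: the feedback for a slot sits at vector[slot] and its call count, a smi, one pointer after it. A hypothetical struct view of two consecutive entries (not V8's real FixedArray, just for orientation):

    // In the real heap these are two adjacent tagged words in a FixedArray.
    struct FeedbackEntryView {
      intptr_t feedback;    // vector[slot]: function, site, or sentinel
      intptr_t call_count;  // vector[slot + 1]: smi-tagged invocation count
    };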
(...skipping 106 matching lines...)
2692 | 2685 |
2693 // We are going monomorphic, provided we actually have a JSFunction. | 2686 // We are going monomorphic, provided we actually have a JSFunction. |
2694 __ JumpIfSmi(r4, &miss); | 2687 __ JumpIfSmi(r4, &miss); |
2695 | 2688 |
2696 // Go to the miss case if we do not have a function. | 2689 // Go to the miss case if we do not have a function. |
2697 __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE); | 2690 __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE); |
2698 __ bne(&miss); | 2691 __ bne(&miss); |
2699 | 2692 |
2700 // Make sure the function is not the Array() function, which requires special | 2693 // Make sure the function is not the Array() function, which requires special |
2701 // behavior on MISS. | 2694 // behavior on MISS. |
2702 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r7); | 2695 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r7); |
2703 __ cmp(r4, r7); | 2696 __ cmp(r4, r7); |
2704 __ beq(&miss); | 2697 __ beq(&miss); |
2705 | 2698 |
2706 // Make sure the function belongs to the same native context (which implies | 2699 // Make sure the function belongs to the same native context. |
2707 // the same global object). | |
2708 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kContextOffset)); | 2700 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kContextOffset)); |
2709 __ LoadP(r7, ContextOperand(r7, Context::GLOBAL_OBJECT_INDEX)); | 2701 __ LoadP(r7, ContextMemOperand(r7, Context::NATIVE_CONTEXT_INDEX)); |
2710 __ LoadP(ip, GlobalObjectOperand()); | 2702 __ LoadP(ip, NativeContextMemOperand()); |
2711 __ cmp(r7, ip); | 2703 __ cmp(r7, ip); |
2712 __ bne(&miss); | 2704 __ bne(&miss); |
2713 | 2705 |
2714 // Update stats. | 2706 // Update stats. |
2715 __ LoadP(r7, FieldMemOperand(r5, with_types_offset)); | 2707 __ LoadP(r7, FieldMemOperand(r5, with_types_offset)); |
2716 __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0); | 2708 __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0); |
2717 __ StoreP(r7, FieldMemOperand(r5, with_types_offset), r0); | 2709 __ StoreP(r7, FieldMemOperand(r5, with_types_offset), r0); |
2718 | 2710 |
2719 // Initialize the call counter. | 2711 // Initialize the call counter. |
2720 __ LoadSmiLiteral(r8, Smi::FromInt(CallICNexus::kCallCountIncrement)); | 2712 __ LoadSmiLiteral(r8, Smi::FromInt(CallICNexus::kCallCountIncrement)); |
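Note: both sides of the hunk above perform the same guard; the NEW code just phrases it in terms of native contexts instead of global objects. A self-contained toy model of the check (struct names invented for this sketch):

    // Assumes every context can reach its native context, which is exactly
    // what the new NATIVE_CONTEXT_INDEX slot provides.
    struct ToyContext { ToyContext* native_context; };
    struct ToyFunction { ToyContext* context; };

    bool SameNativeContext(const ToyFunction* fn, const ToyContext* current) {
      return fn->context->native_context == current->native_context;
    }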
(...skipping 2414 matching lines...)
5135 } | 5127 } |
5136 | 5128 |
5137 | 5129 |
5138 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { | 5130 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { |
5139 Register context = cp; | 5131 Register context = cp; |
5140 Register result = r3; | 5132 Register result = r3; |
5141 Register slot = r5; | 5133 Register slot = r5; |
5142 | 5134 |
5143 // Go up the context chain to the script context. | 5135 // Go up the context chain to the script context. |
5144 for (int i = 0; i < depth(); ++i) { | 5136 for (int i = 0; i < depth(); ++i) { |
5145 __ LoadP(result, ContextOperand(context, Context::PREVIOUS_INDEX)); | 5137 __ LoadP(result, ContextMemOperand(context, Context::PREVIOUS_INDEX)); |
5146 context = result; | 5138 context = result; |
5147 } | 5139 } |
5148 | 5140 |
5149 // Load the PropertyCell value at the specified slot. | 5141 // Load the PropertyCell value at the specified slot. |
5150 __ ShiftLeftImm(r0, slot, Operand(kPointerSizeLog2)); | 5142 __ ShiftLeftImm(r0, slot, Operand(kPointerSizeLog2)); |
5151 __ add(result, context, r0); | 5143 __ add(result, context, r0); |
5152 __ LoadP(result, ContextOperand(result)); | 5144 __ LoadP(result, ContextMemOperand(result)); |
5153 __ LoadP(result, FieldMemOperand(result, PropertyCell::kValueOffset)); | 5145 __ LoadP(result, FieldMemOperand(result, PropertyCell::kValueOffset)); |
5154 | 5146 |
5155 // If the result is not the_hole, return. Otherwise, handle in the runtime. | 5147 // If the result is not the_hole, return. Otherwise, handle in the runtime. |
5156 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); | 5148 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); |
5157 __ Ret(ne); | 5149 __ Ret(ne); |
5158 | 5150 |
5159 // Fallback to runtime. | 5151 // Fallback to runtime. |
5160 __ SmiTag(slot); | 5152 __ SmiTag(slot); |
5161 __ Push(slot); | 5153 __ Push(slot); |
5162 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1); | 5154 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1); |
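Note: depth() is a compile-time property of the stub, so the chain walk is unrolled into depth() straight-line loads before the PropertyCell read. The equivalent logic with stand-in types (the the_hole runtime fallback is elided):

    struct ToyPropertyCell { void* value; };
    struct ToyScriptContext {
      ToyScriptContext* previous;  // Context::PREVIOUS_INDEX
      ToyPropertyCell** slots;     // stand-in for the context slot area
    };

    void* LoadGlobalViaContext(ToyScriptContext* ctx, int depth, int slot) {
      for (int i = 0; i < depth; ++i) ctx = ctx->previous;  // unrolled above
      return ctx->slots[slot]->value;  // PropertyCell::kValueOffset load
    }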
(...skipping 15 matching lines...)
5178 | 5170 |
5179 Label fast_heapobject_case, fast_smi_case, slow_case; | 5171 Label fast_heapobject_case, fast_smi_case, slow_case; |
5180 | 5172 |
5181 if (FLAG_debug_code) { | 5173 if (FLAG_debug_code) { |
5182 __ CompareRoot(value, Heap::kTheHoleValueRootIndex); | 5174 __ CompareRoot(value, Heap::kTheHoleValueRootIndex); |
5183 __ Check(ne, kUnexpectedValue); | 5175 __ Check(ne, kUnexpectedValue); |
5184 } | 5176 } |
5185 | 5177 |
5186 // Go up the context chain to the script context. | 5178 // Go up the context chain to the script context. |
5187 for (int i = 0; i < depth(); i++) { | 5179 for (int i = 0; i < depth(); i++) { |
5188 __ LoadP(context_temp, ContextOperand(context, Context::PREVIOUS_INDEX)); | 5180 __ LoadP(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX)); |
5189 context = context_temp; | 5181 context = context_temp; |
5190 } | 5182 } |
5191 | 5183 |
5192 // Load the PropertyCell at the specified slot. | 5184 // Load the PropertyCell at the specified slot. |
5193 __ ShiftLeftImm(r0, slot, Operand(kPointerSizeLog2)); | 5185 __ ShiftLeftImm(r0, slot, Operand(kPointerSizeLog2)); |
5194 __ add(cell, context, r0); | 5186 __ add(cell, context, r0); |
5195 __ LoadP(cell, ContextOperand(cell)); | 5187 __ LoadP(cell, ContextMemOperand(cell)); |
5196 | 5188 |
5197 // Load PropertyDetails for the cell (actually only the cell_type and kind). | 5189 // Load PropertyDetails for the cell (actually only the cell_type and kind). |
5198 __ LoadP(cell_details, FieldMemOperand(cell, PropertyCell::kDetailsOffset)); | 5190 __ LoadP(cell_details, FieldMemOperand(cell, PropertyCell::kDetailsOffset)); |
5199 __ SmiUntag(cell_details); | 5191 __ SmiUntag(cell_details); |
5200 __ andi(cell_details, cell_details, | 5192 __ andi(cell_details, cell_details, |
5201 Operand(PropertyDetails::PropertyCellTypeField::kMask | | 5193 Operand(PropertyDetails::PropertyCellTypeField::kMask | |
5202 PropertyDetails::KindField::kMask | | 5194 PropertyDetails::KindField::kMask | |
5203 PropertyDetails::kAttributesReadOnlyMask)); | 5195 PropertyDetails::kAttributesReadOnlyMask)); |
5204 | 5196 |
5205 // Check if PropertyCell holds mutable data. | 5197 // Check if PropertyCell holds mutable data. |
(...skipping 443 matching lines...)
5649 kStackUnwindSpace, NULL, | 5641 kStackUnwindSpace, NULL, |
5650 MemOperand(fp, 6 * kPointerSize), NULL); | 5642 MemOperand(fp, 6 * kPointerSize), NULL); |
5651 } | 5643 } |
5652 | 5644 |
5653 | 5645 |
5654 #undef __ | 5646 #undef __ |
5655 } // namespace internal | 5647 } // namespace internal |
5656 } // namespace v8 | 5648 } // namespace v8 |
5657 | 5649 |
5658 #endif // V8_TARGET_ARCH_PPC | 5650 #endif // V8_TARGET_ARCH_PPC |