Chromium Code Reviews

Diff: src/arm/code-stubs-arm.cc

Issue 1478303002: Revert of [runtime] Replace global object link with native context link in all contexts. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years ago
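
The substance of this revert, visible in every hunk below, is how the generated stub code reaches the native context: the change being reverted let stubs load the native context (or one of its slots) directly from the current context, while this CL restores the older path that goes through the global object first. A minimal sketch of the two patterns, for orientation only: it reuses MacroAssembler calls that appear verbatim in this diff, assumes the surrounding stub code in this file, and uses illustrative register choices; it is not a standalone program.

  // Direct native-context access (the '-' lines below, removed by this revert):
  __ ldr(r4, NativeContextMemOperand());
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);

  // Access via the global object (the '+' lines below, restored by this revert):
  __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ ldr(r4, FieldMemOperand(r4, JSGlobalObject::kNativeContextOffset));
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);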
@@ -1,10 +1,10 @@
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #if V8_TARGET_ARCH_ARM
 
 #include "src/base/bits.h"
 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
(...skipping 1582 matching lines...)
@@ -1593,21 +1593,22 @@
   __ Allocate(r9, r0, r9, r4, &runtime, TAG_OBJECT);
 
   // r0 = address of new object(s) (tagged)
   // r2 = argument count (smi-tagged)
   // Get the arguments boilerplate from the current native context into r4.
   const int kNormalOffset =
       Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
   const int kAliasedOffset =
       Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
 
-  __ ldr(r4, NativeContextMemOperand());
+  __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+  __ ldr(r4, FieldMemOperand(r4, JSGlobalObject::kNativeContextOffset));
   __ cmp(r6, Operand::Zero());
   __ ldr(r4, MemOperand(r4, kNormalOffset), eq);
   __ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
 
   // r0 = address of new object (tagged)
   // r2 = argument count (smi-tagged)
   // r4 = address of arguments map (tagged)
   // r6 = mapped parameter count (tagged)
   __ str(r4, FieldMemOperand(r0, JSObject::kMapOffset));
   __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
(...skipping 171 matching lines...)
@@ -1785,21 +1786,24 @@
   __ b(eq, &add_arguments_object);
   __ add(r9, r9, Operand(FixedArray::kHeaderSize / kPointerSize));
   __ bind(&add_arguments_object);
   __ add(r9, r9, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize));
 
   // Do the allocation of both objects in one go.
   __ Allocate(r9, r0, r4, r5, &runtime,
               static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
 
   // Get the arguments boilerplate from the current native context.
-  __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, r4);
+  __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+  __ ldr(r4, FieldMemOperand(r4, JSGlobalObject::kNativeContextOffset));
+  __ ldr(r4, MemOperand(
+                 r4, Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX)));
 
   __ str(r4, FieldMemOperand(r0, JSObject::kMapOffset));
   __ LoadRoot(r5, Heap::kEmptyFixedArrayRootIndex);
   __ str(r5, FieldMemOperand(r0, JSObject::kPropertiesOffset));
   __ str(r5, FieldMemOperand(r0, JSObject::kElementsOffset));
 
   // Get the length (smi tagged) and set that as an in-object property too.
   STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
   __ AssertSmi(r2);
   __ str(r2,
(...skipping 512 matching lines...)
@@ -2318,44 +2322,44 @@
 
   __ bind(&check_allocation_site);
   // If we came here, we need to see if we are the array function.
   // If we didn't have a matching function, and we didn't find the megamorph
   // sentinel, then we have in the slot either some other function or an
   // AllocationSite.
   __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex);
   __ b(ne, &miss);
 
   // Make sure the function is the Array() function
-  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
   __ cmp(r1, r5);
   __ b(ne, &megamorphic);
   __ jmp(&done);
 
   __ bind(&miss);
 
   // A monomorphic miss (i.e, here the cache is not uninitialized) goes
   // megamorphic.
   __ CompareRoot(r5, Heap::kuninitialized_symbolRootIndex);
   __ b(eq, &initialize);
   // MegamorphicSentinel is an immortal immovable object (undefined) so no
   // write-barrier is needed.
   __ bind(&megamorphic);
   __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
   __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
   __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize));
   __ jmp(&done);
 
   // An uninitialized cache is patched with the function
   __ bind(&initialize);
 
   // Make sure the function is the Array() function
-  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
   __ cmp(r1, r5);
   __ b(ne, &not_array_function);
 
   // The target function is the Array constructor,
   // Create an AllocationSite if we don't already have it, store it in the
   // slot.
   CreateAllocationSiteStub create_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &create_stub);
   __ b(&done);
 
(...skipping 44 matching lines...)
@@ -2406,21 +2410,21 @@
   __ mov(r3, r1);
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
 
 
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // r1 - function
   // r3 - slot id
   // r2 - vector
   // r4 - allocation site (loaded from vector[slot])
-  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
   __ cmp(r1, r5);
   __ b(ne, miss);
 
   __ mov(r0, Operand(arg_count()));
 
   // Increment the call count for monomorphic function calls.
   __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
   __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize));
   __ ldr(r3, FieldMemOperand(r2, 0));
   __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
(...skipping 106 matching lines...)
@@ -2533,28 +2537,29 @@
 
   // We are going monomorphic, provided we actually have a JSFunction.
   __ JumpIfSmi(r1, &miss);
 
   // Goto miss case if we do not have a function.
   __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
   __ b(ne, &miss);
 
   // Make sure the function is not the Array() function, which requires special
   // behavior on MISS.
-  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r4);
+  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
   __ cmp(r1, r4);
   __ b(eq, &miss);
 
-  // Make sure the function belongs to the same native context.
+  // Make sure the function belongs to the same native context (which implies
+  // the same global object).
   __ ldr(r4, FieldMemOperand(r1, JSFunction::kContextOffset));
-  __ ldr(r4, ContextMemOperand(r4, Context::NATIVE_CONTEXT_INDEX));
-  __ ldr(ip, NativeContextMemOperand());
+  __ ldr(r4, ContextOperand(r4, Context::GLOBAL_OBJECT_INDEX));
+  __ ldr(ip, GlobalObjectOperand());
   __ cmp(r4, ip);
   __ b(ne, &miss);
 
   // Update stats.
   __ ldr(r4, FieldMemOperand(r2, with_types_offset));
   __ add(r4, r4, Operand(Smi::FromInt(1)));
   __ str(r4, FieldMemOperand(r2, with_types_offset));
 
   // Initialize the call counter.
   __ Move(r5, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
(...skipping 2339 matching lines...)
@@ -4900,27 +4905,27 @@
 }
 
 
 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
   Register context = cp;
   Register result = r0;
   Register slot = r2;
 
   // Go up the context chain to the script context.
   for (int i = 0; i < depth(); ++i) {
-    __ ldr(result, ContextMemOperand(context, Context::PREVIOUS_INDEX));
+    __ ldr(result, ContextOperand(context, Context::PREVIOUS_INDEX));
     context = result;
   }
 
   // Load the PropertyCell value at the specified slot.
   __ add(result, context, Operand(slot, LSL, kPointerSizeLog2));
-  __ ldr(result, ContextMemOperand(result));
+  __ ldr(result, ContextOperand(result));
   __ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));
 
   // If the result is not the_hole, return. Otherwise, handle in the runtime.
   __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
   __ Ret(ne);
 
   // Fallback to runtime.
   __ SmiTag(slot);
   __ push(slot);
   __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1);
(...skipping 15 matching lines...)
@@ -4942,27 +4947,27 @@
 
   Label fast_heapobject_case, fast_smi_case, slow_case;
 
   if (FLAG_debug_code) {
     __ CompareRoot(value, Heap::kTheHoleValueRootIndex);
     __ Check(ne, kUnexpectedValue);
   }
 
   // Go up the context chain to the script context.
   for (int i = 0; i < depth(); i++) {
-    __ ldr(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX));
+    __ ldr(context_temp, ContextOperand(context, Context::PREVIOUS_INDEX));
     context = context_temp;
   }
 
   // Load the PropertyCell at the specified slot.
   __ add(cell, context, Operand(slot, LSL, kPointerSizeLog2));
-  __ ldr(cell, ContextMemOperand(cell));
+  __ ldr(cell, ContextOperand(cell));
 
   // Load PropertyDetails for the cell (actually only the cell_type and kind).
   __ ldr(cell_details, FieldMemOperand(cell, PropertyCell::kDetailsOffset));
   __ SmiUntag(cell_details);
   __ and_(cell_details, cell_details,
           Operand(PropertyDetails::PropertyCellTypeField::kMask |
                   PropertyDetails::KindField::kMask |
                   PropertyDetails::kAttributesReadOnlyMask));
 
   // Check if PropertyCell holds mutable data.
(...skipping 398 matching lines...)
@@ -5367,10 +5372,10 @@
                            MemOperand(fp, 6 * kPointerSize), NULL);
 }
 
 
 #undef __
 
 }  // namespace internal
 }  // namespace v8
 
 #endif  // V8_TARGET_ARCH_ARM
