| Index: src/arm64/code-stubs-arm64.cc
|
| diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc
|
| index 90aad4556c2422fa63a28753704d734c1802882e..76807f803f0f1c4d3f10b41e76781b7133fcb1e8 100644
|
| --- a/src/arm64/code-stubs-arm64.cc
|
| +++ b/src/arm64/code-stubs-arm64.cc
|
| @@ -1801,13 +1801,10 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
|
| // x11 sloppy_args_map offset to args (or aliased args) map (uninit)
|
| // x14 arg_count number of function arguments
|
|
|
| - Register global_object = x10;
|
| Register global_ctx = x10;
|
| Register sloppy_args_map = x11;
|
| Register aliased_args_map = x10;
|
| - __ Ldr(global_object, GlobalObjectMemOperand());
|
| - __ Ldr(global_ctx,
|
| - FieldMemOperand(global_object, JSGlobalObject::kNativeContextOffset));
|
| + __ Ldr(global_ctx, NativeContextMemOperand());
|
|
|
| __ Ldr(sloppy_args_map,
|
| ContextMemOperand(global_ctx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
|
| @@ -2047,14 +2044,9 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
|
| static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
|
|
|
| // Get the arguments boilerplate from the current (native) context.
|
| - Register global_object = x10;
|
| - Register global_ctx = x10;
|
| Register strict_args_map = x4;
|
| - __ Ldr(global_object, GlobalObjectMemOperand());
|
| - __ Ldr(global_ctx,
|
| - FieldMemOperand(global_object, JSGlobalObject::kNativeContextOffset));
|
| - __ Ldr(strict_args_map,
|
| - ContextMemOperand(global_ctx, Context::STRICT_ARGUMENTS_MAP_INDEX));
|
| + __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX,
|
| + strict_args_map);
|
|
|
| // x0 alloc_obj pointer to allocated objects: parameter array and
|
| // arguments object
|
| @@ -2693,7 +2685,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
|
| __ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss);
|
|
|
| // Make sure the function is the Array() function
|
| - __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
|
| + __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch1);
|
| __ Cmp(function, scratch1);
|
| __ B(ne, &megamorphic);
|
| __ B(&done);
|
| @@ -2717,7 +2709,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
|
| __ Bind(&initialize);
|
|
|
| // Make sure the function is the Array() function
|
| - __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
|
| + __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch1);
|
| __ Cmp(function, scratch1);
|
| __ B(ne, &not_array_function);
|
|
|
| @@ -2793,7 +2785,7 @@ void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
|
| Register allocation_site = x4;
|
| Register scratch = x5;
|
|
|
| - __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch);
|
| + __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch);
|
| __ Cmp(function, scratch);
|
| __ B(ne, miss);
|
|
|
| @@ -2928,15 +2920,14 @@ void CallICStub::Generate(MacroAssembler* masm) {
|
|
|
| // Make sure the function is not the Array() function, which requires special
|
| // behavior on MISS.
|
| - __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, x5);
|
| + __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, x5);
|
| __ Cmp(function, x5);
|
| __ B(eq, &miss);
|
|
|
| - // Make sure the function belongs to the same native context (which implies
|
| - // the same global object).
|
| + // Make sure the function belongs to the same native context.
|
| __ Ldr(x4, FieldMemOperand(function, JSFunction::kContextOffset));
|
| - __ Ldr(x4, ContextMemOperand(x4, Context::GLOBAL_OBJECT_INDEX));
|
| - __ Ldr(x5, GlobalObjectMemOperand());
|
| + __ Ldr(x4, ContextMemOperand(x4, Context::NATIVE_CONTEXT_INDEX));
|
| + __ Ldr(x5, NativeContextMemOperand());
|
| __ Cmp(x4, x5);
|
| __ B(ne, &miss);
|
|
|
|
|