Index: src/arm/macro-assembler-arm.cc
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 5a55c6d862a2049cec2cb112b0fa87f01bbb39a2..252bb3534d6ee6da90929154a52c1e366b5f5631 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -1521,7 +1521,10 @@
 #endif
 
   // Load the native context of the current context.
-  ldr(scratch, ContextMemOperand(scratch, Context::NATIVE_CONTEXT_INDEX));
+  int offset =
+      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
+  ldr(scratch, FieldMemOperand(scratch, offset));
+  ldr(scratch, FieldMemOperand(scratch, JSGlobalObject::kNativeContextOffset));
 
   // Check the context is a native context.
   if (emit_debug_code()) {
@@ -2517,8 +2520,7 @@
   // You can't call a builtin without a valid frame.
   DCHECK(flag == JUMP_FUNCTION || has_frame());
 
-  LoadNativeContextSlot(native_context_index, r1);
-  ldr(r2, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
+  GetBuiltinEntry(r2, native_context_index);
   if (flag == CALL_FUNCTION) {
     call_wrapper.BeforeCall(CallSize(r2));
     Call(r2);
@@ -2527,6 +2529,26 @@
     DCHECK(flag == JUMP_FUNCTION);
     Jump(r2);
   }
+}
+
+
+void MacroAssembler::GetBuiltinFunction(Register target,
+                                        int native_context_index) {
+  // Load the builtins object into target register.
+  ldr(target,
+      MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+  ldr(target, FieldMemOperand(target, JSGlobalObject::kNativeContextOffset));
+  // Load the JavaScript builtin function from the builtins object.
+  ldr(target, ContextOperand(target, native_context_index));
+}
+
+
+void MacroAssembler::GetBuiltinEntry(Register target,
+                                     int native_context_index) {
+  DCHECK(!target.is(r1));
+  GetBuiltinFunction(r1, native_context_index);
+  // Load the code entry point from the builtins object.
+  ldr(target, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
 }
 
 
@@ -2660,15 +2682,29 @@
 }
 
 
+void MacroAssembler::LoadGlobalProxy(Register dst) {
+  ldr(dst, GlobalObjectOperand());
+  ldr(dst, FieldMemOperand(dst, JSGlobalObject::kGlobalProxyOffset));
+}
+
+
 void MacroAssembler::LoadTransitionedArrayMapConditional(
     ElementsKind expected_kind,
     ElementsKind transitioned_kind,
     Register map_in_out,
     Register scratch,
    Label* no_map_match) {
+  // Load the global or builtins object from the current context.
+  ldr(scratch,
+      MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+  ldr(scratch, FieldMemOperand(scratch, JSGlobalObject::kNativeContextOffset));
+
   // Check that the function's map is the same as the expected cached map.
-  LoadNativeContextSlot(Context::JS_ARRAY_MAPS_INDEX, scratch);
-  size_t offset = expected_kind * kPointerSize + FixedArrayBase::kHeaderSize;
+  ldr(scratch,
+      MemOperand(scratch,
+                 Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
+  size_t offset = expected_kind * kPointerSize +
+      FixedArrayBase::kHeaderSize;
   ldr(ip, FieldMemOperand(scratch, offset));
   cmp(map_in_out, ip);
   b(ne, no_map_match);
@@ -2680,9 +2716,15 @@
 }
 
 
-void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
-  ldr(dst, NativeContextMemOperand());
-  ldr(dst, ContextMemOperand(dst, index));
+void MacroAssembler::LoadGlobalFunction(int index, Register function) {
+  // Load the global or builtins object from the current context.
+  ldr(function,
+      MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
+  // Load the native context from the global or builtins object.
+  ldr(function,
+      FieldMemOperand(function, JSGlobalObject::kNativeContextOffset));
+  // Load the function from the native context.
+  ldr(function, MemOperand(function, Context::SlotOffset(index)));
 }
 
 
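The two addressing styles restored above differ only in where the heap-object tag correction is applied: FieldMemOperand(reg, offset) subtracts kHeapObjectTag itself, while Context::SlotOffset(index) already folds the subtraction in, which is why it is paired with a plain MemOperand (or ContextOperand). Below is a minimal standalone sketch of that arithmetic, assuming illustrative 32-bit ARM values (kPointerSize = 4, kHeapObjectTag = 1, a two-word Context header) and a made-up slot index; the constants and index are restated here for illustration only, not taken from this patch.

// Sketch only: mirrors the offset arithmetic used in the hunks above,
// with assumed 32-bit ARM constants rather than the real V8 headers.
constexpr int kPointerSize = 4;        // assumed: 32-bit ARM word size
constexpr int kHeapObjectTag = 1;      // assumed: heap pointers carry a +1 tag
constexpr int kContextHeaderSize = 8;  // assumed: FixedArray map + length words

// FieldMemOperand(reg, offset) addresses reg + offset - kHeapObjectTag.
constexpr int FieldOffset(int untagged_offset) {
  return untagged_offset - kHeapObjectTag;
}

// Context::SlotOffset(index) already includes the tag correction,
// so the patch pairs it with a plain MemOperand / ContextOperand.
constexpr int SlotOffset(int index) {
  return kContextHeaderSize + index * kPointerSize - kHeapObjectTag;
}

int main() {
  constexpr int kSomeSlotIndex = 3;  // hypothetical slot index, illustration only
  // The first hunk spells the load as kHeaderSize + index * kPointerSize and
  // lets FieldMemOperand apply the tag; the later hunks use Context::SlotOffset.
  static_assert(FieldOffset(kContextHeaderSize + kSomeSlotIndex * kPointerSize) ==
                    SlotOffset(kSomeSlotIndex),
                "both spellings address the same context slot");
  return 0;
}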