Index: src/arm64/builtins-arm64.cc |
diff --git a/src/arm64/builtins-arm64.cc b/src/arm64/builtins-arm64.cc |
index 8d2a67af8cd0dfc0123abbb7de321f96b56c3933..088dd32949ba2710446eb6e92d50b10e8facedc4 100644 |
--- a/src/arm64/builtins-arm64.cc |
+++ b/src/arm64/builtins-arm64.cc |
@@ -1740,14 +1740,21 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { |
Register scratch1 = x13, scratch2 = x14; |
// If the function is strong we need to throw an error. |
- Label weak_function; |
+ Label no_strong_error; |
__ Ldr(scratch1, |
FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); |
__ Ldr(scratch2.W(), |
FieldMemOperand(scratch1, SharedFunctionInfo::kCompilerHintsOffset)); |
__ TestAndBranchIfAllClear(scratch2.W(), |
(1 << SharedFunctionInfo::kStrongModeFunction), |
- &weak_function); |
+ &no_strong_error); |
+ |
+ // What we really care about is the required number of arguments. |
+ __ Ldrsh(scratch2, |
Rodolph Perfetta
2015/05/19 14:37:05
This will load a 16-bit value, which is not what you want here: kLengthOffset holds a 32-bit Smi, so Ldrsh would only read the low halfword. Use a full 32-bit load (e.g. Ldr on the W register) instead.
|
+ FieldMemOperand(scratch1, SharedFunctionInfo::kLengthOffset)); |
+ __ lsr(scratch2, scratch2, 1); |
+ __ Cmp(argc_actual, scratch2); |
Rodolph Perfetta
2015/05/19 14:37:06
the two instructions above can be merged using a shifted operand:
__ Cmp(argc_actual, Operand(scratch2, LSR, 1));
|
+ __ B(ge, &no_strong_error); |
{ |
FrameScope frame(masm, StackFrame::MANUAL); |
@@ -1755,7 +1762,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { |
__ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0); |
} |
- __ bind(&weak_function); |
+ __ bind(&no_strong_error); |
Rodolph Perfetta
2015/05/19 14:37:06
Not part of your patch, but we should use the macro assembler's Bind() here rather than the lowercase assembler-level bind().
arv (Not doing code reviews)
2015/05/19 14:56:13
I added that bind, so I'm happy to change it to Bind().
|
EnterArgumentsAdaptorFrame(masm); |
__ Lsl(argc_expected, argc_expected, kPointerSizeLog2); |