Chromium Code Reviews

Side by Side Diff: src/builtins/mips64/builtins-mips64.cc

Issue 2829073002: MIPS64: Move load/store instructions to macro-assembler. (Closed)
Patch Set: Created 3 years, 8 months ago
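
This patch replaces direct uses of the raw assembler load/store emitters (ld, sd, lw, lb, lbu, ...) with capitalized MacroAssembler methods (Ld, Sd, Lw, Lb, Lbu, ...) throughout the builtins. A minimal sketch of the shape of such wrappers follows; all names and signatures here are illustrative, not V8's actual API. Routing every load/store through the macro assembler gives one place to handle cases a single instruction cannot, such as offsets that do not fit the instruction's 16-bit immediate field.

// Illustrative sketch only; the real wrappers take Register/MemOperand
// operands and live in the MIPS64 macro-assembler sources.
static bool is_int16(long v) { return v >= -32768 && v < 32768; }

class Assembler {
 public:
  void ld(int rd, long offset, int base) { /* emit one MIPS64 ld */ }
  void sd(int rs, long offset, int base) { /* emit one MIPS64 sd */ }
};

class MacroAssembler : public Assembler {
 public:
  void Ld(int rd, long offset, int base) {
    if (is_int16(offset)) {
      ld(rd, offset, base);
    } else {
      // Materialize the offset in a scratch register, add the base,
      // then emit ld with a zero offset (elided here).
    }
  }
  void Sd(int rs, long offset, int base) {
    if (is_int16(offset)) {
      sd(rs, offset, base);
    } else {
      // Same fallback path as Ld.
    }
  }
};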
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_MIPS64 5 #if V8_TARGET_ARCH_MIPS64
6 6
7 #include "src/codegen.h" 7 #include "src/codegen.h"
8 #include "src/debug/debug.h" 8 #include "src/debug/debug.h"
9 #include "src/deoptimizer.h" 9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h" 10 #include "src/full-codegen/full-codegen.h"
(...skipping 14 matching lines...)
25 // -- ... 25 // -- ...
26 // -- sp[8 * (argc - 1)] : first argument 26 // -- sp[8 * (argc - 1)] : first argument
27 // -- sp[8 * argc] : receiver 27 // -- sp[8 * argc] : receiver
28 // ----------------------------------- 28 // -----------------------------------
29 __ AssertFunction(a1); 29 __ AssertFunction(a1);
30 30
31 // Make sure we operate in the context of the called function (for example 31 // Make sure we operate in the context of the called function (for example
32 // ConstructStubs implemented in C++ will be run in the context of the caller 32 // ConstructStubs implemented in C++ will be run in the context of the caller
33 // instead of the callee, due to the way that [[Construct]] is defined for 33 // instead of the callee, due to the way that [[Construct]] is defined for
34 // ordinary functions). 34 // ordinary functions).
35 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); 35 __ Ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
36 36
37 // JumpToExternalReference expects a0 to contain the number of arguments 37 // JumpToExternalReference expects a0 to contain the number of arguments
38 // including the receiver and the extra arguments. 38 // including the receiver and the extra arguments.
39 const int num_extra_args = 3; 39 const int num_extra_args = 3;
40 __ Daddu(a0, a0, num_extra_args + 1); 40 __ Daddu(a0, a0, num_extra_args + 1);
41 41
42 // Insert extra arguments. 42 // Insert extra arguments.
43 __ SmiTag(a0); 43 __ SmiTag(a0);
44 __ Push(a0, a1, a3); 44 __ Push(a0, a1, a3);
45 __ SmiUntag(a0); 45 __ SmiUntag(a0);
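
An illustrative sketch of the SmiTag/SmiUntag pair used above: with V8's 64-bit Smi layout the payload lives in the upper 32 bits, so tagging is a left shift by 32 and untagging an arithmetic right shift. Tagging argc before pushing it keeps the stack walkable by the GC.

#include <cassert>
#include <cstdint>

// Sketch only; V8's real SmiTag/SmiUntag operate on registers.
static int64_t SmiTag(int64_t value) { return value << 32; }  // payload must fit in 32 bits
static int64_t SmiUntag(int64_t smi) { return smi >> 32; }

int main() {
  int64_t argc = 5;
  int64_t tagged = SmiTag(argc);  // safe to push on a GC-scanned stack
  assert(SmiUntag(tagged) == argc);
  return 0;
}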
(...skipping 21 matching lines...)
67 // -- ra : return address 67 // -- ra : return address
68 // -- sp[...]: constructor arguments 68 // -- sp[...]: constructor arguments
69 // ----------------------------------- 69 // -----------------------------------
70 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; 70 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
71 71
72 // Get the InternalArray function. 72 // Get the InternalArray function.
73 GenerateLoadInternalArrayFunction(masm, a1); 73 GenerateLoadInternalArrayFunction(masm, a1);
74 74
75 if (FLAG_debug_code) { 75 if (FLAG_debug_code) {
76 // Initial map for the builtin InternalArray functions should be maps. 76 // Initial map for the builtin InternalArray functions should be maps.
77 __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); 77 __ Ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
78 __ SmiTst(a2, a4); 78 __ SmiTst(a2, a4);
79 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, a4, 79 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, a4,
80 Operand(zero_reg)); 80 Operand(zero_reg));
81 __ GetObjectType(a2, a3, a4); 81 __ GetObjectType(a2, a3, a4);
82 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, a4, 82 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, a4,
83 Operand(MAP_TYPE)); 83 Operand(MAP_TYPE));
84 } 84 }
85 85
86 // Run the native code for the InternalArray function called as a normal 86 // Run the native code for the InternalArray function called as a normal
87 // function. 87 // function.
88 // Tail call a stub. 88 // Tail call a stub.
89 InternalArrayConstructorStub stub(masm->isolate()); 89 InternalArrayConstructorStub stub(masm->isolate());
90 __ TailCallStub(&stub); 90 __ TailCallStub(&stub);
91 } 91 }
92 92
93 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { 93 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
94 // ----------- S t a t e ------------- 94 // ----------- S t a t e -------------
95 // -- a0 : number of arguments 95 // -- a0 : number of arguments
96 // -- ra : return address 96 // -- ra : return address
97 // -- sp[...]: constructor arguments 97 // -- sp[...]: constructor arguments
98 // ----------------------------------- 98 // -----------------------------------
99 Label generic_array_code; 99 Label generic_array_code;
100 100
101 // Get the Array function. 101 // Get the Array function.
102 GenerateLoadArrayFunction(masm, a1); 102 GenerateLoadArrayFunction(masm, a1);
103 103
104 if (FLAG_debug_code) { 104 if (FLAG_debug_code) {
105 // Initial map for the builtin Array functions should be maps. 105 // Initial map for the builtin Array functions should be maps.
106 __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); 106 __ Ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
107 __ SmiTst(a2, a4); 107 __ SmiTst(a2, a4);
108 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, a4, 108 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, a4,
109 Operand(zero_reg)); 109 Operand(zero_reg));
110 __ GetObjectType(a2, a3, a4); 110 __ GetObjectType(a2, a3, a4);
111 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, a4, 111 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, a4,
112 Operand(MAP_TYPE)); 112 Operand(MAP_TYPE));
113 } 113 }
114 114
115 // Run the native code for the Array function called as a normal function. 115 // Run the native code for the Array function called as a normal function.
116 // Tail call a stub. 116 // Tail call a stub.
(...skipping 15 matching lines...)
132 // ----------------------------------- 132 // -----------------------------------
133 133
134 // 1. Load the first argument into a0 and get rid of the rest (including the 134 // 1. Load the first argument into a0 and get rid of the rest (including the
135 // receiver). 135 // receiver).
136 Label no_arguments; 136 Label no_arguments;
137 { 137 {
138 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); 138 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
139 __ Dsubu(t1, a0, Operand(1)); // In delay slot. 139 __ Dsubu(t1, a0, Operand(1)); // In delay slot.
140 __ mov(t0, a0); // Store argc in t0. 140 __ mov(t0, a0); // Store argc in t0.
141 __ Dlsa(at, sp, t1, kPointerSizeLog2); 141 __ Dlsa(at, sp, t1, kPointerSizeLog2);
142 __ ld(a0, MemOperand(at)); 142 __ Ld(a0, MemOperand(at));
143 } 143 }
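
On MIPS the instruction after a branch (here the Dsubu marked "In delay slot") executes even when the branch is taken; its result is only consumed on the fall-through path. A sketch of what the sequence computes, with sp and argc standing in for the MIPS registers:

#include <cstdint>

// Sketch of the branch-plus-delay-slot sequence above (names illustrative).
int64_t LoadFirstArgument(const int64_t* sp, int64_t argc, bool* no_arguments) {
  *no_arguments = (argc == 0);
  if (*no_arguments) return 0;  // branch to &no_arguments
  int64_t t1 = argc - 1;        // computed in the delay slot
  return sp[t1];                // a0 = sp[8 * (argc - 1)], the first argument
}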
144 144
145 // 2a. Convert first argument to number. 145 // 2a. Convert first argument to number.
146 { 146 {
147 FrameScope scope(masm, StackFrame::MANUAL); 147 FrameScope scope(masm, StackFrame::MANUAL);
148 __ SmiTag(t0); 148 __ SmiTag(t0);
149 __ EnterBuiltinFrame(cp, a1, t0); 149 __ EnterBuiltinFrame(cp, a1, t0);
150 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); 150 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
151 __ LeaveBuiltinFrame(cp, a1, t0); 151 __ LeaveBuiltinFrame(cp, a1, t0);
152 __ SmiUntag(t0); 152 __ SmiUntag(t0);
(...skipping 16 matching lines...)
169 // -- a0 : number of arguments 169 // -- a0 : number of arguments
170 // -- a1 : constructor function 170 // -- a1 : constructor function
171 // -- a3 : new target 171 // -- a3 : new target
172 // -- cp : context 172 // -- cp : context
173 // -- ra : return address 173 // -- ra : return address
174 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based) 174 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
175 // -- sp[argc * 8] : receiver 175 // -- sp[argc * 8] : receiver
176 // ----------------------------------- 176 // -----------------------------------
177 177
178 // 1. Make sure we operate in the context of the called function. 178 // 1. Make sure we operate in the context of the called function.
179 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); 179 __ Ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
180 180
181 // 2. Load the first argument into a0 and get rid of the rest (including the 181 // 2. Load the first argument into a0 and get rid of the rest (including the
182 // receiver). 182 // receiver).
183 { 183 {
184 Label no_arguments, done; 184 Label no_arguments, done;
185 __ mov(t0, a0); // Store argc in t0. 185 __ mov(t0, a0); // Store argc in t0.
186 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); 186 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
187 __ Dsubu(a0, a0, Operand(1)); // In delay slot. 187 __ Dsubu(a0, a0, Operand(1)); // In delay slot.
188 __ Dlsa(at, sp, a0, kPointerSizeLog2); 188 __ Dlsa(at, sp, a0, kPointerSizeLog2);
189 __ ld(a0, MemOperand(at)); 189 __ Ld(a0, MemOperand(at));
190 __ jmp(&done); 190 __ jmp(&done);
191 __ bind(&no_arguments); 191 __ bind(&no_arguments);
192 __ Move(a0, Smi::kZero); 192 __ Move(a0, Smi::kZero);
193 __ bind(&done); 193 __ bind(&done);
194 } 194 }
195 195
196 // 3. Make sure a0 is a number. 196 // 3. Make sure a0 is a number.
197 { 197 {
198 Label done_convert; 198 Label done_convert;
199 __ JumpIfSmi(a0, &done_convert); 199 __ JumpIfSmi(a0, &done_convert);
(...skipping 27 matching lines...)
227 FrameScope scope(masm, StackFrame::MANUAL); 227 FrameScope scope(masm, StackFrame::MANUAL);
228 __ SmiTag(t0); 228 __ SmiTag(t0);
229 __ EnterBuiltinFrame(cp, a1, t0); 229 __ EnterBuiltinFrame(cp, a1, t0);
230 __ Push(a0); 230 __ Push(a0);
231 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), 231 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
232 RelocInfo::CODE_TARGET); 232 RelocInfo::CODE_TARGET);
233 __ Pop(a0); 233 __ Pop(a0);
234 __ LeaveBuiltinFrame(cp, a1, t0); 234 __ LeaveBuiltinFrame(cp, a1, t0);
235 __ SmiUntag(t0); 235 __ SmiUntag(t0);
236 } 236 }
237 __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset)); 237 __ Sd(a0, FieldMemOperand(v0, JSValue::kValueOffset));
238 238
239 __ bind(&drop_frame_and_ret); 239 __ bind(&drop_frame_and_ret);
240 { 240 {
241 __ Dlsa(sp, sp, t0, kPointerSizeLog2); 241 __ Dlsa(sp, sp, t0, kPointerSizeLog2);
242 __ DropAndRet(1); 242 __ DropAndRet(1);
243 } 243 }
244 } 244 }
245 245
246 // static 246 // static
247 void Builtins::Generate_StringConstructor(MacroAssembler* masm) { 247 void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
248 // ----------- S t a t e ------------- 248 // ----------- S t a t e -------------
249 // -- a0 : number of arguments 249 // -- a0 : number of arguments
250 // -- a1 : constructor function 250 // -- a1 : constructor function
251 // -- cp : context 251 // -- cp : context
252 // -- ra : return address 252 // -- ra : return address
253 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based) 253 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
254 // -- sp[argc * 8] : receiver 254 // -- sp[argc * 8] : receiver
255 // ----------------------------------- 255 // -----------------------------------
256 256
257 // 1. Load the first argument into a0 and get rid of the rest (including the 257 // 1. Load the first argument into a0 and get rid of the rest (including the
258 // receiver). 258 // receiver).
259 Label no_arguments; 259 Label no_arguments;
260 { 260 {
261 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); 261 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
262 __ Dsubu(t1, a0, Operand(1)); // In delay slot. 262 __ Dsubu(t1, a0, Operand(1)); // In delay slot.
263 __ mov(t0, a0); // Store argc in t0. 263 __ mov(t0, a0); // Store argc in t0.
264 __ Dlsa(at, sp, t1, kPointerSizeLog2); 264 __ Dlsa(at, sp, t1, kPointerSizeLog2);
265 __ ld(a0, MemOperand(at)); 265 __ Ld(a0, MemOperand(at));
266 } 266 }
267 267
268 // 2a. At least one argument, return a0 if it's a string, otherwise 268 // 2a. At least one argument, return a0 if it's a string, otherwise
269 // dispatch to appropriate conversion. 269 // dispatch to appropriate conversion.
270 Label drop_frame_and_ret, to_string, symbol_descriptive_string; 270 Label drop_frame_and_ret, to_string, symbol_descriptive_string;
271 { 271 {
272 __ JumpIfSmi(a0, &to_string); 272 __ JumpIfSmi(a0, &to_string);
273 __ GetObjectType(a0, t1, t1); 273 __ GetObjectType(a0, t1, t1);
274 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE); 274 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
275 __ Subu(t1, t1, Operand(FIRST_NONSTRING_TYPE)); 275 __ Subu(t1, t1, Operand(FIRST_NONSTRING_TYPE));
(...skipping 43 matching lines...)
319 // -- a0 : number of arguments 319 // -- a0 : number of arguments
320 // -- a1 : constructor function 320 // -- a1 : constructor function
321 // -- a3 : new target 321 // -- a3 : new target
322 // -- cp : context 322 // -- cp : context
323 // -- ra : return address 323 // -- ra : return address
324 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based) 324 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
325 // -- sp[argc * 8] : receiver 325 // -- sp[argc * 8] : receiver
326 // ----------------------------------- 326 // -----------------------------------
327 327
328 // 1. Make sure we operate in the context of the called function. 328 // 1. Make sure we operate in the context of the called function.
329 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); 329 __ Ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
330 330
331 // 2. Load the first argument into a0 and get rid of the rest (including the 331 // 2. Load the first argument into a0 and get rid of the rest (including the
332 // receiver). 332 // receiver).
333 { 333 {
334 Label no_arguments, done; 334 Label no_arguments, done;
335 __ mov(t0, a0); // Store argc in t0. 335 __ mov(t0, a0); // Store argc in t0.
336 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); 336 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
337 __ Dsubu(a0, a0, Operand(1)); 337 __ Dsubu(a0, a0, Operand(1));
338 __ Dlsa(at, sp, a0, kPointerSizeLog2); 338 __ Dlsa(at, sp, a0, kPointerSizeLog2);
339 __ ld(a0, MemOperand(at)); 339 __ Ld(a0, MemOperand(at));
340 __ jmp(&done); 340 __ jmp(&done);
341 __ bind(&no_arguments); 341 __ bind(&no_arguments);
342 __ LoadRoot(a0, Heap::kempty_stringRootIndex); 342 __ LoadRoot(a0, Heap::kempty_stringRootIndex);
343 __ bind(&done); 343 __ bind(&done);
344 } 344 }
345 345
346 // 3. Make sure a0 is a string. 346 // 3. Make sure a0 is a string.
347 { 347 {
348 Label convert, done_convert; 348 Label convert, done_convert;
349 __ JumpIfSmi(a0, &convert); 349 __ JumpIfSmi(a0, &convert);
(...skipping 29 matching lines...)
379 FrameScope scope(masm, StackFrame::MANUAL); 379 FrameScope scope(masm, StackFrame::MANUAL);
380 __ SmiTag(t0); 380 __ SmiTag(t0);
381 __ EnterBuiltinFrame(cp, a1, t0); 381 __ EnterBuiltinFrame(cp, a1, t0);
382 __ Push(a0); 382 __ Push(a0);
383 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), 383 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
384 RelocInfo::CODE_TARGET); 384 RelocInfo::CODE_TARGET);
385 __ Pop(a0); 385 __ Pop(a0);
386 __ LeaveBuiltinFrame(cp, a1, t0); 386 __ LeaveBuiltinFrame(cp, a1, t0);
387 __ SmiUntag(t0); 387 __ SmiUntag(t0);
388 } 388 }
389 __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset)); 389 __ Sd(a0, FieldMemOperand(v0, JSValue::kValueOffset));
390 390
391 __ bind(&drop_frame_and_ret); 391 __ bind(&drop_frame_and_ret);
392 { 392 {
393 __ Dlsa(sp, sp, t0, kPointerSizeLog2); 393 __ Dlsa(sp, sp, t0, kPointerSizeLog2);
394 __ DropAndRet(1); 394 __ DropAndRet(1);
395 } 395 }
396 } 396 }
397 397
398 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { 398 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
399 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); 399 __ Ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
400 __ ld(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset)); 400 __ Ld(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
401 __ Daddu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag)); 401 __ Daddu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
402 __ Jump(at); 402 __ Jump(at);
403 } 403 }
404 404
405 static void GenerateTailCallToReturnedCode(MacroAssembler* masm, 405 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
406 Runtime::FunctionId function_id) { 406 Runtime::FunctionId function_id) {
407 // ----------- S t a t e ------------- 407 // ----------- S t a t e -------------
408 // -- a0 : argument count (preserved for callee) 408 // -- a0 : argument count (preserved for callee)
409 // -- a1 : target function (preserved for callee) 409 // -- a1 : target function (preserved for callee)
410 // -- a3 : new target (preserved for callee) 410 // -- a3 : new target (preserved for callee)
(...skipping 62 matching lines...)
473 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), 473 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
474 RelocInfo::CODE_TARGET); 474 RelocInfo::CODE_TARGET);
475 __ mov(t0, v0); 475 __ mov(t0, v0);
476 __ Pop(a1, a3); 476 __ Pop(a1, a3);
477 477
478 // ----------- S t a t e ------------- 478 // ----------- S t a t e -------------
479 // -- a1: constructor function 479 // -- a1: constructor function
480 // -- a3: new target 480 // -- a3: new target
481 // -- t0: newly allocated object 481 // -- t0: newly allocated object
482 // ----------------------------------- 482 // -----------------------------------
483 __ ld(a0, MemOperand(sp)); 483 __ Ld(a0, MemOperand(sp));
484 } 484 }
485 __ SmiUntag(a0); 485 __ SmiUntag(a0);
486 486
487 if (create_implicit_receiver) { 487 if (create_implicit_receiver) {
488 // Push the allocated receiver to the stack. We need two copies 488 // Push the allocated receiver to the stack. We need two copies
489 // because we may have to return the original one and the calling 489 // because we may have to return the original one and the calling
490 // conventions dictate that the called function pops the receiver. 490 // conventions dictate that the called function pops the receiver.
491 __ Push(t0, t0); 491 __ Push(t0, t0);
492 } else { 492 } else {
493 __ PushRoot(Heap::kTheHoleValueRootIndex); 493 __ PushRoot(Heap::kTheHoleValueRootIndex);
(...skipping 12 matching lines...)
506 // a3: new target 506 // a3: new target
507 // t0: number of arguments (smi-tagged) 507 // t0: number of arguments (smi-tagged)
508 // sp[0]: receiver 508 // sp[0]: receiver
509 // sp[1]: receiver 509 // sp[1]: receiver
510 // sp[2]: number of arguments (smi-tagged) 510 // sp[2]: number of arguments (smi-tagged)
511 Label loop, entry; 511 Label loop, entry;
512 __ mov(t0, a0); 512 __ mov(t0, a0);
513 __ jmp(&entry); 513 __ jmp(&entry);
514 __ bind(&loop); 514 __ bind(&loop);
515 __ Dlsa(a4, a2, t0, kPointerSizeLog2); 515 __ Dlsa(a4, a2, t0, kPointerSizeLog2);
516 __ ld(a5, MemOperand(a4)); 516 __ Ld(a5, MemOperand(a4));
517 __ push(a5); 517 __ push(a5);
518 __ bind(&entry); 518 __ bind(&entry);
519 __ Daddu(t0, t0, Operand(-1)); 519 __ Daddu(t0, t0, Operand(-1));
520 __ Branch(&loop, greater_equal, t0, Operand(zero_reg)); 520 __ Branch(&loop, greater_equal, t0, Operand(zero_reg));
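
A plain-C++ rendering of the copy loop above (sketch; a2 points at the first constructor argument and push() stands for the stack push):

#include <cstdint>

// Arguments are pushed highest index first, matching the loop above.
void CopyConstructorArguments(const int64_t* a2, int64_t argc,
                              void (*push)(int64_t)) {
  for (int64_t t0 = argc - 1; t0 >= 0; --t0) {
    push(a2[t0]);  // Dlsa forms a2 + t0 * 8, Ld loads, push stores
  }
}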
521 521
522 // Call the function. 522 // Call the function.
523 // a0: number of arguments 523 // a0: number of arguments
524 // a1: constructor function 524 // a1: constructor function
525 // a3: new target 525 // a3: new target
526 ParameterCount actual(a0); 526 ParameterCount actual(a0);
527 __ InvokeFunction(a1, a3, actual, CALL_FUNCTION, 527 __ InvokeFunction(a1, a3, actual, CALL_FUNCTION,
528 CheckDebugStepCallWrapper()); 528 CheckDebugStepCallWrapper());
529 529
530 // Store offset of return address for deoptimizer. 530 // Store offset of return address for deoptimizer.
531 if (create_implicit_receiver && !disallow_non_object_return && 531 if (create_implicit_receiver && !disallow_non_object_return &&
532 !is_api_function) { 532 !is_api_function) {
533 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset( 533 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
534 masm->pc_offset()); 534 masm->pc_offset());
535 } 535 }
536 536
537 // Restore context from the frame. 537 // Restore context from the frame.
538 __ ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset)); 538 __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
539 539
540 if (create_implicit_receiver) { 540 if (create_implicit_receiver) {
541 // If the result is an object (in the ECMA sense), we should get rid 541 // If the result is an object (in the ECMA sense), we should get rid
542 // of the receiver and use the result; see ECMA-262 section 13.2.2-7 542 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
543 // on page 74. 543 // on page 74.
544 Label use_receiver, return_value, do_throw; 544 Label use_receiver, return_value, do_throw;
545 545
546 // If the result is a smi, it is *not* an object in the ECMA sense. 546 // If the result is a smi, it is *not* an object in the ECMA sense.
547 // v0: result 547 // v0: result
548 // sp[0]: receiver (newly allocated object) 548 // sp[0]: receiver (newly allocated object)
(...skipping 15 matching lines...)
564 Operand(FIRST_JS_RECEIVER_TYPE)); 564 Operand(FIRST_JS_RECEIVER_TYPE));
565 565
566 if (disallow_non_object_return) { 566 if (disallow_non_object_return) {
567 __ bind(&do_throw); 567 __ bind(&do_throw);
568 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject); 568 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
569 } 569 }
570 570
571 // Throw away the result of the constructor invocation and use the 571 // Throw away the result of the constructor invocation and use the
572 // on-stack receiver as the result. 572 // on-stack receiver as the result.
573 __ bind(&use_receiver); 573 __ bind(&use_receiver);
574 __ ld(v0, MemOperand(sp)); 574 __ Ld(v0, MemOperand(sp));
575 575
576 // Remove receiver from the stack, remove caller arguments, and 576 // Remove receiver from the stack, remove caller arguments, and
577 // return. 577 // return.
578 __ bind(&return_value); 578 __ bind(&return_value);
579 // v0: result 579 // v0: result
580 // sp[0]: receiver (newly allocated object) 580 // sp[0]: receiver (newly allocated object)
581 // sp[1]: number of arguments (smi-tagged) 581 // sp[1]: number of arguments (smi-tagged)
582 __ ld(a1, MemOperand(sp, 1 * kPointerSize)); 582 __ Ld(a1, MemOperand(sp, 1 * kPointerSize));
583 } else { 583 } else {
584 __ ld(a1, MemOperand(sp)); 584 __ Ld(a1, MemOperand(sp));
585 } 585 }
586 586
587 // Leave construct frame. 587 // Leave construct frame.
588 } 588 }
589 589
590 // ES6 9.2.2. Step 13+ 590 // ES6 9.2.2. Step 13+
591 // For derived class constructors, throw a TypeError here if the result 591 // For derived class constructors, throw a TypeError here if the result
592 // is not a JSReceiver. For the base constructor, we've already checked 592 // is not a JSReceiver. For the base constructor, we've already checked
593 // the result, so we omit the check. 593 // the result, so we omit the check.
594 if (disallow_non_object_return && !create_implicit_receiver) { 594 if (disallow_non_object_return && !create_implicit_receiver) {
(...skipping 28 matching lines...)
623 623
624 // ----------- S t a t e ------------- 624 // ----------- S t a t e -------------
625 // -- a0 : newly allocated object 625 // -- a0 : newly allocated object
626 // -- sp[0] : constructor function 626 // -- sp[0] : constructor function
627 // ----------------------------------- 627 // -----------------------------------
628 628
629 __ Pop(a1); 629 __ Pop(a1);
630 __ Push(a0, a0); 630 __ Push(a0, a0);
631 631
632 // Retrieve smi-tagged arguments count from the stack. 632 // Retrieve smi-tagged arguments count from the stack.
633 __ ld(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset)); 633 __ Ld(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
634 __ SmiUntag(a0); 634 __ SmiUntag(a0);
635 635
636 // Retrieve the new target value from the stack. This was placed into the 636 // Retrieve the new target value from the stack. This was placed into the
637 // frame description in place of the receiver by the optimizing compiler. 637 // frame description in place of the receiver by the optimizing compiler.
638 __ Daddu(a3, fp, Operand(StandardFrameConstants::kCallerSPOffset)); 638 __ Daddu(a3, fp, Operand(StandardFrameConstants::kCallerSPOffset));
639 __ Dlsa(a3, a3, a0, kPointerSizeLog2); 639 __ Dlsa(a3, a3, a0, kPointerSizeLog2);
640 __ ld(a3, MemOperand(a3)); 640 __ Ld(a3, MemOperand(a3));
641 641
642 // Continue with constructor function invocation. 642 // Continue with constructor function invocation.
643 __ jmp(&post_instantiation_deopt_entry); 643 __ jmp(&post_instantiation_deopt_entry);
644 } 644 }
645 } 645 }
646 646
647 } // namespace 647 } // namespace
648 648
649 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { 649 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
650 Generate_JSConstructStubHelper(masm, false, true, false); 650 Generate_JSConstructStubHelper(masm, false, true, false);
(...skipping 28 matching lines...)
679 __ SmiUntag(a3); 679 __ SmiUntag(a3);
680 __ AssertGeneratorObject(a1, a3); 680 __ AssertGeneratorObject(a1, a3);
681 681
682 // Store input value into generator object. 682 // Store input value into generator object.
683 Label async_await, done_store_input; 683 Label async_await, done_store_input;
684 684
685 __ And(t8, a3, Operand(static_cast<int>(SuspendFlags::kAsyncGeneratorAwait))); 685 __ And(t8, a3, Operand(static_cast<int>(SuspendFlags::kAsyncGeneratorAwait)));
686 __ Branch(&async_await, equal, t8, 686 __ Branch(&async_await, equal, t8,
687 Operand(static_cast<int>(SuspendFlags::kAsyncGeneratorAwait))); 687 Operand(static_cast<int>(SuspendFlags::kAsyncGeneratorAwait)));
688 688
689 __ sd(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset)); 689 __ Sd(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
690 __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3, 690 __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
691 kRAHasNotBeenSaved, kDontSaveFPRegs); 691 kRAHasNotBeenSaved, kDontSaveFPRegs);
692 __ jmp(&done_store_input); 692 __ jmp(&done_store_input);
693 693
694 __ bind(&async_await); 694 __ bind(&async_await);
695 __ sd(v0, FieldMemOperand( 695 __ Sd(v0, FieldMemOperand(
696 a1, JSAsyncGeneratorObject::kAwaitInputOrDebugPosOffset)); 696 a1, JSAsyncGeneratorObject::kAwaitInputOrDebugPosOffset));
697 __ RecordWriteField(a1, JSAsyncGeneratorObject::kAwaitInputOrDebugPosOffset, 697 __ RecordWriteField(a1, JSAsyncGeneratorObject::kAwaitInputOrDebugPosOffset,
698 v0, a3, kRAHasNotBeenSaved, kDontSaveFPRegs); 698 v0, a3, kRAHasNotBeenSaved, kDontSaveFPRegs);
699 699
700 __ bind(&done_store_input); 700 __ bind(&done_store_input);
701 // `a3` no longer holds SuspendFlags 701 // `a3` no longer holds SuspendFlags
702 702
703 // Store resume mode into generator object. 703 // Store resume mode into generator object.
704 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset)); 704 __ Sd(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset));
705 705
706 // Load suspended function and context. 706 // Load suspended function and context.
707 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); 707 __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
708 __ ld(cp, FieldMemOperand(a4, JSFunction::kContextOffset)); 708 __ Ld(cp, FieldMemOperand(a4, JSFunction::kContextOffset));
709 709
710 // Flood function if we are stepping. 710 // Flood function if we are stepping.
711 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator; 711 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
712 Label stepping_prepared; 712 Label stepping_prepared;
713 ExternalReference debug_hook = 713 ExternalReference debug_hook =
714 ExternalReference::debug_hook_on_function_call_address(masm->isolate()); 714 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
715 __ li(a5, Operand(debug_hook)); 715 __ li(a5, Operand(debug_hook));
716 __ lb(a5, MemOperand(a5)); 716 __ Lb(a5, MemOperand(a5));
717 __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg)); 717 __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg));
718 718
719 // Flood function if we need to continue stepping in the suspended generator. 719 // Flood function if we need to continue stepping in the suspended generator.
720 ExternalReference debug_suspended_generator = 720 ExternalReference debug_suspended_generator =
721 ExternalReference::debug_suspended_generator_address(masm->isolate()); 721 ExternalReference::debug_suspended_generator_address(masm->isolate());
722 __ li(a5, Operand(debug_suspended_generator)); 722 __ li(a5, Operand(debug_suspended_generator));
723 __ ld(a5, MemOperand(a5)); 723 __ Ld(a5, MemOperand(a5));
724 __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a5)); 724 __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a5));
725 __ bind(&stepping_prepared); 725 __ bind(&stepping_prepared);
726 726
727 // Push receiver. 727 // Push receiver.
728 __ ld(a5, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset)); 728 __ Ld(a5, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
729 __ Push(a5); 729 __ Push(a5);
730 730
731 // ----------- S t a t e ------------- 731 // ----------- S t a t e -------------
732 // -- a1 : the JSGeneratorObject to resume 732 // -- a1 : the JSGeneratorObject to resume
733 // -- a2 : the resume mode (tagged) 733 // -- a2 : the resume mode (tagged)
734 // -- a4 : generator function 734 // -- a4 : generator function
735 // -- cp : generator context 735 // -- cp : generator context
736 // -- ra : return address 736 // -- ra : return address
737 // -- sp[0] : generator receiver 737 // -- sp[0] : generator receiver
738 // ----------------------------------- 738 // -----------------------------------
739 739
740 // Push holes for arguments to generator function. Since the parser forced 740 // Push holes for arguments to generator function. Since the parser forced
741 // context allocation for any variables in generators, the actual argument 741 // context allocation for any variables in generators, the actual argument
742 // values have already been copied into the context and these dummy values 742 // values have already been copied into the context and these dummy values
743 // will never be used. 743 // will never be used.
744 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); 744 __ Ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
745 __ lw(a3, 745 __ Lw(a3,
746 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset)); 746 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
747 { 747 {
748 Label done_loop, loop; 748 Label done_loop, loop;
749 __ bind(&loop); 749 __ bind(&loop);
750 __ Dsubu(a3, a3, Operand(1)); 750 __ Dsubu(a3, a3, Operand(1));
751 __ Branch(&done_loop, lt, a3, Operand(zero_reg)); 751 __ Branch(&done_loop, lt, a3, Operand(zero_reg));
752 __ PushRoot(Heap::kTheHoleValueRootIndex); 752 __ PushRoot(Heap::kTheHoleValueRootIndex);
753 __ Branch(&loop); 753 __ Branch(&loop);
754 __ bind(&done_loop); 754 __ bind(&done_loop);
755 } 755 }
756 756
757 // Underlying function needs to have bytecode available. 757 // Underlying function needs to have bytecode available.
758 if (FLAG_debug_code) { 758 if (FLAG_debug_code) {
759 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); 759 __ Ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
760 __ ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset)); 760 __ Ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
761 __ GetObjectType(a3, a3, a3); 761 __ GetObjectType(a3, a3, a3);
762 __ Assert(eq, kMissingBytecodeArray, a3, Operand(BYTECODE_ARRAY_TYPE)); 762 __ Assert(eq, kMissingBytecodeArray, a3, Operand(BYTECODE_ARRAY_TYPE));
763 } 763 }
764 764
765 // Resume (Ignition/TurboFan) generator object. 765 // Resume (Ignition/TurboFan) generator object.
766 { 766 {
767 __ ld(a0, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); 767 __ Ld(a0, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
768 __ lw(a0, 768 __ Lw(a0,
769 FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset)); 769 FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
770 // We abuse new.target both to indicate that this is a resume call and to 770 // We abuse new.target both to indicate that this is a resume call and to
771 // pass in the generator object. In ordinary calls, new.target is always 771 // pass in the generator object. In ordinary calls, new.target is always
772 // undefined because generator functions are non-constructable. 772 // undefined because generator functions are non-constructable.
773 __ Move(a3, a1); 773 __ Move(a3, a1);
774 __ Move(a1, a4); 774 __ Move(a1, a4);
775 __ ld(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); 775 __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
776 __ Jump(a2); 776 __ Jump(a2);
777 } 777 }
778 778
779 __ bind(&prepare_step_in_if_stepping); 779 __ bind(&prepare_step_in_if_stepping);
780 { 780 {
781 FrameScope scope(masm, StackFrame::INTERNAL); 781 FrameScope scope(masm, StackFrame::INTERNAL);
782 __ Push(a1, a2, a4); 782 __ Push(a1, a2, a4);
783 __ CallRuntime(Runtime::kDebugOnFunctionCall); 783 __ CallRuntime(Runtime::kDebugOnFunctionCall);
784 __ Pop(a1, a2); 784 __ Pop(a1, a2);
785 } 785 }
786 __ Branch(USE_DELAY_SLOT, &stepping_prepared); 786 __ Branch(USE_DELAY_SLOT, &stepping_prepared);
787 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); 787 __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
788 788
789 __ bind(&prepare_step_in_suspended_generator); 789 __ bind(&prepare_step_in_suspended_generator);
790 { 790 {
791 FrameScope scope(masm, StackFrame::INTERNAL); 791 FrameScope scope(masm, StackFrame::INTERNAL);
792 __ Push(a1, a2); 792 __ Push(a1, a2);
793 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator); 793 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
794 __ Pop(a1, a2); 794 __ Pop(a1, a2);
795 } 795 }
796 __ Branch(USE_DELAY_SLOT, &stepping_prepared); 796 __ Branch(USE_DELAY_SLOT, &stepping_prepared);
797 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); 797 __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
798 } 798 }
799 799
800 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { 800 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
801 FrameScope scope(masm, StackFrame::INTERNAL); 801 FrameScope scope(masm, StackFrame::INTERNAL);
802 __ Push(a1); 802 __ Push(a1);
803 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); 803 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
804 } 804 }
805 805
806 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt }; 806 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
807 807
(...skipping 37 matching lines...)
845 ProfileEntryHookStub::MaybeCallEntryHook(masm); 845 ProfileEntryHookStub::MaybeCallEntryHook(masm);
846 846
847 // Enter an internal frame. 847 // Enter an internal frame.
848 { 848 {
849 FrameScope scope(masm, StackFrame::INTERNAL); 849 FrameScope scope(masm, StackFrame::INTERNAL);
850 850
851 // Setup the context (we need to use the caller context from the isolate). 851 // Setup the context (we need to use the caller context from the isolate).
852 ExternalReference context_address(Isolate::kContextAddress, 852 ExternalReference context_address(Isolate::kContextAddress,
853 masm->isolate()); 853 masm->isolate());
854 __ li(cp, Operand(context_address)); 854 __ li(cp, Operand(context_address));
855 __ ld(cp, MemOperand(cp)); 855 __ Ld(cp, MemOperand(cp));
856 856
857 // Push the function and the receiver onto the stack. 857 // Push the function and the receiver onto the stack.
858 __ Push(a1, a2); 858 __ Push(a1, a2);
859 859
860 // Check if we have enough stack space to push all arguments. 860 // Check if we have enough stack space to push all arguments.
861 // Clobbers a2. 861 // Clobbers a2.
862 Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt); 862 Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);
863 863
864 // Remember new.target. 864 // Remember new.target.
865 __ mov(a5, a0); 865 __ mov(a5, a0);
866 866
867 // Copy arguments to the stack in a loop. 867 // Copy arguments to the stack in a loop.
868 // a3: argc 868 // a3: argc
869 // s0: argv, i.e. points to first arg 869 // s0: argv, i.e. points to first arg
870 Label loop, entry; 870 Label loop, entry;
871 __ Dlsa(a6, s0, a3, kPointerSizeLog2); 871 __ Dlsa(a6, s0, a3, kPointerSizeLog2);
872 __ b(&entry); 872 __ b(&entry);
873 __ nop(); // Branch delay slot nop. 873 __ nop(); // Branch delay slot nop.
874 // a6 points past last arg. 874 // a6 points past last arg.
875 __ bind(&loop); 875 __ bind(&loop);
876 __ ld(a4, MemOperand(s0)); // Read next parameter. 876 __ Ld(a4, MemOperand(s0)); // Read next parameter.
877 __ daddiu(s0, s0, kPointerSize); 877 __ daddiu(s0, s0, kPointerSize);
878 __ ld(a4, MemOperand(a4)); // Dereference handle. 878 __ Ld(a4, MemOperand(a4)); // Dereference handle.
879 __ push(a4); // Push parameter. 879 __ push(a4); // Push parameter.
880 __ bind(&entry); 880 __ bind(&entry);
881 __ Branch(&loop, ne, s0, Operand(a6)); 881 __ Branch(&loop, ne, s0, Operand(a6));
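
Each argv slot here holds a handle, i.e. a pointer to a slot containing the actual object pointer, hence the two Ld instructions per parameter. A sketch with illustrative names:

#include <cstdint>

void PushParameters(int64_t* const* argv, int64_t argc,
                    void (*push)(int64_t)) {
  int64_t* const* end = argv + argc;  // a6 points past the last argument
  for (int64_t* const* p = argv; p != end; ++p) {
    int64_t* handle = *p;             // read next parameter (first Ld)
    push(*handle);                    // dereference handle (second Ld), push
  }
}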
882 882
883 // Setup new.target and argc. 883 // Setup new.target and argc.
884 __ mov(a0, a3); 884 __ mov(a0, a3);
885 __ mov(a3, a5); 885 __ mov(a3, a5);
886 886
887 // Initialize all JavaScript callee-saved registers, since they will be seen 887 // Initialize all JavaScript callee-saved registers, since they will be seen
888 // by the garbage collector as part of handlers. 888 // by the garbage collector as part of handlers.
(...skipping 22 matching lines...)
911 } 911 }
912 912
913 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { 913 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
914 Generate_JSEntryTrampolineHelper(masm, true); 914 Generate_JSEntryTrampolineHelper(masm, true);
915 } 915 }
916 916
917 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) { 917 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
918 Register args_count = scratch; 918 Register args_count = scratch;
919 919
920 // Get the arguments + receiver count. 920 // Get the arguments + receiver count.
921 __ ld(args_count, 921 __ Ld(args_count,
922 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); 922 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
923 __ lw(t0, FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset)); 923 __ Lw(t0, FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
924 924
925 // Leave the frame (also dropping the register file). 925 // Leave the frame (also dropping the register file).
926 __ LeaveFrame(StackFrame::JAVA_SCRIPT); 926 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
927 927
928 // Drop receiver + arguments. 928 // Drop receiver + arguments.
929 __ Daddu(sp, sp, args_count); 929 __ Daddu(sp, sp, args_count);
930 } 930 }
931 931
932 // Generate code for entering a JS function with the interpreter. 932 // Generate code for entering a JS function with the interpreter.
933 // On entry to the function the receiver and arguments have been pushed on the 933 // On entry to the function the receiver and arguments have been pushed on the
(...skipping 14 matching lines...)
948 ProfileEntryHookStub::MaybeCallEntryHook(masm); 948 ProfileEntryHookStub::MaybeCallEntryHook(masm);
949 949
950 // Open a frame scope to indicate that there is a frame on the stack. The 950 // Open a frame scope to indicate that there is a frame on the stack. The
951 // MANUAL indicates that the scope shouldn't actually generate code to set up 951 // MANUAL indicates that the scope shouldn't actually generate code to set up
952 // the frame (that is done below). 952 // the frame (that is done below).
953 FrameScope frame_scope(masm, StackFrame::MANUAL); 953 FrameScope frame_scope(masm, StackFrame::MANUAL);
954 __ PushStandardFrame(a1); 954 __ PushStandardFrame(a1);
955 955
956 // Get the bytecode array from the function object (or from the DebugInfo if 956 // Get the bytecode array from the function object (or from the DebugInfo if
957 // it is present) and load it into kInterpreterBytecodeArrayRegister. 957 // it is present) and load it into kInterpreterBytecodeArrayRegister.
958 __ ld(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); 958 __ Ld(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
959 Label load_debug_bytecode_array, bytecode_array_loaded; 959 Label load_debug_bytecode_array, bytecode_array_loaded;
960 Register debug_info = kInterpreterBytecodeArrayRegister; 960 Register debug_info = kInterpreterBytecodeArrayRegister;
961 DCHECK(!debug_info.is(a0)); 961 DCHECK(!debug_info.is(a0));
962 __ ld(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset)); 962 __ Ld(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset));
963 __ JumpIfNotSmi(debug_info, &load_debug_bytecode_array); 963 __ JumpIfNotSmi(debug_info, &load_debug_bytecode_array);
964 __ ld(kInterpreterBytecodeArrayRegister, 964 __ Ld(kInterpreterBytecodeArrayRegister,
965 FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset)); 965 FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
966 __ bind(&bytecode_array_loaded); 966 __ bind(&bytecode_array_loaded);
967 967
968 // Check whether we should continue to use the interpreter. 968 // Check whether we should continue to use the interpreter.
969 // TODO(rmcilroy) Remove self healing once liveedit only has to deal with 969 // TODO(rmcilroy) Remove self healing once liveedit only has to deal with
970 // Ignition bytecode. 970 // Ignition bytecode.
971 Label switch_to_different_code_kind; 971 Label switch_to_different_code_kind;
972 __ ld(a0, FieldMemOperand(a0, SharedFunctionInfo::kCodeOffset)); 972 __ Ld(a0, FieldMemOperand(a0, SharedFunctionInfo::kCodeOffset));
973 __ Branch(&switch_to_different_code_kind, ne, a0, 973 __ Branch(&switch_to_different_code_kind, ne, a0,
974 Operand(masm->CodeObject())); // Self-reference to this code. 974 Operand(masm->CodeObject())); // Self-reference to this code.
975 975
976 // Increment invocation count for the function. 976 // Increment invocation count for the function.
977 __ ld(a0, FieldMemOperand(a1, JSFunction::kFeedbackVectorOffset)); 977 __ Ld(a0, FieldMemOperand(a1, JSFunction::kFeedbackVectorOffset));
978 __ ld(a0, FieldMemOperand(a0, Cell::kValueOffset)); 978 __ Ld(a0, FieldMemOperand(a0, Cell::kValueOffset));
979 __ ld(a4, FieldMemOperand( 979 __ Ld(a4, FieldMemOperand(
980 a0, FeedbackVector::kInvocationCountIndex * kPointerSize + 980 a0, FeedbackVector::kInvocationCountIndex * kPointerSize +
981 FeedbackVector::kHeaderSize)); 981 FeedbackVector::kHeaderSize));
982 __ Daddu(a4, a4, Operand(Smi::FromInt(1))); 982 __ Daddu(a4, a4, Operand(Smi::FromInt(1)));
983 __ sd(a4, FieldMemOperand( 983 __ Sd(a4, FieldMemOperand(
984 a0, FeedbackVector::kInvocationCountIndex * kPointerSize + 984 a0, FeedbackVector::kInvocationCountIndex * kPointerSize +
985 FeedbackVector::kHeaderSize)); 985 FeedbackVector::kHeaderSize));
986 986
987 // Check function data field is actually a BytecodeArray object. 987 // Check function data field is actually a BytecodeArray object.
988 if (FLAG_debug_code) { 988 if (FLAG_debug_code) {
989 __ SmiTst(kInterpreterBytecodeArrayRegister, a4); 989 __ SmiTst(kInterpreterBytecodeArrayRegister, a4);
990 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4, 990 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4,
991 Operand(zero_reg)); 991 Operand(zero_reg));
992 __ GetObjectType(kInterpreterBytecodeArrayRegister, a4, a4); 992 __ GetObjectType(kInterpreterBytecodeArrayRegister, a4, a4);
993 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4, 993 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4,
994 Operand(BYTECODE_ARRAY_TYPE)); 994 Operand(BYTECODE_ARRAY_TYPE));
995 } 995 }
996 996
997 // Reset code age. 997 // Reset code age.
998 DCHECK_EQ(0, BytecodeArray::kNoAgeBytecodeAge); 998 DCHECK_EQ(0, BytecodeArray::kNoAgeBytecodeAge);
999 __ sb(zero_reg, FieldMemOperand(kInterpreterBytecodeArrayRegister, 999 __ sb(zero_reg, FieldMemOperand(kInterpreterBytecodeArrayRegister,
1000 BytecodeArray::kBytecodeAgeOffset)); 1000 BytecodeArray::kBytecodeAgeOffset));
1001 1001
1002 // Load initial bytecode offset. 1002 // Load initial bytecode offset.
1003 __ li(kInterpreterBytecodeOffsetRegister, 1003 __ li(kInterpreterBytecodeOffsetRegister,
1004 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag)); 1004 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1005 1005
1006 // Push new.target, bytecode array and Smi tagged bytecode array offset. 1006 // Push new.target, bytecode array and Smi tagged bytecode array offset.
1007 __ SmiTag(a4, kInterpreterBytecodeOffsetRegister); 1007 __ SmiTag(a4, kInterpreterBytecodeOffsetRegister);
1008 __ Push(a3, kInterpreterBytecodeArrayRegister, a4); 1008 __ Push(a3, kInterpreterBytecodeArrayRegister, a4);
1009 1009
1010 // Allocate the local and temporary register file on the stack. 1010 // Allocate the local and temporary register file on the stack.
1011 { 1011 {
1012 // Load frame size (word) from the BytecodeArray object. 1012 // Load frame size (word) from the BytecodeArray object.
1013 __ lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister, 1013 __ Lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
1014 BytecodeArray::kFrameSizeOffset)); 1014 BytecodeArray::kFrameSizeOffset));
1015 1015
1016 // Do a stack check to ensure we don't go over the limit. 1016 // Do a stack check to ensure we don't go over the limit.
1017 Label ok; 1017 Label ok;
1018 __ Dsubu(a5, sp, Operand(a4)); 1018 __ Dsubu(a5, sp, Operand(a4));
1019 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); 1019 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
1020 __ Branch(&ok, hs, a5, Operand(a2)); 1020 __ Branch(&ok, hs, a5, Operand(a2));
1021 __ CallRuntime(Runtime::kThrowStackOverflow); 1021 __ CallRuntime(Runtime::kThrowStackOverflow);
1022 __ bind(&ok); 1022 __ bind(&ok);
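
The stack check above in one line (sketch; frame_size is the word loaded from BytecodeArray::kFrameSizeOffset and real_stack_limit is the Heap::kRealStackLimitRootIndex root; names are illustrative):

#include <cstdint>

// hs (unsigned higher-or-same) in the Branch above maps to the >= here.
void CheckInterpreterStack(uintptr_t sp, uint32_t frame_size,
                           uintptr_t real_stack_limit,
                           void (*throw_stack_overflow)()) {
  if (sp - frame_size < real_stack_limit) throw_stack_overflow();
}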
1023 1023
(...skipping 13 matching lines...)
1037 1037
1038 // Load accumulator and dispatch table into registers. 1038 // Load accumulator and dispatch table into registers.
1039 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); 1039 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
1040 __ li(kInterpreterDispatchTableRegister, 1040 __ li(kInterpreterDispatchTableRegister,
1041 Operand(ExternalReference::interpreter_dispatch_table_address( 1041 Operand(ExternalReference::interpreter_dispatch_table_address(
1042 masm->isolate()))); 1042 masm->isolate())));
1043 1043
1044 // Dispatch to the first bytecode handler for the function. 1044 // Dispatch to the first bytecode handler for the function.
1045 __ Daddu(a0, kInterpreterBytecodeArrayRegister, 1045 __ Daddu(a0, kInterpreterBytecodeArrayRegister,
1046 kInterpreterBytecodeOffsetRegister); 1046 kInterpreterBytecodeOffsetRegister);
1047 __ lbu(a0, MemOperand(a0)); 1047 __ Lbu(a0, MemOperand(a0));
1048 __ Dlsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2); 1048 __ Dlsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2);
1049 __ ld(at, MemOperand(at)); 1049 __ Ld(at, MemOperand(at));
1050 __ Call(at); 1050 __ Call(at);
1051 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset()); 1051 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
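
A minimal model of the dispatch sequence above (illustrative only): Lbu reads the current bytecode, Dlsa scales it into the pointer-sized dispatch table, Ld fetches the handler address, and Call jumps to it.

#include <cstdint>

using BytecodeHandler = void (*)();

void Dispatch(const uint8_t* bytecode_array, intptr_t offset,
              BytecodeHandler* dispatch_table) {
  uint8_t bytecode = bytecode_array[offset];           // Lbu(a0, ...)
  BytecodeHandler handler = dispatch_table[bytecode];  // Dlsa + Ld
  handler();                                           // Call(at)
}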
1052 1052
1053 // The return value is in v0. 1053 // The return value is in v0.
1054 LeaveInterpreterFrame(masm, t0); 1054 LeaveInterpreterFrame(masm, t0);
1055 __ Jump(ra); 1055 __ Jump(ra);
1056 1056
1057 // Load debug copy of the bytecode array. 1057 // Load debug copy of the bytecode array.
1058 __ bind(&load_debug_bytecode_array); 1058 __ bind(&load_debug_bytecode_array);
1059 __ ld(kInterpreterBytecodeArrayRegister, 1059 __ Ld(kInterpreterBytecodeArrayRegister,
1060 FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex)); 1060 FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
1061 __ Branch(&bytecode_array_loaded); 1061 __ Branch(&bytecode_array_loaded);
1062 1062
1063 // If the shared code is no longer this entry trampoline, then the underlying 1063 // If the shared code is no longer this entry trampoline, then the underlying
1064 // function has been switched to a different kind of code and we heal the 1064 // function has been switched to a different kind of code and we heal the
1065 // closure by switching the code entry field over to the new code as well. 1065 // closure by switching the code entry field over to the new code as well.
1066 __ bind(&switch_to_different_code_kind); 1066 __ bind(&switch_to_different_code_kind);
1067 __ LeaveFrame(StackFrame::JAVA_SCRIPT); 1067 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
1068 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); 1068 __ Ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1069 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kCodeOffset)); 1069 __ Ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kCodeOffset));
1070 __ Daddu(a4, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); 1070 __ Daddu(a4, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
1071 __ sd(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); 1071 __ Sd(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
1072 __ RecordWriteCodeEntryField(a1, a4, a5); 1072 __ RecordWriteCodeEntryField(a1, a4, a5);
1073 __ Jump(a4); 1073 __ Jump(a4);
1074 } 1074 }
1075 1075
1076 static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args, 1076 static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
1077 Register scratch1, Register scratch2, 1077 Register scratch1, Register scratch2,
1078 Label* stack_overflow) { 1078 Label* stack_overflow) {
1079 // Check the stack for overflow. We are not trying to catch 1079 // Check the stack for overflow. We are not trying to catch
1080 // interruptions (e.g. debug break and preemption) here, so the "real stack 1080 // interruptions (e.g. debug break and preemption) here, so the "real stack
1081 // limit" is checked. 1081 // limit" is checked.
(...skipping 12 matching lines...)
1094 Register scratch, Register scratch2) { 1094 Register scratch, Register scratch2) {
1095 // Find the address of the last argument. 1095 // Find the address of the last argument.
1096 __ mov(scratch2, num_args); 1096 __ mov(scratch2, num_args);
1097 __ dsll(scratch2, scratch2, kPointerSizeLog2); 1097 __ dsll(scratch2, scratch2, kPointerSizeLog2);
1098 __ Dsubu(scratch2, index, Operand(scratch2)); 1098 __ Dsubu(scratch2, index, Operand(scratch2));
1099 1099
1100 // Push the arguments. 1100 // Push the arguments.
1101 Label loop_header, loop_check; 1101 Label loop_header, loop_check;
1102 __ Branch(&loop_check); 1102 __ Branch(&loop_check);
1103 __ bind(&loop_header); 1103 __ bind(&loop_header);
1104 __ ld(scratch, MemOperand(index)); 1104 __ Ld(scratch, MemOperand(index));
1105 __ Daddu(index, index, Operand(-kPointerSize)); 1105 __ Daddu(index, index, Operand(-kPointerSize));
1106 __ push(scratch); 1106 __ push(scratch);
1107 __ bind(&loop_check); 1107 __ bind(&loop_check);
1108 __ Branch(&loop_header, gt, index, Operand(scratch2)); 1108 __ Branch(&loop_header, gt, index, Operand(scratch2));
1109 } 1109 }
1110 1110
1111 // static 1111 // static
1112 void Builtins::Generate_InterpreterPushArgsThenCallImpl( 1112 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1113 MacroAssembler* masm, ConvertReceiverMode receiver_mode, 1113 MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1114 TailCallMode tail_call_mode, InterpreterPushArgsMode mode) { 1114 TailCallMode tail_call_mode, InterpreterPushArgsMode mode) {
(...skipping 60 matching lines...)
1175 1175
1176 // This function modifies t0, a4 and a5. 1176 // This function modifies t0, a4 and a5.
1177 Generate_InterpreterPushArgs(masm, a0, a4, a5, t0); 1177 Generate_InterpreterPushArgs(masm, a0, a4, a5, t0);
1178 1178
1179 __ AssertUndefinedOrAllocationSite(a2, t0); 1179 __ AssertUndefinedOrAllocationSite(a2, t0);
1180 if (mode == InterpreterPushArgsMode::kJSFunction) { 1180 if (mode == InterpreterPushArgsMode::kJSFunction) {
1181 __ AssertFunction(a1); 1181 __ AssertFunction(a1);
1182 1182
1183 // Tail call to the function-specific construct stub (still in the caller 1183 // Tail call to the function-specific construct stub (still in the caller
1184 // context at this point). 1184 // context at this point).
1185 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); 1185 __ Ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1186 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset)); 1186 __ Ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
1187 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); 1187 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
1188 __ Jump(at); 1188 __ Jump(at);
1189 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) { 1189 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1190 // Call the constructor with a0, a1, and a3 unmodified. 1190 // Call the constructor with a0, a1, and a3 unmodified.
1191 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), 1191 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
1192 RelocInfo::CODE_TARGET); 1192 RelocInfo::CODE_TARGET);
1193 } else { 1193 } else {
1194 DCHECK_EQ(InterpreterPushArgsMode::kOther, mode); 1194 DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1195 // Call the constructor with a0, a1, and a3 unmodified. 1195 // Call the constructor with a0, a1, and a3 unmodified.
1196 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 1196 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
(...skipping 51 matching lines...)
1248 __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline())); 1248 __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
1249 __ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() + 1249 __ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
1250 Code::kHeaderSize - kHeapObjectTag)); 1250 Code::kHeaderSize - kHeapObjectTag));
1251 1251
1252 // Initialize the dispatch table register. 1252 // Initialize the dispatch table register.
1253 __ li(kInterpreterDispatchTableRegister, 1253 __ li(kInterpreterDispatchTableRegister,
1254 Operand(ExternalReference::interpreter_dispatch_table_address( 1254 Operand(ExternalReference::interpreter_dispatch_table_address(
1255 masm->isolate()))); 1255 masm->isolate())));
1256 1256
1257 // Get the bytecode array pointer from the frame. 1257 // Get the bytecode array pointer from the frame.
1258 __ ld(kInterpreterBytecodeArrayRegister, 1258 __ Ld(kInterpreterBytecodeArrayRegister,
1259 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); 1259 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1260 1260
1261 if (FLAG_debug_code) { 1261 if (FLAG_debug_code) {
1262 // Check function data field is actually a BytecodeArray object. 1262 // Check function data field is actually a BytecodeArray object.
1263 __ SmiTst(kInterpreterBytecodeArrayRegister, at); 1263 __ SmiTst(kInterpreterBytecodeArrayRegister, at);
1264 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at, 1264 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
1265 Operand(zero_reg)); 1265 Operand(zero_reg));
1266 __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1); 1266 __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
1267 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1, 1267 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
1268 Operand(BYTECODE_ARRAY_TYPE)); 1268 Operand(BYTECODE_ARRAY_TYPE));
1269 } 1269 }
1270 1270
1271 // Get the target bytecode offset from the frame. 1271 // Get the target bytecode offset from the frame.
1272 __ lw( 1272 __ Lw(
1273 kInterpreterBytecodeOffsetRegister, 1273 kInterpreterBytecodeOffsetRegister,
1274 UntagSmiMemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); 1274 UntagSmiMemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1275 1275
1276 // Dispatch to the target bytecode. 1276 // Dispatch to the target bytecode.
1277 __ Daddu(a1, kInterpreterBytecodeArrayRegister, 1277 __ Daddu(a1, kInterpreterBytecodeArrayRegister,
1278 kInterpreterBytecodeOffsetRegister); 1278 kInterpreterBytecodeOffsetRegister);
1279 __ lbu(a1, MemOperand(a1)); 1279 __ Lbu(a1, MemOperand(a1));
1280 __ Dlsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2); 1280 __ Dlsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2);
1281 __ ld(a1, MemOperand(a1)); 1281 __ Ld(a1, MemOperand(a1));
1282 __ Jump(a1); 1282 __ Jump(a1);
1283 } 1283 }
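The dispatch sequence above is a two-level indexed load; as a standalone C++ model (illustrative names and types):

    #include <cstdint>

    // handler = dispatch_table[bytecode_array[bytecode_offset]]
    uintptr_t Dispatch(const uint8_t* bytecode_array, int bytecode_offset,
                       const uintptr_t* dispatch_table) {
      uint8_t bytecode = bytecode_array[bytecode_offset];  // Daddu + Lbu
      return dispatch_table[bytecode];                     // Dlsa + Ld
    }

The Dlsa scales the bytecode by kPointerSizeLog2 in a single instruction; the array indexing above hides that scaling.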
1284 1284
1285 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) { 1285 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1286 // Advance the current bytecode offset stored within the given interpreter 1286 // Advance the current bytecode offset stored within the given interpreter
1287 // stack frame. This simulates what all bytecode handlers do upon completion 1287 // stack frame. This simulates what all bytecode handlers do upon completion
1288 // of the underlying operation. 1288 // of the underlying operation.
1289 __ ld(a1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); 1289 __ Ld(a1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1290 __ ld(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); 1290 __ Ld(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1291 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 1291 __ Ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1292 { 1292 {
1293 FrameScope scope(masm, StackFrame::INTERNAL); 1293 FrameScope scope(masm, StackFrame::INTERNAL);
1294 __ Push(kInterpreterAccumulatorRegister, a1, a2); 1294 __ Push(kInterpreterAccumulatorRegister, a1, a2);
1295 __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset); 1295 __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
1296 __ mov(a2, v0); // Result is the new bytecode offset. 1296 __ mov(a2, v0); // Result is the new bytecode offset.
1297 __ Pop(kInterpreterAccumulatorRegister); 1297 __ Pop(kInterpreterAccumulatorRegister);
1298 } 1298 }
1299 __ sd(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); 1299 __ Sd(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1300 1300
1301 Generate_InterpreterEnterBytecode(masm); 1301 Generate_InterpreterEnterBytecode(masm);
1302 } 1302 }
1303 1303
1304 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) { 1304 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1305 Generate_InterpreterEnterBytecode(masm); 1305 Generate_InterpreterEnterBytecode(masm);
1306 } 1306 }
1307 1307
1308 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { 1308 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
1309 // ----------- S t a t e ------------- 1309 // ----------- S t a t e -------------
1310 // -- a0 : argument count (preserved for callee) 1310 // -- a0 : argument count (preserved for callee)
1311 // -- a3 : new target (preserved for callee) 1311 // -- a3 : new target (preserved for callee)
1312 // -- a1 : target function (preserved for callee) 1312 // -- a1 : target function (preserved for callee)
1313 // ----------------------------------- 1313 // -----------------------------------
1314 // First, look up code; maybe we don't need to compile! 1314 // First, look up code; maybe we don't need to compile!
1315 Label gotta_call_runtime, gotta_call_runtime_no_stack; 1315 Label gotta_call_runtime, gotta_call_runtime_no_stack;
1316 Label try_shared; 1316 Label try_shared;
1317 Label loop_top, loop_bottom; 1317 Label loop_top, loop_bottom;
1318 1318
1319 Register argument_count = a0; 1319 Register argument_count = a0;
1320 Register closure = a1; 1320 Register closure = a1;
1321 Register new_target = a3; 1321 Register new_target = a3;
1322 Register map = a0; 1322 Register map = a0;
1323 Register index = a2; 1323 Register index = a2;
1324 1324
1325 // Do we have a valid feedback vector? 1325 // Do we have a valid feedback vector?
1326 __ ld(index, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset)); 1326 __ Ld(index, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset));
1327 __ ld(index, FieldMemOperand(index, Cell::kValueOffset)); 1327 __ Ld(index, FieldMemOperand(index, Cell::kValueOffset));
1328 __ JumpIfRoot(index, Heap::kUndefinedValueRootIndex, 1328 __ JumpIfRoot(index, Heap::kUndefinedValueRootIndex,
1329 &gotta_call_runtime_no_stack); 1329 &gotta_call_runtime_no_stack);
1330 1330
1331 __ push(argument_count); 1331 __ push(argument_count);
1332 __ push(new_target); 1332 __ push(new_target);
1333 __ push(closure); 1333 __ push(closure);
1334 1334
1335 __ ld(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); 1335 __ Ld(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1336 __ ld(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset)); 1336 __ Ld(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
1337 __ ld(index, FieldMemOperand(map, FixedArray::kLengthOffset)); 1337 __ Ld(index, FieldMemOperand(map, FixedArray::kLengthOffset));
1338 __ Branch(&try_shared, lt, index, Operand(Smi::FromInt(2))); 1338 __ Branch(&try_shared, lt, index, Operand(Smi::FromInt(2)));
1339 1339
1340 // a3 : native context 1340 // a3 : native context
1341 // a2 : length / index 1341 // a2 : length / index
1342 // a0 : optimized code map 1342 // a0 : optimized code map
1343 // stack[0] : new target 1343 // stack[0] : new target
1344 // stack[4] : closure 1344 // stack[4] : closure
1345 Register native_context = a3; 1345 Register native_context = a3;
1346 __ ld(native_context, NativeContextMemOperand()); 1346 __ Ld(native_context, NativeContextMemOperand());
1347 1347
1348 __ bind(&loop_top); 1348 __ bind(&loop_top);
1349 Register temp = a1; 1349 Register temp = a1;
1350 Register array_pointer = a5; 1350 Register array_pointer = a5;
1351 1351
1352 // Does the native context match? 1352 // Does the native context match?
1353 __ SmiScale(at, index, kPointerSizeLog2); 1353 __ SmiScale(at, index, kPointerSizeLog2);
1354 __ Daddu(array_pointer, map, Operand(at)); 1354 __ Daddu(array_pointer, map, Operand(at));
1355 __ ld(temp, FieldMemOperand(array_pointer, 1355 __ Ld(temp, FieldMemOperand(array_pointer,
1356 SharedFunctionInfo::kOffsetToPreviousContext)); 1356 SharedFunctionInfo::kOffsetToPreviousContext));
1357 __ ld(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); 1357 __ Ld(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1358 __ Branch(&loop_bottom, ne, temp, Operand(native_context)); 1358 __ Branch(&loop_bottom, ne, temp, Operand(native_context));
1359 1359
1360 // Code available? 1360 // Code available?
1361 Register entry = a4; 1361 Register entry = a4;
1362 __ ld(entry, 1362 __ Ld(entry,
1363 FieldMemOperand(array_pointer, 1363 FieldMemOperand(array_pointer,
1364 SharedFunctionInfo::kOffsetToPreviousCachedCode)); 1364 SharedFunctionInfo::kOffsetToPreviousCachedCode));
1365 __ ld(entry, FieldMemOperand(entry, WeakCell::kValueOffset)); 1365 __ Ld(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
1366 __ JumpIfSmi(entry, &try_shared); 1366 __ JumpIfSmi(entry, &try_shared);
1367 1367
1368 // Found code. Get it into the closure and return. 1368 // Found code. Get it into the closure and return.
1369 __ pop(closure); 1369 __ pop(closure);
1370 // Store code entry in the closure. 1370 // Store code entry in the closure.
1371 __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); 1371 __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1372 __ sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset)); 1372 __ Sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1373 __ RecordWriteCodeEntryField(closure, entry, a5); 1373 __ RecordWriteCodeEntryField(closure, entry, a5);
1374 1374
1375 // Link the closure into the optimized function list. 1375 // Link the closure into the optimized function list.
1376 // a4 : code entry 1376 // a4 : code entry
1377 // a3 : native context 1377 // a3 : native context
1378 // a1 : closure 1378 // a1 : closure
1379 __ ld(a5, 1379 __ Ld(a5,
1380 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST)); 1380 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1381 __ sd(a5, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset)); 1381 __ Sd(a5, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
1382 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, a5, a0, 1382 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, a5, a0,
1383 kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET, 1383 kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1384 OMIT_SMI_CHECK); 1384 OMIT_SMI_CHECK);
1385 const int function_list_offset = 1385 const int function_list_offset =
1386 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST); 1386 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
1387 __ sd(closure, 1387 __ Sd(closure,
1388 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST)); 1388 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1389 // Save closure before the write barrier. 1389 // Save closure before the write barrier.
1390 __ mov(a5, closure); 1390 __ mov(a5, closure);
1391 __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0, 1391 __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0,
1392 kRAHasNotBeenSaved, kDontSaveFPRegs); 1392 kRAHasNotBeenSaved, kDontSaveFPRegs);
1393 __ mov(closure, a5); 1393 __ mov(closure, a5);
1394 __ pop(new_target); 1394 __ pop(new_target);
1395 __ pop(argument_count); 1395 __ pop(argument_count);
1396 __ Jump(entry); 1396 __ Jump(entry);
1397 1397
1398 __ bind(&loop_bottom); 1398 __ bind(&loop_bottom);
1399 __ Dsubu(index, index, 1399 __ Dsubu(index, index,
1400 Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength))); 1400 Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
1401 __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1))); 1401 __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));
1402 1402
1403 // We found no code. 1403 // We found no code.
1404 __ bind(&try_shared); 1404 __ bind(&try_shared);
1405 __ pop(closure); 1405 __ pop(closure);
1406 __ pop(new_target); 1406 __ pop(new_target);
1407 __ pop(argument_count); 1407 __ pop(argument_count);
1408 __ ld(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); 1408 __ Ld(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1409 // Is the shared function marked for tier up? 1409 // Is the shared function marked for tier up?
1410 __ lbu(a5, FieldMemOperand(entry, 1410 __ Lbu(a5, FieldMemOperand(entry,
1411 SharedFunctionInfo::kMarkedForTierUpByteOffset)); 1411 SharedFunctionInfo::kMarkedForTierUpByteOffset));
1412 __ And(a5, a5, 1412 __ And(a5, a5,
1413 Operand(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte)); 1413 Operand(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
1414 __ Branch(&gotta_call_runtime_no_stack, ne, a5, Operand(zero_reg)); 1414 __ Branch(&gotta_call_runtime_no_stack, ne, a5, Operand(zero_reg));
1415 1415
1416 // If SFI points to anything other than CompileLazy, install that. 1416 // If SFI points to anything other than CompileLazy, install that.
1417 __ ld(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset)); 1417 __ Ld(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
1418 __ Move(t1, masm->CodeObject()); 1418 __ Move(t1, masm->CodeObject());
1419 __ Branch(&gotta_call_runtime_no_stack, eq, entry, Operand(t1)); 1419 __ Branch(&gotta_call_runtime_no_stack, eq, entry, Operand(t1));
1420 1420
1421 // Install the SFI's code entry. 1421 // Install the SFI's code entry.
1422 __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); 1422 __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1423 __ sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset)); 1423 __ Sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1424 __ RecordWriteCodeEntryField(closure, entry, a5); 1424 __ RecordWriteCodeEntryField(closure, entry, a5);
1425 __ Jump(entry); 1425 __ Jump(entry);
1426 1426
1427 __ bind(&gotta_call_runtime); 1427 __ bind(&gotta_call_runtime);
1428 __ pop(closure); 1428 __ pop(closure);
1429 __ pop(new_target); 1429 __ pop(new_target);
1430 __ pop(argument_count); 1430 __ pop(argument_count);
1431 __ bind(&gotta_call_runtime_no_stack); 1431 __ bind(&gotta_call_runtime_no_stack);
1432 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); 1432 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1433 } 1433 }
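A standalone model of the lookup CompileLazy performs above, with the optimized code map flattened to an array of slots (field indices illustrative; the real map interleaves weak cells):

    #include <cstdint>

    // Walk the optimized code map from the back, kEntryLength slots per
    // entry; return the index of the cached code slot, or -1 to fall back.
    int FindCachedCode(const intptr_t* map, int length, intptr_t native_ctx) {
      const int kEntryLength = 2;  // assumed: context slot + code slot
      for (int i = length; i >= kEntryLength; i -= kEntryLength) {
        if (map[i - 2] == native_ctx && map[i - 1] != 0 /* not cleared */)
          return i - 1;
      }
      return -1;  // then try the SFI's own code, else Runtime::kCompileLazy
    }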
(...skipping 23 matching lines...)
1457 __ Push(a0, a1, a3, a1); 1457 __ Push(a0, a1, a3, a1);
1458 1458
1459 // Copy arguments from caller (stdlib, foreign, heap). 1459 // Copy arguments from caller (stdlib, foreign, heap).
1460 Label args_done; 1460 Label args_done;
1461 for (int j = 0; j < 4; ++j) { 1461 for (int j = 0; j < 4; ++j) {
1462 Label over; 1462 Label over;
1463 if (j < 3) { 1463 if (j < 3) {
1464 __ Branch(&over, ne, t2, Operand(j)); 1464 __ Branch(&over, ne, t2, Operand(j));
1465 } 1465 }
1466 for (int i = j - 1; i >= 0; --i) { 1466 for (int i = j - 1; i >= 0; --i) {
1467 __ ld(t2, MemOperand(fp, StandardFrameConstants::kCallerSPOffset + 1467 __ Ld(t2, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1468 i * kPointerSize)); 1468 i * kPointerSize));
1469 __ push(t2); 1469 __ push(t2);
1470 } 1470 }
1471 for (int i = 0; i < 3 - j; ++i) { 1471 for (int i = 0; i < 3 - j; ++i) {
1472 __ PushRoot(Heap::kUndefinedValueRootIndex); 1472 __ PushRoot(Heap::kUndefinedValueRootIndex);
1473 } 1473 }
1474 if (j < 3) { 1474 if (j < 3) {
1475 __ jmp(&args_done); 1475 __ jmp(&args_done);
1476 __ bind(&over); 1476 __ bind(&over);
1477 } 1477 }
(...skipping 128 matching lines...)
1606 Deoptimizer::BailoutType type) { 1606 Deoptimizer::BailoutType type) {
1607 { 1607 {
1608 FrameScope scope(masm, StackFrame::INTERNAL); 1608 FrameScope scope(masm, StackFrame::INTERNAL);
1609 // Pass the function and deoptimization type to the runtime system. 1609 // Pass the function and deoptimization type to the runtime system.
1610 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); 1610 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
1611 __ push(a0); 1611 __ push(a0);
1612 __ CallRuntime(Runtime::kNotifyDeoptimized); 1612 __ CallRuntime(Runtime::kNotifyDeoptimized);
1613 } 1613 }
1614 1614
1615 // Get the full codegen state from the stack and untag it -> a6. 1615 // Get the full codegen state from the stack and untag it -> a6.
1616 __ lw(a6, UntagSmiMemOperand(sp, 0 * kPointerSize)); 1616 __ Lw(a6, UntagSmiMemOperand(sp, 0 * kPointerSize));
1617 // Switch on the state. 1617 // Switch on the state.
1618 Label with_tos_register, unknown_state; 1618 Label with_tos_register, unknown_state;
1619 __ Branch( 1619 __ Branch(
1620 &with_tos_register, ne, a6, 1620 &with_tos_register, ne, a6,
1621 Operand(static_cast<int64_t>(Deoptimizer::BailoutState::NO_REGISTERS))); 1621 Operand(static_cast<int64_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
1622 __ Ret(USE_DELAY_SLOT); 1622 __ Ret(USE_DELAY_SLOT);
1623 // Safe to fill the delay slot; Daddu will emit one instruction. 1623 // Safe to fill the delay slot; Daddu will emit one instruction.
1624 __ Daddu(sp, sp, Operand(1 * kPointerSize)); // Remove state. 1624 __ Daddu(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1625 1625
1626 __ bind(&with_tos_register); 1626 __ bind(&with_tos_register);
1627 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code()); 1627 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
1628 __ ld(v0, MemOperand(sp, 1 * kPointerSize)); 1628 __ Ld(v0, MemOperand(sp, 1 * kPointerSize));
1629 __ Branch( 1629 __ Branch(
1630 &unknown_state, ne, a6, 1630 &unknown_state, ne, a6,
1631 Operand(static_cast<int64_t>(Deoptimizer::BailoutState::TOS_REGISTER))); 1631 Operand(static_cast<int64_t>(Deoptimizer::BailoutState::TOS_REGISTER)));
1632 1632
1633 __ Ret(USE_DELAY_SLOT); 1633 __ Ret(USE_DELAY_SLOT);
1634 // Safe to fill the delay slot; Daddu will emit one instruction. 1634 // Safe to fill the delay slot; Daddu will emit one instruction.
1635 __ Daddu(sp, sp, Operand(2 * kPointerSize)); // Remove state. 1635 __ Daddu(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1636 1636
1637 __ bind(&unknown_state); 1637 __ bind(&unknown_state);
1638 __ stop("no cases left"); 1638 __ stop("no cases left");
1639 } 1639 }
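The state dispatch above only decides how many stack slots to drop and whether to restore an accumulator value; a standalone model (BailoutState layout assumed):

    // Illustrative: slots dropped per bailout state.
    // NO_REGISTERS: [state]          -> drop 1 slot
    // TOS_REGISTER: [state][accum]   -> v0 = accum, then drop 2 slots
    int SlotsToDrop(bool has_tos_register) {
      return has_tos_register ? 2 : 1;
    }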
1640 1640
1641 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { 1641 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1642 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); 1642 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1643 } 1643 }
1644 1644
1645 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { 1645 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1646 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); 1646 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1647 } 1647 }
1648 1648
1649 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { 1649 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1650 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); 1650 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1651 } 1651 }
1652 1652
1653 static void Generate_OnStackReplacementHelper(MacroAssembler* masm, 1653 static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
1654 bool has_handler_frame) { 1654 bool has_handler_frame) {
1655 // Lookup the function in the JavaScript frame. 1655 // Lookup the function in the JavaScript frame.
1656 if (has_handler_frame) { 1656 if (has_handler_frame) {
1657 __ ld(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 1657 __ Ld(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1658 __ ld(a0, MemOperand(a0, JavaScriptFrameConstants::kFunctionOffset)); 1658 __ Ld(a0, MemOperand(a0, JavaScriptFrameConstants::kFunctionOffset));
1659 } else { 1659 } else {
1660 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1660 __ Ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1661 } 1661 }
1662 1662
1663 { 1663 {
1664 FrameScope scope(masm, StackFrame::INTERNAL); 1664 FrameScope scope(masm, StackFrame::INTERNAL);
1665 // Pass function as argument. 1665 // Pass function as argument.
1666 __ push(a0); 1666 __ push(a0);
1667 __ CallRuntime(Runtime::kCompileForOnStackReplacement); 1667 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1668 } 1668 }
1669 1669
1670 // If the code object is null, just return to the caller. 1670 // If the code object is null, just return to the caller.
1671 __ Ret(eq, v0, Operand(Smi::kZero)); 1671 __ Ret(eq, v0, Operand(Smi::kZero));
1672 1672
1673 // Drop any potential handler frame that may be sitting on top of the actual 1673 // Drop any potential handler frame that may be sitting on top of the actual
1674 // JavaScript frame. This is the case when OSR is triggered from bytecode. 1674 // JavaScript frame. This is the case when OSR is triggered from bytecode.
1675 if (has_handler_frame) { 1675 if (has_handler_frame) {
1676 __ LeaveFrame(StackFrame::STUB); 1676 __ LeaveFrame(StackFrame::STUB);
1677 } 1677 }
1678 1678
1679 // Load deoptimization data from the code object. 1679 // Load deoptimization data from the code object.
1680 // <deopt_data> = <code>[#deoptimization_data_offset] 1680 // <deopt_data> = <code>[#deoptimization_data_offset]
1681 __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); 1681 __ Ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1682 1682
1683 // Load the OSR entrypoint offset from the deoptimization data. 1683 // Load the OSR entrypoint offset from the deoptimization data.
1684 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset] 1684 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1685 __ lw(a1, 1685 __ Lw(a1,
1686 UntagSmiMemOperand(a1, FixedArray::OffsetOfElementAt( 1686 UntagSmiMemOperand(a1, FixedArray::OffsetOfElementAt(
1687 DeoptimizationInputData::kOsrPcOffsetIndex) - 1687 DeoptimizationInputData::kOsrPcOffsetIndex) -
1688 kHeapObjectTag)); 1688 kHeapObjectTag));
1689 1689
1690 // Compute the target address = code_obj + header_size + osr_offset 1690 // Compute the target address = code_obj + header_size + osr_offset
1691 // <entry_addr> = <code_obj> + #header_size + <osr_offset> 1691 // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1692 __ daddu(v0, v0, a1); 1692 __ daddu(v0, v0, a1);
1693 __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag); 1693 __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
1694 1694
1695 // And "return" to the OSR entry point of the function. 1695 // And "return" to the OSR entry point of the function.
(...skipping 33 matching lines...)
1729 // consistent state for a simple pop operation. 1729 // consistent state for a simple pop operation.
1730 1730
1731 __ Dsubu(sp, sp, Operand(2 * kPointerSize)); 1731 __ Dsubu(sp, sp, Operand(2 * kPointerSize));
1732 __ Dlsa(sp, sp, argc, kPointerSizeLog2); 1732 __ Dlsa(sp, sp, argc, kPointerSizeLog2);
1733 __ mov(scratch, argc); 1733 __ mov(scratch, argc);
1734 __ Pop(this_arg, arg_array); // Overwrite argc 1734 __ Pop(this_arg, arg_array); // Overwrite argc
1735 __ Movz(arg_array, undefined_value, scratch); // if argc == 0 1735 __ Movz(arg_array, undefined_value, scratch); // if argc == 0
1736 __ Movz(this_arg, undefined_value, scratch); // if argc == 0 1736 __ Movz(this_arg, undefined_value, scratch); // if argc == 0
1737 __ Dsubu(scratch, scratch, Operand(1)); 1737 __ Dsubu(scratch, scratch, Operand(1));
1738 __ Movz(arg_array, undefined_value, scratch); // if argc == 1 1738 __ Movz(arg_array, undefined_value, scratch); // if argc == 1
1739 __ ld(receiver, MemOperand(sp)); 1739 __ Ld(receiver, MemOperand(sp));
1740 __ sd(this_arg, MemOperand(sp)); 1740 __ Sd(this_arg, MemOperand(sp));
1741 } 1741 }
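The Movz sequence above does branch-free argument defaulting: Movz(dst, src, cond) moves only when cond is zero, and decrementing the scratch register lets each arity trigger its own set of moves. A standalone model (illustrative; the ifs stand in for the conditional moves):

    #include <cstdint>

    // Default thisArg/argArray to undefined for argc < 2.
    void DefaultApplyArgs(int argc, uintptr_t popped0, uintptr_t popped1,
                          uintptr_t undefined,
                          uintptr_t* this_arg, uintptr_t* arg_array) {
      *this_arg = popped0;
      *arg_array = popped1;
      int scratch = argc;
      if (scratch == 0) { *arg_array = undefined; *this_arg = undefined; }
      scratch -= 1;
      if (scratch == 0) { *arg_array = undefined; }  // argc == 1
    }

The same pattern recurs below in Generate_ReflectApply and Generate_ReflectConstruct, with three popped values instead of two.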
1742 1742
1743 // ----------- S t a t e ------------- 1743 // ----------- S t a t e -------------
1744 // -- a0 : argArray 1744 // -- a0 : argArray
1745 // -- a1 : receiver 1745 // -- a1 : receiver
1746 // -- a3 : undefined root value 1746 // -- a3 : undefined root value
1747 // -- sp[0] : thisArg 1747 // -- sp[0] : thisArg
1748 // ----------------------------------- 1748 // -----------------------------------
1749 1749
1750 // 2. Make sure the receiver is actually callable. 1750 // 2. Make sure the receiver is actually callable.
1751 Label receiver_not_callable; 1751 Label receiver_not_callable;
1752 __ JumpIfSmi(receiver, &receiver_not_callable); 1752 __ JumpIfSmi(receiver, &receiver_not_callable);
1753 __ ld(a4, FieldMemOperand(receiver, HeapObject::kMapOffset)); 1753 __ Ld(a4, FieldMemOperand(receiver, HeapObject::kMapOffset));
1754 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); 1754 __ Lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
1755 __ And(a4, a4, Operand(1 << Map::kIsCallable)); 1755 __ And(a4, a4, Operand(1 << Map::kIsCallable));
1756 __ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg)); 1756 __ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg));
1757 1757
1758 // 3. Tail call with no arguments if argArray is null or undefined. 1758 // 3. Tail call with no arguments if argArray is null or undefined.
1759 Label no_arguments; 1759 Label no_arguments;
1760 __ JumpIfRoot(arg_array, Heap::kNullValueRootIndex, &no_arguments); 1760 __ JumpIfRoot(arg_array, Heap::kNullValueRootIndex, &no_arguments);
1761 __ Branch(&no_arguments, eq, arg_array, Operand(undefined_value)); 1761 __ Branch(&no_arguments, eq, arg_array, Operand(undefined_value));
1762 1762
1763 // 4a. Apply the receiver to the given argArray (passing undefined for 1763 // 4a. Apply the receiver to the given argArray (passing undefined for
1764 // new.target). 1764 // new.target).
1765 DCHECK(undefined_value.is(a3)); 1765 DCHECK(undefined_value.is(a3));
1766 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); 1766 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1767 1767
1768 // 4b. The argArray is either null or undefined, so we tail call without any 1768 // 4b. The argArray is either null or undefined, so we tail call without any
1769 // arguments to the receiver. 1769 // arguments to the receiver.
1770 __ bind(&no_arguments); 1770 __ bind(&no_arguments);
1771 { 1771 {
1772 __ mov(a0, zero_reg); 1772 __ mov(a0, zero_reg);
1773 DCHECK(receiver.is(a1)); 1773 DCHECK(receiver.is(a1));
1774 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 1774 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1775 } 1775 }
1776 1776
1777 // 4c. The receiver is not callable, throw an appropriate TypeError. 1777 // 4c. The receiver is not callable, throw an appropriate TypeError.
1778 __ bind(&receiver_not_callable); 1778 __ bind(&receiver_not_callable);
1779 { 1779 {
1780 __ sd(receiver, MemOperand(sp)); 1780 __ Sd(receiver, MemOperand(sp));
1781 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); 1781 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1782 } 1782 }
1783 } 1783 }
1784 1784
1785 // static 1785 // static
1786 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { 1786 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1787 // 1. Make sure we have at least one argument. 1787 // 1. Make sure we have at least one argument.
1788 // a0: actual number of arguments 1788 // a0: actual number of arguments
1789 { 1789 {
1790 Label done; 1790 Label done;
1791 __ Branch(&done, ne, a0, Operand(zero_reg)); 1791 __ Branch(&done, ne, a0, Operand(zero_reg));
1792 __ PushRoot(Heap::kUndefinedValueRootIndex); 1792 __ PushRoot(Heap::kUndefinedValueRootIndex);
1793 __ Daddu(a0, a0, Operand(1)); 1793 __ Daddu(a0, a0, Operand(1));
1794 __ bind(&done); 1794 __ bind(&done);
1795 } 1795 }
1796 1796
1797 // 2. Get the function to call (passed as receiver) from the stack. 1797 // 2. Get the function to call (passed as receiver) from the stack.
1798 // a0: actual number of arguments 1798 // a0: actual number of arguments
1799 __ Dlsa(at, sp, a0, kPointerSizeLog2); 1799 __ Dlsa(at, sp, a0, kPointerSizeLog2);
1800 __ ld(a1, MemOperand(at)); 1800 __ Ld(a1, MemOperand(at));
1801 1801
1802 // 3. Shift arguments and return address one slot down on the stack 1802 // 3. Shift arguments and return address one slot down on the stack
1803 // (overwriting the original receiver). Adjust argument count to make 1803 // (overwriting the original receiver). Adjust argument count to make
1804 // the original first argument the new receiver. 1804 // the original first argument the new receiver.
1805 // a0: actual number of arguments 1805 // a0: actual number of arguments
1806 // a1: function 1806 // a1: function
1807 { 1807 {
1808 Label loop; 1808 Label loop;
1809 // Calculate the copy start address (destination). Copy end address is sp. 1809 // Calculate the copy start address (destination). Copy end address is sp.
1810 __ Dlsa(a2, sp, a0, kPointerSizeLog2); 1810 __ Dlsa(a2, sp, a0, kPointerSizeLog2);
1811 1811
1812 __ bind(&loop); 1812 __ bind(&loop);
1813 __ ld(at, MemOperand(a2, -kPointerSize)); 1813 __ Ld(at, MemOperand(a2, -kPointerSize));
1814 __ sd(at, MemOperand(a2)); 1814 __ Sd(at, MemOperand(a2));
1815 __ Dsubu(a2, a2, Operand(kPointerSize)); 1815 __ Dsubu(a2, a2, Operand(kPointerSize));
1816 __ Branch(&loop, ne, a2, Operand(sp)); 1816 __ Branch(&loop, ne, a2, Operand(sp));
1817 // Adjust the actual number of arguments and remove the top element 1817 // Adjust the actual number of arguments and remove the top element
1818 // (which is a copy of the last argument). 1818 // (which is a copy of the last argument).
1819 __ Dsubu(a0, a0, Operand(1)); 1819 __ Dsubu(a0, a0, Operand(1));
1820 __ Pop(); 1820 __ Pop();
1821 } 1821 }
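A standalone model of the shift loop above, with the stack viewed as an array indexed from sp (illustrative):

    #include <cstdint>

    // Each slot takes the value of the slot below it, so the receiver slot
    // (index argc) is overwritten; the caller then pops the stale bottom
    // slot and decrements argc, as done right after the loop above.
    void ShiftArgsUp(uintptr_t* stack, int argc) {
      for (int i = argc; i >= 1; --i) stack[i] = stack[i - 1];
    }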
1822 1822
1823 // 4. Call the callable. 1823 // 4. Call the callable.
1824 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 1824 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
(...skipping 29 matching lines...)
1854 __ Pop(target, this_argument, arguments_list); 1854 __ Pop(target, this_argument, arguments_list);
1855 __ Movz(arguments_list, undefined_value, scratch); // if argc == 0 1855 __ Movz(arguments_list, undefined_value, scratch); // if argc == 0
1856 __ Movz(this_argument, undefined_value, scratch); // if argc == 0 1856 __ Movz(this_argument, undefined_value, scratch); // if argc == 0
1857 __ Movz(target, undefined_value, scratch); // if argc == 0 1857 __ Movz(target, undefined_value, scratch); // if argc == 0
1858 __ Dsubu(scratch, scratch, Operand(1)); 1858 __ Dsubu(scratch, scratch, Operand(1));
1859 __ Movz(arguments_list, undefined_value, scratch); // if argc == 1 1859 __ Movz(arguments_list, undefined_value, scratch); // if argc == 1
1860 __ Movz(this_argument, undefined_value, scratch); // if argc == 1 1860 __ Movz(this_argument, undefined_value, scratch); // if argc == 1
1861 __ Dsubu(scratch, scratch, Operand(1)); 1861 __ Dsubu(scratch, scratch, Operand(1));
1862 __ Movz(arguments_list, undefined_value, scratch); // if argc == 2 1862 __ Movz(arguments_list, undefined_value, scratch); // if argc == 2
1863 1863
1864 __ sd(this_argument, MemOperand(sp, 0)); // Overwrite receiver 1864 __ Sd(this_argument, MemOperand(sp, 0)); // Overwrite receiver
1865 } 1865 }
1866 1866
1867 // ----------- S t a t e ------------- 1867 // ----------- S t a t e -------------
1868 // -- a0 : argumentsList 1868 // -- a0 : argumentsList
1869 // -- a1 : target 1869 // -- a1 : target
1870 // -- a3 : undefined root value 1870 // -- a3 : undefined root value
1871 // -- sp[0] : thisArgument 1871 // -- sp[0] : thisArgument
1872 // ----------------------------------- 1872 // -----------------------------------
1873 1873
1874 // 2. Make sure the target is actually callable. 1874 // 2. Make sure the target is actually callable.
1875 Label target_not_callable; 1875 Label target_not_callable;
1876 __ JumpIfSmi(target, &target_not_callable); 1876 __ JumpIfSmi(target, &target_not_callable);
1877 __ ld(a4, FieldMemOperand(target, HeapObject::kMapOffset)); 1877 __ Ld(a4, FieldMemOperand(target, HeapObject::kMapOffset));
1878 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); 1878 __ Lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
1879 __ And(a4, a4, Operand(1 << Map::kIsCallable)); 1879 __ And(a4, a4, Operand(1 << Map::kIsCallable));
1880 __ Branch(&target_not_callable, eq, a4, Operand(zero_reg)); 1880 __ Branch(&target_not_callable, eq, a4, Operand(zero_reg));
1881 1881
1882 // 3a. Apply the target to the given argumentsList (passing undefined for 1882 // 3a. Apply the target to the given argumentsList (passing undefined for
1883 // new.target). 1883 // new.target).
1884 DCHECK(undefined_value.is(a3)); 1884 DCHECK(undefined_value.is(a3));
1885 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); 1885 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1886 1886
1887 // 3b. The target is not callable, throw an appropriate TypeError. 1887 // 3b. The target is not callable, throw an appropriate TypeError.
1888 __ bind(&target_not_callable); 1888 __ bind(&target_not_callable);
1889 { 1889 {
1890 __ sd(target, MemOperand(sp)); 1890 __ Sd(target, MemOperand(sp));
1891 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); 1891 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1892 } 1892 }
1893 } 1893 }
1894 1894
1895 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { 1895 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1896 // ----------- S t a t e ------------- 1896 // ----------- S t a t e -------------
1897 // -- a0 : argc 1897 // -- a0 : argc
1898 // -- sp[0] : new.target (optional) (dummy value if argc <= 2) 1898 // -- sp[0] : new.target (optional) (dummy value if argc <= 2)
1899 // -- sp[4] : argumentsList (dummy value if argc <= 1) 1899 // -- sp[4] : argumentsList (dummy value if argc <= 1)
1900 // -- sp[8] : target (dummy value if argc == 0) 1900 // -- sp[8] : target (dummy value if argc == 0)
(...skipping 20 matching lines...)
1921 __ Pop(target, arguments_list, new_target); 1921 __ Pop(target, arguments_list, new_target);
1922 __ Movz(arguments_list, undefined_value, scratch); // if argc == 0 1922 __ Movz(arguments_list, undefined_value, scratch); // if argc == 0
1923 __ Movz(new_target, undefined_value, scratch); // if argc == 0 1923 __ Movz(new_target, undefined_value, scratch); // if argc == 0
1924 __ Movz(target, undefined_value, scratch); // if argc == 0 1924 __ Movz(target, undefined_value, scratch); // if argc == 0
1925 __ Dsubu(scratch, scratch, Operand(1)); 1925 __ Dsubu(scratch, scratch, Operand(1));
1926 __ Movz(arguments_list, undefined_value, scratch); // if argc == 1 1926 __ Movz(arguments_list, undefined_value, scratch); // if argc == 1
1927 __ Movz(new_target, target, scratch); // if argc == 1 1927 __ Movz(new_target, target, scratch); // if argc == 1
1928 __ Dsubu(scratch, scratch, Operand(1)); 1928 __ Dsubu(scratch, scratch, Operand(1));
1929 __ Movz(new_target, target, scratch); // if argc == 2 1929 __ Movz(new_target, target, scratch); // if argc == 2
1930 1930
1931 __ sd(undefined_value, MemOperand(sp, 0)); // Overwrite receiver 1931 __ Sd(undefined_value, MemOperand(sp, 0)); // Overwrite receiver
1932 } 1932 }
1933 1933
1934 // ----------- S t a t e ------------- 1934 // ----------- S t a t e -------------
1935 // -- a0 : argumentsList 1935 // -- a0 : argumentsList
1936 // -- a1 : target 1936 // -- a1 : target
1937 // -- a3 : new.target 1937 // -- a3 : new.target
1938 // -- sp[0] : receiver (undefined) 1938 // -- sp[0] : receiver (undefined)
1939 // ----------------------------------- 1939 // -----------------------------------
1940 1940
1941 // 2. Make sure the target is actually a constructor. 1941 // 2. Make sure the target is actually a constructor.
1942 Label target_not_constructor; 1942 Label target_not_constructor;
1943 __ JumpIfSmi(target, &target_not_constructor); 1943 __ JumpIfSmi(target, &target_not_constructor);
1944 __ ld(a4, FieldMemOperand(target, HeapObject::kMapOffset)); 1944 __ Ld(a4, FieldMemOperand(target, HeapObject::kMapOffset));
1945 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); 1945 __ Lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
1946 __ And(a4, a4, Operand(1 << Map::kIsConstructor)); 1946 __ And(a4, a4, Operand(1 << Map::kIsConstructor));
1947 __ Branch(&target_not_constructor, eq, a4, Operand(zero_reg)); 1947 __ Branch(&target_not_constructor, eq, a4, Operand(zero_reg));
1948 1948
1949 // 3. Make sure the new.target is actually a constructor. 1949 // 3. Make sure the new.target is actually a constructor.
1950 Label new_target_not_constructor; 1950 Label new_target_not_constructor;
1951 __ JumpIfSmi(new_target, &new_target_not_constructor); 1951 __ JumpIfSmi(new_target, &new_target_not_constructor);
1952 __ ld(a4, FieldMemOperand(new_target, HeapObject::kMapOffset)); 1952 __ Ld(a4, FieldMemOperand(new_target, HeapObject::kMapOffset));
1953 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); 1953 __ Lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
1954 __ And(a4, a4, Operand(1 << Map::kIsConstructor)); 1954 __ And(a4, a4, Operand(1 << Map::kIsConstructor));
1955 __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg)); 1955 __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg));
1956 1956
1957 // 4a. Construct the target with the given new.target and argumentsList. 1957 // 4a. Construct the target with the given new.target and argumentsList.
1958 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); 1958 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1959 1959
1960 // 4b. The target is not a constructor, throw an appropriate TypeError. 1960 // 4b. The target is not a constructor, throw an appropriate TypeError.
1961 __ bind(&target_not_constructor); 1961 __ bind(&target_not_constructor);
1962 { 1962 {
1963 __ sd(target, MemOperand(sp)); 1963 __ Sd(target, MemOperand(sp));
1964 __ TailCallRuntime(Runtime::kThrowNotConstructor); 1964 __ TailCallRuntime(Runtime::kThrowNotConstructor);
1965 } 1965 }
1966 1966
1967 // 4c. The new.target is not a constructor, throw an appropriate TypeError. 1967 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
1968 __ bind(&new_target_not_constructor); 1968 __ bind(&new_target_not_constructor);
1969 { 1969 {
1970 __ sd(new_target, MemOperand(sp)); 1970 __ Sd(new_target, MemOperand(sp));
1971 __ TailCallRuntime(Runtime::kThrowNotConstructor); 1971 __ TailCallRuntime(Runtime::kThrowNotConstructor);
1972 } 1972 }
1973 } 1973 }
1974 1974
1975 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { 1975 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1976 // __ sll(a0, a0, kSmiTagSize); 1976 // __ sll(a0, a0, kSmiTagSize);
1977 __ dsll32(a0, a0, 0); 1977 __ dsll32(a0, a0, 0);
1978 __ li(a4, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); 1978 __ li(a4, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1979 __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit()); 1979 __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
1980 __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + 1980 __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1981 kPointerSize)); 1981 kPointerSize));
1982 } 1982 }
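On MIPS64, V8 stores a Smi's 32-bit payload in the upper word, so the dsll32 above is the whole SmiTag operation; as a one-liner (illustrative):

    #include <cstdint>

    int64_t SmiTag64(int32_t value) {
      return static_cast<int64_t>(value) << 32;  // dsll32(reg, reg, 0)
    }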
1983 1983
1984 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { 1984 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1985 // ----------- S t a t e ------------- 1985 // ----------- S t a t e -------------
1986 // -- v0 : result being passed through 1986 // -- v0 : result being passed through
1987 // ----------------------------------- 1987 // -----------------------------------
1988 // Get the number of arguments passed (as a smi), tear down the frame and 1988 // Get the number of arguments passed (as a smi), tear down the frame and
1989 // then tear down the parameters. 1989 // then tear down the parameters.
1990 __ ld(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + 1990 __ Ld(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
1991 kPointerSize))); 1991 kPointerSize)));
1992 __ mov(sp, fp); 1992 __ mov(sp, fp);
1993 __ MultiPop(fp.bit() | ra.bit()); 1993 __ MultiPop(fp.bit() | ra.bit());
1994 __ SmiScale(a4, a1, kPointerSizeLog2); 1994 __ SmiScale(a4, a1, kPointerSizeLog2);
1995 __ Daddu(sp, sp, a4); 1995 __ Daddu(sp, sp, a4);
1996 // Adjust for the receiver. 1996 // Adjust for the receiver.
1997 __ Daddu(sp, sp, Operand(kPointerSize)); 1997 __ Daddu(sp, sp, Operand(kPointerSize));
1998 } 1998 }
1999 1999
2000 // static 2000 // static
(...skipping 13 matching lines...)
2014 Register len = a2; 2014 Register len = a2;
2015 2015
2016 // Create the list of arguments from the array-like argumentsList. 2016 // Create the list of arguments from the array-like argumentsList.
2017 { 2017 {
2018 Label create_arguments, create_array, create_holey_array, create_runtime, 2018 Label create_arguments, create_array, create_holey_array, create_runtime,
2019 done_create; 2019 done_create;
2020 __ JumpIfSmi(arguments_list, &create_runtime); 2020 __ JumpIfSmi(arguments_list, &create_runtime);
2021 2021
2022 // Load the map of argumentsList into a2. 2022 // Load the map of argumentsList into a2.
2023 Register arguments_list_map = a2; 2023 Register arguments_list_map = a2;
2024 __ ld(arguments_list_map, 2024 __ Ld(arguments_list_map,
2025 FieldMemOperand(arguments_list, HeapObject::kMapOffset)); 2025 FieldMemOperand(arguments_list, HeapObject::kMapOffset));
2026 2026
2027 // Load native context into a4. 2027 // Load native context into a4.
2028 Register native_context = a4; 2028 Register native_context = a4;
2029 __ ld(native_context, NativeContextMemOperand()); 2029 __ Ld(native_context, NativeContextMemOperand());
2030 2030
2031 // Check if argumentsList is an (unmodified) arguments object. 2031 // Check if argumentsList is an (unmodified) arguments object.
2032 __ ld(at, ContextMemOperand(native_context, 2032 __ Ld(at, ContextMemOperand(native_context,
2033 Context::SLOPPY_ARGUMENTS_MAP_INDEX)); 2033 Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2034 __ Branch(&create_arguments, eq, arguments_list_map, Operand(at)); 2034 __ Branch(&create_arguments, eq, arguments_list_map, Operand(at));
2035 __ ld(at, ContextMemOperand(native_context, 2035 __ Ld(at, ContextMemOperand(native_context,
2036 Context::STRICT_ARGUMENTS_MAP_INDEX)); 2036 Context::STRICT_ARGUMENTS_MAP_INDEX));
2037 __ Branch(&create_arguments, eq, arguments_list_map, Operand(at)); 2037 __ Branch(&create_arguments, eq, arguments_list_map, Operand(at));
2038 2038
2039 // Check if argumentsList is a fast JSArray. 2039 // Check if argumentsList is a fast JSArray.
2040 __ lbu(v0, FieldMemOperand(a2, Map::kInstanceTypeOffset)); 2040 __ Lbu(v0, FieldMemOperand(a2, Map::kInstanceTypeOffset));
2041 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE)); 2041 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));
2042 2042
2043 // Ask the runtime to create the list (actually a FixedArray). 2043 // Ask the runtime to create the list (actually a FixedArray).
2044 __ bind(&create_runtime); 2044 __ bind(&create_runtime);
2045 { 2045 {
2046 FrameScope scope(masm, StackFrame::INTERNAL); 2046 FrameScope scope(masm, StackFrame::INTERNAL);
2047 __ Push(target, new_target, arguments_list); 2047 __ Push(target, new_target, arguments_list);
2048 __ CallRuntime(Runtime::kCreateListFromArrayLike); 2048 __ CallRuntime(Runtime::kCreateListFromArrayLike);
2049 __ mov(arguments_list, v0); 2049 __ mov(arguments_list, v0);
2050 __ Pop(target, new_target); 2050 __ Pop(target, new_target);
2051 __ lw(len, UntagSmiFieldMemOperand(v0, FixedArray::kLengthOffset)); 2051 __ Lw(len, UntagSmiFieldMemOperand(v0, FixedArray::kLengthOffset));
2052 } 2052 }
2053 __ Branch(&done_create); 2053 __ Branch(&done_create);
2054 2054
2055 // Try to create the list from an arguments object. 2055 // Try to create the list from an arguments object.
2056 __ bind(&create_arguments); 2056 __ bind(&create_arguments);
2057 __ lw(len, UntagSmiFieldMemOperand(arguments_list, 2057 __ Lw(len, UntagSmiFieldMemOperand(arguments_list,
2058 JSArgumentsObject::kLengthOffset)); 2058 JSArgumentsObject::kLengthOffset));
2059 __ ld(a4, FieldMemOperand(arguments_list, JSObject::kElementsOffset)); 2059 __ Ld(a4, FieldMemOperand(arguments_list, JSObject::kElementsOffset));
2060 __ lw(at, UntagSmiFieldMemOperand(a4, FixedArray::kLengthOffset)); 2060 __ Lw(at, UntagSmiFieldMemOperand(a4, FixedArray::kLengthOffset));
2061 __ Branch(&create_runtime, ne, len, Operand(at)); 2061 __ Branch(&create_runtime, ne, len, Operand(at));
2062 __ mov(args, a4); 2062 __ mov(args, a4);
2063 2063
2064 __ Branch(&done_create); 2064 __ Branch(&done_create);
2065 2065
2066 // For holey JSArrays we need to check that the array prototype chain 2066 // For holey JSArrays we need to check that the array prototype chain
2067 // protector is intact and that our prototype is actually Array.prototype. 2067 // protector is intact and that our prototype is actually Array.prototype.
2068 __ bind(&create_holey_array); 2068 __ bind(&create_holey_array);
2069 __ ld(a2, FieldMemOperand(a2, Map::kPrototypeOffset)); 2069 __ Ld(a2, FieldMemOperand(a2, Map::kPrototypeOffset));
2070 __ ld(at, ContextMemOperand(native_context, 2070 __ Ld(at, ContextMemOperand(native_context,
2071 Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); 2071 Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
2072 __ Branch(&create_runtime, ne, a2, Operand(at)); 2072 __ Branch(&create_runtime, ne, a2, Operand(at));
2073 __ LoadRoot(at, Heap::kArrayProtectorRootIndex); 2073 __ LoadRoot(at, Heap::kArrayProtectorRootIndex);
2074 __ lw(a2, FieldMemOperand(at, PropertyCell::kValueOffset)); 2074 __ Lw(a2, FieldMemOperand(at, PropertyCell::kValueOffset));
2075 __ Branch(&create_runtime, ne, a2, 2075 __ Branch(&create_runtime, ne, a2,
2076 Operand(Smi::FromInt(Isolate::kProtectorValid))); 2076 Operand(Smi::FromInt(Isolate::kProtectorValid)));
2077 __ lw(a2, UntagSmiFieldMemOperand(a0, JSArray::kLengthOffset)); 2077 __ Lw(a2, UntagSmiFieldMemOperand(a0, JSArray::kLengthOffset));
2078 __ ld(a0, FieldMemOperand(a0, JSArray::kElementsOffset)); 2078 __ Ld(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
2079 __ Branch(&done_create); 2079 __ Branch(&done_create);
2080 2080
2081 // Try to create the list from a JSArray object. 2081 // Try to create the list from a JSArray object.
2082 __ bind(&create_array); 2082 __ bind(&create_array);
2083 __ lbu(t1, FieldMemOperand(a2, Map::kBitField2Offset)); 2083 __ Lbu(t1, FieldMemOperand(a2, Map::kBitField2Offset));
2084 __ DecodeField<Map::ElementsKindBits>(t1); 2084 __ DecodeField<Map::ElementsKindBits>(t1);
2085 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); 2085 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2086 STATIC_ASSERT(FAST_ELEMENTS == 2); 2086 STATIC_ASSERT(FAST_ELEMENTS == 2);
2087 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); 2087 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
2088 __ Branch(&create_holey_array, eq, t1, Operand(FAST_HOLEY_SMI_ELEMENTS)); 2088 __ Branch(&create_holey_array, eq, t1, Operand(FAST_HOLEY_SMI_ELEMENTS));
2089 __ Branch(&create_holey_array, eq, t1, Operand(FAST_HOLEY_ELEMENTS)); 2089 __ Branch(&create_holey_array, eq, t1, Operand(FAST_HOLEY_ELEMENTS));
2090 __ Branch(&create_runtime, hi, t1, Operand(FAST_ELEMENTS)); 2090 __ Branch(&create_runtime, hi, t1, Operand(FAST_ELEMENTS));
2091 __ lw(a2, UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset)); 2091 __ Lw(a2, UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset));
2092 __ ld(a0, FieldMemOperand(arguments_list, JSArray::kElementsOffset)); 2092 __ Ld(a0, FieldMemOperand(arguments_list, JSArray::kElementsOffset));
2093 2093
2094 __ bind(&done_create); 2094 __ bind(&done_create);
2095 } 2095 }
2096 2096
2097 // Check for stack overflow. 2097 // Check for stack overflow.
2098 { 2098 {
2099 // Check the stack for overflow. We are not trying to catch interruptions 2099 // Check the stack for overflow. We are not trying to catch interruptions
2100 // (i.e. debug break and preemption) here, so check the "real stack limit". 2100 // (i.e. debug break and preemption) here, so check the "real stack limit".
2101 Label done; 2101 Label done;
2102 __ LoadRoot(a4, Heap::kRealStackLimitRootIndex); 2102 __ LoadRoot(a4, Heap::kRealStackLimitRootIndex);
(...skipping 21 matching lines...)
2124 Register src = a4; 2124 Register src = a4;
2125 Register scratch = len; 2125 Register scratch = len;
2126 2126
2127 __ daddiu(src, args, FixedArray::kHeaderSize - kHeapObjectTag); 2127 __ daddiu(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
2128 __ Branch(&done, eq, len, Operand(zero_reg), i::USE_DELAY_SLOT); 2128 __ Branch(&done, eq, len, Operand(zero_reg), i::USE_DELAY_SLOT);
2129 __ mov(a0, len); // The 'len' argument for Call() or Construct(). 2129 __ mov(a0, len); // The 'len' argument for Call() or Construct().
2130 __ dsll(scratch, len, kPointerSizeLog2); 2130 __ dsll(scratch, len, kPointerSizeLog2);
2131 __ Dsubu(scratch, sp, Operand(scratch)); 2131 __ Dsubu(scratch, sp, Operand(scratch));
2132 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex); 2132 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
2133 __ bind(&loop); 2133 __ bind(&loop);
2134 __ ld(a5, MemOperand(src)); 2134 __ Ld(a5, MemOperand(src));
2135 __ Branch(&push, ne, a5, Operand(t1)); 2135 __ Branch(&push, ne, a5, Operand(t1));
2136 __ LoadRoot(a5, Heap::kUndefinedValueRootIndex); 2136 __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
2137 __ bind(&push); 2137 __ bind(&push);
2138 __ daddiu(src, src, kPointerSize); 2138 __ daddiu(src, src, kPointerSize);
2139 __ Push(a5); 2139 __ Push(a5);
2140 __ Branch(&loop, ne, scratch, Operand(sp)); 2140 __ Branch(&loop, ne, scratch, Operand(sp));
2141 __ bind(&done); 2141 __ bind(&done);
2142 } 2142 }
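A standalone model of the push loop above, which copies len elements onto the stack while replacing holes with undefined (illustrative; the generated code walks the source upward and compares against sp rather than counting):

    #include <cstdint>

    void PushArguments(const uintptr_t* elements, int len,
                       uintptr_t the_hole, uintptr_t undefined,
                       uintptr_t** sp) {
      for (int i = 0; i < len; ++i) {
        uintptr_t value = elements[i];
        *(--*sp) = (value == the_hole) ? undefined : value;  // push
      }
    }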
2143 2143
2144 // ----------- S t a t e ------------- 2144 // ----------- S t a t e -------------
(...skipping 23 matching lines...)
2168 Handle<Code> code) { 2168 Handle<Code> code) {
2169 // ----------- S t a t e ------------- 2169 // ----------- S t a t e -------------
2170 // -- a1 : the target to call (can be any Object) 2170 // -- a1 : the target to call (can be any Object)
2171 // -- a2 : start index (to support rest parameters) 2171 // -- a2 : start index (to support rest parameters)
2172 // -- ra : return address. 2172 // -- ra : return address.
2173 // -- sp[0] : thisArgument 2173 // -- sp[0] : thisArgument
2174 // ----------------------------------- 2174 // -----------------------------------
2175 2175
2176 // Check if we have an arguments adaptor frame below the function frame. 2176 // Check if we have an arguments adaptor frame below the function frame.
2177 Label arguments_adaptor, arguments_done; 2177 Label arguments_adaptor, arguments_done;
2178 __ ld(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2178 __ Ld(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2179 __ ld(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset)); 2179 __ Ld(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset));
2180 __ Branch(&arguments_adaptor, eq, a0, 2180 __ Branch(&arguments_adaptor, eq, a0,
2181 Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); 2181 Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2182 { 2182 {
2183 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 2183 __ Ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2184 __ ld(a0, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset)); 2184 __ Ld(a0, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset));
2185 __ lw(a0, 2185 __ Lw(a0,
2186 FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset)); 2186 FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
2187 __ mov(a3, fp); 2187 __ mov(a3, fp);
2188 } 2188 }
2189 __ Branch(&arguments_done); 2189 __ Branch(&arguments_done);
2190 __ bind(&arguments_adaptor); 2190 __ bind(&arguments_adaptor);
2191 { 2191 {
2192 // Just get the length from the ArgumentsAdaptorFrame. 2192 // Just get the length from the ArgumentsAdaptorFrame.
2193 __ lw(a0, UntagSmiMemOperand( 2193 __ Lw(a0, UntagSmiMemOperand(
2194 a3, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2194 a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
2195 } 2195 }
2196 __ bind(&arguments_done); 2196 __ bind(&arguments_done);
2197 2197
2198 Label stack_empty, stack_done, stack_overflow; 2198 Label stack_empty, stack_done, stack_overflow;
2199 __ Subu(a0, a0, a2); 2199 __ Subu(a0, a0, a2);
2200 __ Branch(&stack_empty, le, a0, Operand(zero_reg)); 2200 __ Branch(&stack_empty, le, a0, Operand(zero_reg));
2201 { 2201 {
2202 // Check for stack overflow. 2202 // Check for stack overflow.
2203 Generate_StackOverflowCheck(masm, a0, a4, a5, &stack_overflow); 2203 Generate_StackOverflowCheck(masm, a0, a4, a5, &stack_overflow);
2204 2204
2205 // Forward the arguments from the caller frame. 2205 // Forward the arguments from the caller frame.
2206 { 2206 {
2207 Label loop; 2207 Label loop;
2208 __ mov(a2, a0); 2208 __ mov(a2, a0);
2209 __ bind(&loop); 2209 __ bind(&loop);
2210 { 2210 {
2211 __ Dlsa(at, a3, a2, kPointerSizeLog2); 2211 __ Dlsa(at, a3, a2, kPointerSizeLog2);
2212 __ ld(at, MemOperand(at, 1 * kPointerSize)); 2212 __ Ld(at, MemOperand(at, 1 * kPointerSize));
2213 __ push(at); 2213 __ push(at);
2214 __ Subu(a2, a2, Operand(1)); 2214 __ Subu(a2, a2, Operand(1));
2215 __ Branch(&loop, ne, a2, Operand(zero_reg)); 2215 __ Branch(&loop, ne, a2, Operand(zero_reg));
2216 } 2216 }
2217 } 2217 }
2218 } 2218 }
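A standalone model of the forwarding loop above: count arguments are pushed from the caller's frame, highest slot first (illustrative; the extra pointer-size offset in the generated code skips the receiver slot):

    #include <cstdint>

    void ForwardArgs(const uintptr_t* caller_args, int count, uintptr_t** sp) {
      for (int i = count; i >= 1; --i) {
        *(--*sp) = caller_args[i];  // Dlsa + Ld + push per iteration
      }
    }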
2219 __ Branch(&stack_done); 2219 __ Branch(&stack_done);
2220 __ bind(&stack_overflow); 2220 __ bind(&stack_overflow);
2221 __ TailCallRuntime(Runtime::kThrowStackOverflow); 2221 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2222 __ bind(&stack_empty); 2222 __ bind(&stack_empty);
(...skipping 40 matching lines...)
2263 Register scratch3) { 2263 Register scratch3) {
2264 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); 2264 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2265 Comment cmnt(masm, "[ PrepareForTailCall"); 2265 Comment cmnt(masm, "[ PrepareForTailCall");
2266 2266
2267 // Prepare for tail call only if ES2015 tail call elimination is enabled. 2267 // Prepare for tail call only if ES2015 tail call elimination is enabled.
2268 Label done; 2268 Label done;
2269 ExternalReference is_tail_call_elimination_enabled = 2269 ExternalReference is_tail_call_elimination_enabled =
2270 ExternalReference::is_tail_call_elimination_enabled_address( 2270 ExternalReference::is_tail_call_elimination_enabled_address(
2271 masm->isolate()); 2271 masm->isolate());
2272 __ li(at, Operand(is_tail_call_elimination_enabled)); 2272 __ li(at, Operand(is_tail_call_elimination_enabled));
2273 __ lb(scratch1, MemOperand(at)); 2273 __ Lb(scratch1, MemOperand(at));
2274 __ Branch(&done, eq, scratch1, Operand(zero_reg)); 2274 __ Branch(&done, eq, scratch1, Operand(zero_reg));
2275 2275
2276 // Drop possible interpreter handler/stub frame. 2276 // Drop possible interpreter handler/stub frame.
2277 { 2277 {
2278 Label no_interpreter_frame; 2278 Label no_interpreter_frame;
2279 __ ld(scratch3, 2279 __ Ld(scratch3,
2280 MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset)); 2280 MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
2281 __ Branch(&no_interpreter_frame, ne, scratch3, 2281 __ Branch(&no_interpreter_frame, ne, scratch3,
2282 Operand(StackFrame::TypeToMarker(StackFrame::STUB))); 2282 Operand(StackFrame::TypeToMarker(StackFrame::STUB)));
2283 __ ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2283 __ Ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2284 __ bind(&no_interpreter_frame); 2284 __ bind(&no_interpreter_frame);
2285 } 2285 }
2286 2286
2287 // Check if next frame is an arguments adaptor frame. 2287 // Check if next frame is an arguments adaptor frame.
2288 Register caller_args_count_reg = scratch1; 2288 Register caller_args_count_reg = scratch1;
2289 Label no_arguments_adaptor, formal_parameter_count_loaded; 2289 Label no_arguments_adaptor, formal_parameter_count_loaded;
2290 __ ld(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2290 __ Ld(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2291 __ ld(scratch3, 2291 __ Ld(scratch3,
2292 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset)); 2292 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
2293 __ Branch(&no_arguments_adaptor, ne, scratch3, 2293 __ Branch(&no_arguments_adaptor, ne, scratch3,
2294 Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); 2294 Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2295 2295
2296 // Drop current frame and load arguments count from arguments adaptor frame. 2296 // Drop current frame and load arguments count from arguments adaptor frame.
2297 __ mov(fp, scratch2); 2297 __ mov(fp, scratch2);
2298 __ lw(caller_args_count_reg, 2298 __ Lw(caller_args_count_reg,
2299 UntagSmiMemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2299 UntagSmiMemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2300 __ Branch(&formal_parameter_count_loaded); 2300 __ Branch(&formal_parameter_count_loaded);
2301 2301
2302 __ bind(&no_arguments_adaptor); 2302 __ bind(&no_arguments_adaptor);
2303 // Load caller's formal parameter count. 2303 // Load caller's formal parameter count.
2304 __ ld(scratch1, 2304 __ Ld(scratch1,
2305 MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset)); 2305 MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2306 __ ld(scratch1, 2306 __ Ld(scratch1,
2307 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset)); 2307 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2308 __ lw(caller_args_count_reg, 2308 __ Lw(caller_args_count_reg,
2309 FieldMemOperand(scratch1, 2309 FieldMemOperand(scratch1,
2310 SharedFunctionInfo::kFormalParameterCountOffset)); 2310 SharedFunctionInfo::kFormalParameterCountOffset));
2311 2311
2312 __ bind(&formal_parameter_count_loaded); 2312 __ bind(&formal_parameter_count_loaded);
2313 2313
2314 ParameterCount callee_args_count(args_reg); 2314 ParameterCount callee_args_count(args_reg);
2315 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, 2315 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2316 scratch3); 2316 scratch3);
2317 __ bind(&done); 2317 __ bind(&done);
2318 } 2318 }
2319 } // namespace 2319 } // namespace
2320 2320
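Note on the change pattern above and throughout this file: the raw assembler mnemonics (ld, sd, lw, lb, lbu, ...) are replaced by capitalized MacroAssembler helpers (Ld, Sd, Lw, Lb, Lbu, ...), so every memory access in the builtins goes through one central place. A minimal sketch of what such a wrapper can look like is below; the large-offset branch and the use of the at scratch register are illustrative assumptions, not necessarily the exact V8 implementation (the helper may simply forward to the assembler instruction).

  void MacroAssembler::Ld(Register rd, const MemOperand& rs) {
    if (is_int16(rs.offset())) {
      ld(rd, rs);  // Offset fits the 16-bit immediate field: emit directly.
    } else {
      // Assumed large-offset path: materialize the offset in the scratch
      // register, add the base, then load with a zero offset.
      li(at, Operand(rs.offset()));
      daddu(at, at, rs.rm());
      ld(rd, MemOperand(at, 0));
    }
  }

Centralizing the accesses this way lets later changes (for example unaligned-access handling) be made in one function instead of at every call site.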
2321 // static 2321 // static
2322 void Builtins::Generate_CallFunction(MacroAssembler* masm, 2322 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2323 ConvertReceiverMode mode, 2323 ConvertReceiverMode mode,
2324 TailCallMode tail_call_mode) { 2324 TailCallMode tail_call_mode) {
2325 // ----------- S t a t e ------------- 2325 // ----------- S t a t e -------------
2326 // -- a0 : the number of arguments (not including the receiver) 2326 // -- a0 : the number of arguments (not including the receiver)
2327 // -- a1 : the function to call (checked to be a JSFunction) 2327 // -- a1 : the function to call (checked to be a JSFunction)
2328 // ----------------------------------- 2328 // -----------------------------------
2329 __ AssertFunction(a1); 2329 __ AssertFunction(a1);
2330 2330
2331 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) 2331 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2332 // Check that function is not a "classConstructor". 2332 // Check that function is not a "classConstructor".
2333 Label class_constructor; 2333 Label class_constructor;
2334 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); 2334 __ Ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2335 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset)); 2335 __ Lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset));
2336 __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte)); 2336 __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
2337 __ Branch(&class_constructor, ne, at, Operand(zero_reg)); 2337 __ Branch(&class_constructor, ne, at, Operand(zero_reg));
2338 2338
2339 // Enter the context of the function; ToObject has to run in the function 2339 // Enter the context of the function; ToObject has to run in the function
2340 // context, and we also need to take the global proxy from the function 2340 // context, and we also need to take the global proxy from the function
2341 // context in case of conversion. 2341 // context in case of conversion.
2342 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset == 2342 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
2343 SharedFunctionInfo::kStrictModeByteOffset); 2343 SharedFunctionInfo::kStrictModeByteOffset);
2344 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); 2344 __ Ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
2345 // We need to convert the receiver for non-native sloppy mode functions. 2345 // We need to convert the receiver for non-native sloppy mode functions.
2346 Label done_convert; 2346 Label done_convert;
2347 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset)); 2347 __ Lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
2348 __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) | 2348 __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
2349 (1 << SharedFunctionInfo::kStrictModeBitWithinByte))); 2349 (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
2350 __ Branch(&done_convert, ne, at, Operand(zero_reg)); 2350 __ Branch(&done_convert, ne, at, Operand(zero_reg));
2351 { 2351 {
2352 // ----------- S t a t e ------------- 2352 // ----------- S t a t e -------------
2353 // -- a0 : the number of arguments (not including the receiver) 2353 // -- a0 : the number of arguments (not including the receiver)
2354 // -- a1 : the function to call (checked to be a JSFunction) 2354 // -- a1 : the function to call (checked to be a JSFunction)
2355 // -- a2 : the shared function info. 2355 // -- a2 : the shared function info.
2356 // -- cp : the function context. 2356 // -- cp : the function context.
2357 // ----------------------------------- 2357 // -----------------------------------
2358 2358
2359 if (mode == ConvertReceiverMode::kNullOrUndefined) { 2359 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2360 // Patch receiver to global proxy. 2360 // Patch receiver to global proxy.
2361 __ LoadGlobalProxy(a3); 2361 __ LoadGlobalProxy(a3);
2362 } else { 2362 } else {
2363 Label convert_to_object, convert_receiver; 2363 Label convert_to_object, convert_receiver;
2364 __ Dlsa(at, sp, a0, kPointerSizeLog2); 2364 __ Dlsa(at, sp, a0, kPointerSizeLog2);
2365 __ ld(a3, MemOperand(at)); 2365 __ Ld(a3, MemOperand(at));
2366 __ JumpIfSmi(a3, &convert_to_object); 2366 __ JumpIfSmi(a3, &convert_to_object);
2367 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); 2367 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2368 __ GetObjectType(a3, a4, a4); 2368 __ GetObjectType(a3, a4, a4);
2369 __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE)); 2369 __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE));
2370 if (mode != ConvertReceiverMode::kNotNullOrUndefined) { 2370 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2371 Label convert_global_proxy; 2371 Label convert_global_proxy;
2372 __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex, 2372 __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
2373 &convert_global_proxy); 2373 &convert_global_proxy);
2374 __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object); 2374 __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
2375 __ bind(&convert_global_proxy); 2375 __ bind(&convert_global_proxy);
(...skipping 13 matching lines...)
2389 __ Push(a0, a1); 2389 __ Push(a0, a1);
2390 __ mov(a0, a3); 2390 __ mov(a0, a3);
2391 __ Push(cp); 2391 __ Push(cp);
2392 __ Call(masm->isolate()->builtins()->ToObject(), 2392 __ Call(masm->isolate()->builtins()->ToObject(),
2393 RelocInfo::CODE_TARGET); 2393 RelocInfo::CODE_TARGET);
2394 __ Pop(cp); 2394 __ Pop(cp);
2395 __ mov(a3, v0); 2395 __ mov(a3, v0);
2396 __ Pop(a0, a1); 2396 __ Pop(a0, a1);
2397 __ SmiUntag(a0); 2397 __ SmiUntag(a0);
2398 } 2398 }
2399 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); 2399 __ Ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2400 __ bind(&convert_receiver); 2400 __ bind(&convert_receiver);
2401 } 2401 }
2402 __ Dlsa(at, sp, a0, kPointerSizeLog2); 2402 __ Dlsa(at, sp, a0, kPointerSizeLog2);
2403 __ sd(a3, MemOperand(at)); 2403 __ Sd(a3, MemOperand(at));
2404 } 2404 }
2405 __ bind(&done_convert); 2405 __ bind(&done_convert);
2406 2406
2407 // ----------- S t a t e ------------- 2407 // ----------- S t a t e -------------
2408 // -- a0 : the number of arguments (not including the receiver) 2408 // -- a0 : the number of arguments (not including the receiver)
2409 // -- a1 : the function to call (checked to be a JSFunction) 2409 // -- a1 : the function to call (checked to be a JSFunction)
2410 // -- a2 : the shared function info. 2410 // -- a2 : the shared function info.
2411 // -- cp : the function context. 2411 // -- cp : the function context.
2412 // ----------------------------------- 2412 // -----------------------------------
2413 2413
2414 if (tail_call_mode == TailCallMode::kAllow) { 2414 if (tail_call_mode == TailCallMode::kAllow) {
2415 PrepareForTailCall(masm, a0, t0, t1, t2); 2415 PrepareForTailCall(masm, a0, t0, t1, t2);
2416 } 2416 }
2417 2417
2418 __ lw(a2, 2418 __ Lw(a2,
2419 FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset)); 2419 FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
2420 ParameterCount actual(a0); 2420 ParameterCount actual(a0);
2421 ParameterCount expected(a2); 2421 ParameterCount expected(a2);
2422 __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION, 2422 __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION,
2423 CheckDebugStepCallWrapper()); 2423 CheckDebugStepCallWrapper());
2424 2424
2425 // The function is a "classConstructor", need to raise an exception. 2425 // The function is a "classConstructor", need to raise an exception.
2426 __ bind(&class_constructor); 2426 __ bind(&class_constructor);
2427 { 2427 {
2428 FrameScope frame(masm, StackFrame::INTERNAL); 2428 FrameScope frame(masm, StackFrame::INTERNAL);
(...skipping 10 matching lines...)
2439 // -- a1 : the function to call (checked to be a JSBoundFunction) 2439 // -- a1 : the function to call (checked to be a JSBoundFunction)
2440 // ----------------------------------- 2440 // -----------------------------------
2441 __ AssertBoundFunction(a1); 2441 __ AssertBoundFunction(a1);
2442 2442
2443 if (tail_call_mode == TailCallMode::kAllow) { 2443 if (tail_call_mode == TailCallMode::kAllow) {
2444 PrepareForTailCall(masm, a0, t0, t1, t2); 2444 PrepareForTailCall(masm, a0, t0, t1, t2);
2445 } 2445 }
2446 2446
2447 // Patch the receiver to [[BoundThis]]. 2447 // Patch the receiver to [[BoundThis]].
2448 { 2448 {
2449 __ ld(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset)); 2449 __ Ld(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
2450 __ Dlsa(a4, sp, a0, kPointerSizeLog2); 2450 __ Dlsa(a4, sp, a0, kPointerSizeLog2);
2451 __ sd(at, MemOperand(a4)); 2451 __ Sd(at, MemOperand(a4));
2452 } 2452 }
2453 2453
2454 // Load [[BoundArguments]] into a2 and its length into a4. 2454 // Load [[BoundArguments]] into a2 and its length into a4.
2455 __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); 2455 __ Ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
2456 __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset)); 2456 __ Lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
2457 2457
2458 // ----------- S t a t e ------------- 2458 // ----------- S t a t e -------------
2459 // -- a0 : the number of arguments (not including the receiver) 2459 // -- a0 : the number of arguments (not including the receiver)
2460 // -- a1 : the function to call (checked to be a JSBoundFunction) 2460 // -- a1 : the function to call (checked to be a JSBoundFunction)
2461 // -- a2 : the [[BoundArguments]] (implemented as FixedArray) 2461 // -- a2 : the [[BoundArguments]] (implemented as FixedArray)
2462 // -- a4 : the number of [[BoundArguments]] 2462 // -- a4 : the number of [[BoundArguments]]
2463 // ----------------------------------- 2463 // -----------------------------------
2464 2464
2465 // Reserve stack space for the [[BoundArguments]]. 2465 // Reserve stack space for the [[BoundArguments]].
2466 { 2466 {
(...skipping 14 matching lines...)
2481 __ bind(&done); 2481 __ bind(&done);
2482 } 2482 }
2483 2483
2484 // Relocate arguments down the stack. 2484 // Relocate arguments down the stack.
2485 { 2485 {
2486 Label loop, done_loop; 2486 Label loop, done_loop;
2487 __ mov(a5, zero_reg); 2487 __ mov(a5, zero_reg);
2488 __ bind(&loop); 2488 __ bind(&loop);
2489 __ Branch(&done_loop, gt, a5, Operand(a0)); 2489 __ Branch(&done_loop, gt, a5, Operand(a0));
2490 __ Dlsa(a6, sp, a4, kPointerSizeLog2); 2490 __ Dlsa(a6, sp, a4, kPointerSizeLog2);
2491 __ ld(at, MemOperand(a6)); 2491 __ Ld(at, MemOperand(a6));
2492 __ Dlsa(a6, sp, a5, kPointerSizeLog2); 2492 __ Dlsa(a6, sp, a5, kPointerSizeLog2);
2493 __ sd(at, MemOperand(a6)); 2493 __ Sd(at, MemOperand(a6));
2494 __ Daddu(a4, a4, Operand(1)); 2494 __ Daddu(a4, a4, Operand(1));
2495 __ Daddu(a5, a5, Operand(1)); 2495 __ Daddu(a5, a5, Operand(1));
2496 __ Branch(&loop); 2496 __ Branch(&loop);
2497 __ bind(&done_loop); 2497 __ bind(&done_loop);
2498 } 2498 }
2499 2499
2500 // Copy [[BoundArguments]] to the stack (below the arguments). 2500 // Copy [[BoundArguments]] to the stack (below the arguments).
2501 { 2501 {
2502 Label loop, done_loop; 2502 Label loop, done_loop;
2503 __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset)); 2503 __ Lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
2504 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 2504 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2505 __ bind(&loop); 2505 __ bind(&loop);
2506 __ Dsubu(a4, a4, Operand(1)); 2506 __ Dsubu(a4, a4, Operand(1));
2507 __ Branch(&done_loop, lt, a4, Operand(zero_reg)); 2507 __ Branch(&done_loop, lt, a4, Operand(zero_reg));
2508 __ Dlsa(a5, a2, a4, kPointerSizeLog2); 2508 __ Dlsa(a5, a2, a4, kPointerSizeLog2);
2509 __ ld(at, MemOperand(a5)); 2509 __ Ld(at, MemOperand(a5));
2510 __ Dlsa(a5, sp, a0, kPointerSizeLog2); 2510 __ Dlsa(a5, sp, a0, kPointerSizeLog2);
2511 __ sd(at, MemOperand(a5)); 2511 __ Sd(at, MemOperand(a5));
2512 __ Daddu(a0, a0, Operand(1)); 2512 __ Daddu(a0, a0, Operand(1));
2513 __ Branch(&loop); 2513 __ Branch(&loop);
2514 __ bind(&done_loop); 2514 __ bind(&done_loop);
2515 } 2515 }
2516 2516
2517 // Call the [[BoundTargetFunction]] via the Call builtin. 2517 // Call the [[BoundTargetFunction]] via the Call builtin.
2518 __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); 2518 __ Ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
2519 __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, 2519 __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
2520 masm->isolate()))); 2520 masm->isolate())));
2521 __ ld(at, MemOperand(at)); 2521 __ Ld(at, MemOperand(at));
2522 __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag)); 2522 __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
2523 __ Jump(at); 2523 __ Jump(at);
2524 } 2524 }
2525 2525
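A note for readers of the two loops above: Dlsa(rd, base, index, sa) computes rd = base + (index << sa), so each Dlsa/Ld or Dlsa/Sd pair is an indexed stack-slot access. A C-style model of the whole shuffle, assuming sp[i] denotes the i-th stack slot from the top and M is the number of [[BoundArguments]]:

  for (int i = 0; i <= argc; ++i)    // Slide the arguments (and the patched
    sp[i] = sp[i + M];               // receiver) into the reserved slots.
  for (int j = M - 1; j >= 0; --j)   // Splice the bound arguments in after
    sp[argc++] = bound_args[j];      // them, updating the argument count.
  // The original copy of the receiver survives untouched at sp[argc].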
2526 // static 2526 // static
2527 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode, 2527 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2528 TailCallMode tail_call_mode) { 2528 TailCallMode tail_call_mode) {
2529 // ----------- S t a t e ------------- 2529 // ----------- S t a t e -------------
2530 // -- a0 : the number of arguments (not including the receiver) 2530 // -- a0 : the number of arguments (not including the receiver)
2531 // -- a1 : the target to call (can be any Object). 2531 // -- a1 : the target to call (can be any Object).
2532 // ----------------------------------- 2532 // -----------------------------------
2533 2533
2534 Label non_callable, non_function, non_smi; 2534 Label non_callable, non_function, non_smi;
2535 __ JumpIfSmi(a1, &non_callable); 2535 __ JumpIfSmi(a1, &non_callable);
2536 __ bind(&non_smi); 2536 __ bind(&non_smi);
2537 __ GetObjectType(a1, t1, t2); 2537 __ GetObjectType(a1, t1, t2);
2538 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), 2538 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2539 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE)); 2539 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
2540 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), 2540 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2541 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE)); 2541 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));
2542 2542
2543 // Check if target has a [[Call]] internal method. 2543 // Check if target has a [[Call]] internal method.
2544 __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset)); 2544 __ Lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
2545 __ And(t1, t1, Operand(1 << Map::kIsCallable)); 2545 __ And(t1, t1, Operand(1 << Map::kIsCallable));
2546 __ Branch(&non_callable, eq, t1, Operand(zero_reg)); 2546 __ Branch(&non_callable, eq, t1, Operand(zero_reg));
2547 2547
2548 __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE)); 2548 __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));
2549 2549
2550 // 0. Prepare for tail call if necessary. 2550 // 0. Prepare for tail call if necessary.
2551 if (tail_call_mode == TailCallMode::kAllow) { 2551 if (tail_call_mode == TailCallMode::kAllow) {
2552 PrepareForTailCall(masm, a0, t0, t1, t2); 2552 PrepareForTailCall(masm, a0, t0, t1, t2);
2553 } 2553 }
2554 2554
2555 // 1. Runtime fallback for Proxy [[Call]]. 2555 // 1. Runtime fallback for Proxy [[Call]].
2556 __ Push(a1); 2556 __ Push(a1);
2557 // Increase the arguments size to include the pushed function and the 2557 // Increase the arguments size to include the pushed function and the
2558 // existing receiver on the stack. 2558 // existing receiver on the stack.
2559 __ Daddu(a0, a0, 2); 2559 __ Daddu(a0, a0, 2);
2560 // Tail-call to the runtime. 2560 // Tail-call to the runtime.
2561 __ JumpToExternalReference( 2561 __ JumpToExternalReference(
2562 ExternalReference(Runtime::kJSProxyCall, masm->isolate())); 2562 ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2563 2563
2564 // 2. Call to something else, which might have a [[Call]] internal method (if 2564 // 2. Call to something else, which might have a [[Call]] internal method (if
2565 // not, we raise an exception). 2565 // not, we raise an exception).
2566 __ bind(&non_function); 2566 __ bind(&non_function);
2567 // Overwrite the original receiver with the (original) target. 2567 // Overwrite the original receiver with the (original) target.
2568 __ Dlsa(at, sp, a0, kPointerSizeLog2); 2568 __ Dlsa(at, sp, a0, kPointerSizeLog2);
2569 __ sd(a1, MemOperand(at)); 2569 __ Sd(a1, MemOperand(at));
2570 // Let the "call_as_function_delegate" take care of the rest. 2570 // Let the "call_as_function_delegate" take care of the rest.
2571 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1); 2571 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
2572 __ Jump(masm->isolate()->builtins()->CallFunction( 2572 __ Jump(masm->isolate()->builtins()->CallFunction(
2573 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode), 2573 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2574 RelocInfo::CODE_TARGET); 2574 RelocInfo::CODE_TARGET);
2575 2575
2576 // 3. Call to something that is not callable. 2576 // 3. Call to something that is not callable.
2577 __ bind(&non_callable); 2577 __ bind(&non_callable);
2578 { 2578 {
2579 FrameScope scope(masm, StackFrame::INTERNAL); 2579 FrameScope scope(masm, StackFrame::INTERNAL);
(...skipping 11 matching lines...) Expand all
2591 Register scratch2 = t1; 2591 Register scratch2 = t1;
2592 2592
2593 Register spread = a2; 2593 Register spread = a2;
2594 Register spread_map = a4; 2594 Register spread_map = a4;
2595 2595
2596 Register spread_len = a4; 2596 Register spread_len = a4;
2597 2597
2598 Register native_context = a5; 2598 Register native_context = a5;
2599 2599
2600 Label runtime_call, push_args; 2600 Label runtime_call, push_args;
2601 __ ld(spread, MemOperand(sp, 0)); 2601 __ Ld(spread, MemOperand(sp, 0));
2602 __ JumpIfSmi(spread, &runtime_call); 2602 __ JumpIfSmi(spread, &runtime_call);
2603 __ ld(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); 2603 __ Ld(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
2604 __ ld(native_context, NativeContextMemOperand()); 2604 __ Ld(native_context, NativeContextMemOperand());
2605 2605
2606 // Check that the spread is an array. 2606 // Check that the spread is an array.
2607 __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset)); 2607 __ Lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset));
2608 __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE)); 2608 __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE));
2609 2609
2610 // Check that we have the original ArrayPrototype. 2610 // Check that we have the original ArrayPrototype.
2611 __ ld(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); 2611 __ Ld(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
2612 __ ld(scratch2, ContextMemOperand(native_context, 2612 __ Ld(scratch2, ContextMemOperand(native_context,
2613 Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); 2613 Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
2614 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); 2614 __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
2615 2615
2616 // Check that the ArrayPrototype hasn't been modified in a way that would 2616 // Check that the ArrayPrototype hasn't been modified in a way that would
2617 // affect iteration. 2617 // affect iteration.
2618 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); 2618 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
2619 __ ld(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); 2619 __ Ld(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
2620 __ Branch(&runtime_call, ne, scratch, 2620 __ Branch(&runtime_call, ne, scratch,
2621 Operand(Smi::FromInt(Isolate::kProtectorValid))); 2621 Operand(Smi::FromInt(Isolate::kProtectorValid)));
2622 2622
2623 // Check that the map of the initial array iterator hasn't changed. 2623 // Check that the map of the initial array iterator hasn't changed.
2624 __ ld(scratch, 2624 __ Ld(scratch,
2625 ContextMemOperand(native_context, 2625 ContextMemOperand(native_context,
2626 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX)); 2626 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
2627 __ ld(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); 2627 __ Ld(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
2628 __ ld(scratch2, 2628 __ Ld(scratch2,
2629 ContextMemOperand(native_context, 2629 ContextMemOperand(native_context,
2630 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); 2630 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
2631 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); 2631 __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
2632 2632
2633 // For FastPacked kinds, iteration will have the same effect as simply 2633 // For FastPacked kinds, iteration will have the same effect as simply
2634 // accessing each property in order. 2634 // accessing each property in order.
2635 Label no_protector_check; 2635 Label no_protector_check;
2636 __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); 2636 __ Lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
2637 __ DecodeField<Map::ElementsKindBits>(scratch); 2637 __ DecodeField<Map::ElementsKindBits>(scratch);
2638 __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS)); 2638 __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS));
2639 // For non-FastHoley kinds, we can skip the protector check. 2639 // For non-FastHoley kinds, we can skip the protector check.
2640 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS)); 2640 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS));
2641 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS)); 2641 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS));
2642 // Check the ArrayProtector cell. 2642 // Check the ArrayProtector cell.
2643 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); 2643 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
2644 __ ld(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); 2644 __ Ld(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
2645 __ Branch(&runtime_call, ne, scratch, 2645 __ Branch(&runtime_call, ne, scratch,
2646 Operand(Smi::FromInt(Isolate::kProtectorValid))); 2646 Operand(Smi::FromInt(Isolate::kProtectorValid)));
2647 2647
2648 __ bind(&no_protector_check); 2648 __ bind(&no_protector_check);
2649 // Load the FixedArray backing store, but use the length from the array. 2649 // Load the FixedArray backing store, but use the length from the array.
2650 __ lw(spread_len, UntagSmiFieldMemOperand(spread, JSArray::kLengthOffset)); 2650 __ Lw(spread_len, UntagSmiFieldMemOperand(spread, JSArray::kLengthOffset));
2651 __ ld(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); 2651 __ Ld(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
2652 __ Branch(&push_args); 2652 __ Branch(&push_args);
2653 2653
2654 __ bind(&runtime_call); 2654 __ bind(&runtime_call);
2655 { 2655 {
2656 // Call the builtin for the result of the spread. 2656 // Call the builtin for the result of the spread.
2657 FrameScope scope(masm, StackFrame::INTERNAL); 2657 FrameScope scope(masm, StackFrame::INTERNAL);
2658 __ SmiTag(argc); 2658 __ SmiTag(argc);
2659 __ Push(constructor, new_target, argc, spread); 2659 __ Push(constructor, new_target, argc, spread);
2660 __ CallRuntime(Runtime::kSpreadIterableFixed); 2660 __ CallRuntime(Runtime::kSpreadIterableFixed);
2661 __ mov(spread, v0); 2661 __ mov(spread, v0);
2662 __ Pop(constructor, new_target, argc); 2662 __ Pop(constructor, new_target, argc);
2663 __ SmiUntag(argc); 2663 __ SmiUntag(argc);
2664 } 2664 }
2665 2665
2666 { 2666 {
2667 // Calculate the new nargs including the result of the spread. 2667 // Calculate the new nargs including the result of the spread.
2668 __ lw(spread_len, 2668 __ Lw(spread_len,
2669 UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset)); 2669 UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset));
2670 2670
2671 __ bind(&push_args); 2671 __ bind(&push_args);
2672 // argc += spread_len - 1. Subtract 1 for the spread itself. 2672 // argc += spread_len - 1. Subtract 1 for the spread itself.
2673 __ Daddu(argc, argc, spread_len); 2673 __ Daddu(argc, argc, spread_len);
2674 __ Dsubu(argc, argc, Operand(1)); 2674 __ Dsubu(argc, argc, Operand(1));
2675 2675
2676 // Pop the spread argument off the stack. 2676 // Pop the spread argument off the stack.
2677 __ Pop(scratch); 2677 __ Pop(scratch);
2678 } 2678 }
(...skipping 14 matching lines...)
2693 __ bind(&done); 2693 __ bind(&done);
2694 } 2694 }
2695 2695
2696 // Put the evaluated spread onto the stack as additional arguments. 2696 // Put the evaluated spread onto the stack as additional arguments.
2697 { 2697 {
2698 __ mov(scratch, zero_reg); 2698 __ mov(scratch, zero_reg);
2699 Label done, push, loop; 2699 Label done, push, loop;
2700 __ bind(&loop); 2700 __ bind(&loop);
2701 __ Branch(&done, eq, scratch, Operand(spread_len)); 2701 __ Branch(&done, eq, scratch, Operand(spread_len));
2702 __ Dlsa(scratch2, spread, scratch, kPointerSizeLog2); 2702 __ Dlsa(scratch2, spread, scratch, kPointerSizeLog2);
2703 __ ld(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); 2703 __ Ld(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
2704 __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push); 2704 __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push);
2705 __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex); 2705 __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex);
2706 __ bind(&push); 2706 __ bind(&push);
2707 __ Push(scratch2); 2707 __ Push(scratch2);
2708 __ Daddu(scratch, scratch, Operand(1)); 2708 __ Daddu(scratch, scratch, Operand(1));
2709 __ Branch(&loop); 2709 __ Branch(&loop);
2710 __ bind(&done); 2710 __ bind(&done);
2711 } 2711 }
2712 } 2712 }
2713 2713
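A compact model of the fast path generated above, assuming elements is the FixedArray backing store of the spread: holes in a FAST_HOLEY_* store are pushed as undefined, which is only safe because the protector checks earlier guarantee that actual iteration could not have observed anything else.

  for (int i = 0; i < spread_len; ++i) {
    Object element = elements[i];
    // A hole read from the backing store is pushed as undefined.
    push(element == the_hole ? undefined : element);
  }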
(...skipping 19 matching lines...)
2733 // -- a3 : the new target (checked to be a constructor) 2733 // -- a3 : the new target (checked to be a constructor)
2734 // ----------------------------------- 2734 // -----------------------------------
2735 __ AssertFunction(a1); 2735 __ AssertFunction(a1);
2736 2736
2737 // Calling convention for function-specific ConstructStubs requires 2737 // Calling convention for function-specific ConstructStubs requires
2738 // a2 to contain either an AllocationSite or undefined. 2738 // a2 to contain either an AllocationSite or undefined.
2739 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); 2739 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2740 2740
2741 // Tail call to the function-specific construct stub (still in the caller 2741 // Tail call to the function-specific construct stub (still in the caller
2742 // context at this point). 2742 // context at this point).
2743 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); 2743 __ Ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2744 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset)); 2744 __ Ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
2745 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); 2745 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
2746 __ Jump(at); 2746 __ Jump(at);
2747 } 2747 }
2748 2748
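The Daddu/Jump pair above is the usual pattern for entering a Code object: a4 holds a tagged pointer to the Code object, so the first instruction lives past the object header. As a short model (illustrative names):

  entry = code_object - kHeapObjectTag + Code::kHeaderSize;  // untag, skip header
  jump(entry);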
2749 // static 2749 // static
2750 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { 2750 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2751 // ----------- S t a t e ------------- 2751 // ----------- S t a t e -------------
2752 // -- a0 : the number of arguments (not including the receiver) 2752 // -- a0 : the number of arguments (not including the receiver)
2753 // -- a1 : the function to call (checked to be a JSBoundFunction) 2753 // -- a1 : the function to call (checked to be a JSBoundFunction)
2754 // -- a3 : the new target (checked to be a constructor) 2754 // -- a3 : the new target (checked to be a constructor)
2755 // ----------------------------------- 2755 // -----------------------------------
2756 __ AssertBoundFunction(a1); 2756 __ AssertBoundFunction(a1);
2757 2757
2758 // Load [[BoundArguments]] into a2 and its length into a4. 2758 // Load [[BoundArguments]] into a2 and its length into a4.
2759 __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); 2759 __ Ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
2760 __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset)); 2760 __ Lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
2761 2761
2762 // ----------- S t a t e ------------- 2762 // ----------- S t a t e -------------
2763 // -- a0 : the number of arguments (not including the receiver) 2763 // -- a0 : the number of arguments (not including the receiver)
2764 // -- a1 : the function to call (checked to be a JSBoundFunction) 2764 // -- a1 : the function to call (checked to be a JSBoundFunction)
2765 // -- a2 : the [[BoundArguments]] (implemented as FixedArray) 2765 // -- a2 : the [[BoundArguments]] (implemented as FixedArray)
2766 // -- a3 : the new target (checked to be a constructor) 2766 // -- a3 : the new target (checked to be a constructor)
2767 // -- a4 : the number of [[BoundArguments]] 2767 // -- a4 : the number of [[BoundArguments]]
2768 // ----------------------------------- 2768 // -----------------------------------
2769 2769
2770 // Reserve stack space for the [[BoundArguments]]. 2770 // Reserve stack space for the [[BoundArguments]].
(...skipping 15 matching lines...)
2786 __ bind(&done); 2786 __ bind(&done);
2787 } 2787 }
2788 2788
2789 // Relocate arguments down the stack. 2789 // Relocate arguments down the stack.
2790 { 2790 {
2791 Label loop, done_loop; 2791 Label loop, done_loop;
2792 __ mov(a5, zero_reg); 2792 __ mov(a5, zero_reg);
2793 __ bind(&loop); 2793 __ bind(&loop);
2794 __ Branch(&done_loop, ge, a5, Operand(a0)); 2794 __ Branch(&done_loop, ge, a5, Operand(a0));
2795 __ Dlsa(a6, sp, a4, kPointerSizeLog2); 2795 __ Dlsa(a6, sp, a4, kPointerSizeLog2);
2796 __ ld(at, MemOperand(a6)); 2796 __ Ld(at, MemOperand(a6));
2797 __ Dlsa(a6, sp, a5, kPointerSizeLog2); 2797 __ Dlsa(a6, sp, a5, kPointerSizeLog2);
2798 __ sd(at, MemOperand(a6)); 2798 __ Sd(at, MemOperand(a6));
2799 __ Daddu(a4, a4, Operand(1)); 2799 __ Daddu(a4, a4, Operand(1));
2800 __ Daddu(a5, a5, Operand(1)); 2800 __ Daddu(a5, a5, Operand(1));
2801 __ Branch(&loop); 2801 __ Branch(&loop);
2802 __ bind(&done_loop); 2802 __ bind(&done_loop);
2803 } 2803 }
2804 2804
2805 // Copy [[BoundArguments]] to the stack (below the arguments). 2805 // Copy [[BoundArguments]] to the stack (below the arguments).
2806 { 2806 {
2807 Label loop, done_loop; 2807 Label loop, done_loop;
2808 __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset)); 2808 __ Lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset));
2809 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 2809 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2810 __ bind(&loop); 2810 __ bind(&loop);
2811 __ Dsubu(a4, a4, Operand(1)); 2811 __ Dsubu(a4, a4, Operand(1));
2812 __ Branch(&done_loop, lt, a4, Operand(zero_reg)); 2812 __ Branch(&done_loop, lt, a4, Operand(zero_reg));
2813 __ Dlsa(a5, a2, a4, kPointerSizeLog2); 2813 __ Dlsa(a5, a2, a4, kPointerSizeLog2);
2814 __ ld(at, MemOperand(a5)); 2814 __ Ld(at, MemOperand(a5));
2815 __ Dlsa(a5, sp, a0, kPointerSizeLog2); 2815 __ Dlsa(a5, sp, a0, kPointerSizeLog2);
2816 __ sd(at, MemOperand(a5)); 2816 __ Sd(at, MemOperand(a5));
2817 __ Daddu(a0, a0, Operand(1)); 2817 __ Daddu(a0, a0, Operand(1));
2818 __ Branch(&loop); 2818 __ Branch(&loop);
2819 __ bind(&done_loop); 2819 __ bind(&done_loop);
2820 } 2820 }
2821 2821
2822 // Patch new.target to [[BoundTargetFunction]] if new.target equals target. 2822 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2823 { 2823 {
2824 Label skip_load; 2824 Label skip_load;
2825 __ Branch(&skip_load, ne, a1, Operand(a3)); 2825 __ Branch(&skip_load, ne, a1, Operand(a3));
2826 __ ld(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); 2826 __ Ld(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
2827 __ bind(&skip_load); 2827 __ bind(&skip_load);
2828 } 2828 }
2829 2829
2830 // Construct the [[BoundTargetFunction]] via the Construct builtin. 2830 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2831 __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); 2831 __ Ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
2832 __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); 2832 __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
2833 __ ld(at, MemOperand(at)); 2833 __ Ld(at, MemOperand(at));
2834 __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag)); 2834 __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
2835 __ Jump(at); 2835 __ Jump(at);
2836 } 2836 }
2837 2837
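The skip_load block above implements new.target forwarding; as a one-line model in spec notation:

  // if (new_target == target) new_target = target.[[BoundTargetFunction]]

So a3 is only redirected when the bound function itself was used as new.target, and an explicit new.target supplied by the caller is preserved.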
2838 // static 2838 // static
2839 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) { 2839 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2840 // ----------- S t a t e ------------- 2840 // ----------- S t a t e -------------
2841 // -- a0 : the number of arguments (not including the receiver) 2841 // -- a0 : the number of arguments (not including the receiver)
2842 // -- a1 : the constructor to call (checked to be a JSProxy) 2842 // -- a1 : the constructor to call (checked to be a JSProxy)
2843 // -- a3 : the new target (either the same as the constructor or 2843 // -- a3 : the new target (either the same as the constructor or
(...skipping 16 matching lines...)
2860 // -- a1 : the constructor to call (can be any Object) 2860 // -- a1 : the constructor to call (can be any Object)
2861 // -- a3 : the new target (either the same as the constructor or 2861 // -- a3 : the new target (either the same as the constructor or
2862 // the JSFunction on which new was invoked initially) 2862 // the JSFunction on which new was invoked initially)
2863 // ----------------------------------- 2863 // -----------------------------------
2864 2864
2865 // Check if target is a Smi. 2865 // Check if target is a Smi.
2866 Label non_constructor; 2866 Label non_constructor;
2867 __ JumpIfSmi(a1, &non_constructor); 2867 __ JumpIfSmi(a1, &non_constructor);
2868 2868
2869 // Dispatch based on instance type. 2869 // Dispatch based on instance type.
2870 __ ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset)); 2870 __ Ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
2871 __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset)); 2871 __ Lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
2872 __ Jump(masm->isolate()->builtins()->ConstructFunction(), 2872 __ Jump(masm->isolate()->builtins()->ConstructFunction(),
2873 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE)); 2873 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
2874 2874
2875 // Check if target has a [[Construct]] internal method. 2875 // Check if target has a [[Construct]] internal method.
2876 __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset)); 2876 __ Lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
2877 __ And(t3, t3, Operand(1 << Map::kIsConstructor)); 2877 __ And(t3, t3, Operand(1 << Map::kIsConstructor));
2878 __ Branch(&non_constructor, eq, t3, Operand(zero_reg)); 2878 __ Branch(&non_constructor, eq, t3, Operand(zero_reg));
2879 2879
2880 // Only dispatch to bound functions after checking whether they are 2880 // Only dispatch to bound functions after checking whether they are
2881 // constructors. 2881 // constructors.
2882 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(), 2882 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
2883 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE)); 2883 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));
2884 2884
2885 // Only dispatch to proxies after checking whether they are constructors. 2885 // Only dispatch to proxies after checking whether they are constructors.
2886 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET, 2886 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
2887 eq, t2, Operand(JS_PROXY_TYPE)); 2887 eq, t2, Operand(JS_PROXY_TYPE));
2888 2888
2889 // Called Construct on an exotic Object with a [[Construct]] internal method. 2889 // Called Construct on an exotic Object with a [[Construct]] internal method.
2890 { 2890 {
2891 // Overwrite the original receiver with the (original) target. 2891 // Overwrite the original receiver with the (original) target.
2892 __ Dlsa(at, sp, a0, kPointerSizeLog2); 2892 __ Dlsa(at, sp, a0, kPointerSizeLog2);
2893 __ sd(a1, MemOperand(at)); 2893 __ Sd(a1, MemOperand(at));
2894 // Let the "call_as_constructor_delegate" take care of the rest. 2894 // Let the "call_as_constructor_delegate" take care of the rest.
2895 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1); 2895 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
2896 __ Jump(masm->isolate()->builtins()->CallFunction(), 2896 __ Jump(masm->isolate()->builtins()->CallFunction(),
2897 RelocInfo::CODE_TARGET); 2897 RelocInfo::CODE_TARGET);
2898 } 2898 }
2899 2899
2900 // Called Construct on an Object that doesn't have a [[Construct]] internal 2900 // Called Construct on an Object that doesn't have a [[Construct]] internal
2901 // method. 2901 // method.
2902 __ bind(&non_constructor); 2902 __ bind(&non_constructor);
2903 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), 2903 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
(...skipping 86 matching lines...)
2990 2990
2991 // Copy the arguments (including the receiver) to the new stack frame. 2991 // Copy the arguments (including the receiver) to the new stack frame.
2992 // a0: copy start address 2992 // a0: copy start address
2993 // a1: function 2993 // a1: function
2994 // a2: expected number of arguments 2994 // a2: expected number of arguments
2995 // a3: new target (passed through to callee) 2995 // a3: new target (passed through to callee)
2996 // a4: copy end address 2996 // a4: copy end address
2997 2997
2998 Label copy; 2998 Label copy;
2999 __ bind(&copy); 2999 __ bind(&copy);
3000 __ ld(a5, MemOperand(a0)); 3000 __ Ld(a5, MemOperand(a0));
3001 __ push(a5); 3001 __ push(a5);
3002 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a4)); 3002 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a4));
3003 __ daddiu(a0, a0, -kPointerSize); // In delay slot. 3003 __ daddiu(a0, a0, -kPointerSize); // In delay slot.
3004 3004
3005 __ jmp(&invoke); 3005 __ jmp(&invoke);
3006 } 3006 }
3007 3007
3008 { // Too few parameters: Actual < expected. 3008 { // Too few parameters: Actual < expected.
3009 __ bind(&too_few); 3009 __ bind(&too_few);
3010 EnterArgumentsAdaptorFrame(masm); 3010 EnterArgumentsAdaptorFrame(masm);
(...skipping 12 matching lines...)
3023 __ Daddu(a7, fp, kPointerSize); 3023 __ Daddu(a7, fp, kPointerSize);
3024 3024
3025 // Copy the arguments (including the receiver) to the new stack frame. 3025 // Copy the arguments (including the receiver) to the new stack frame.
3026 // a0: copy start address 3026 // a0: copy start address
3027 // a1: function 3027 // a1: function
3028 // a2: expected number of arguments 3028 // a2: expected number of arguments
3029 // a3: new target (passed through to callee) 3029 // a3: new target (passed through to callee)
3030 // a7: copy end address 3030 // a7: copy end address
3031 Label copy; 3031 Label copy;
3032 __ bind(&copy); 3032 __ bind(&copy);
3033 __ ld(a4, MemOperand(a0)); // Adjusted above for return addr and receiver. 3033 __ Ld(a4, MemOperand(a0)); // Adjusted above for return addr and receiver.
3034 __ Dsubu(sp, sp, kPointerSize); 3034 __ Dsubu(sp, sp, kPointerSize);
3035 __ Dsubu(a0, a0, kPointerSize); 3035 __ Dsubu(a0, a0, kPointerSize);
3036 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7)); 3036 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7));
3037 __ sd(a4, MemOperand(sp)); // In the delay slot. 3037 __ Sd(a4, MemOperand(sp)); // In the delay slot.
3038 3038
3039 // Fill the remaining expected arguments with undefined. 3039 // Fill the remaining expected arguments with undefined.
3040 // a1: function 3040 // a1: function
3041 // a2: expected number of arguments 3041 // a2: expected number of arguments
3042 // a3: new target (passed through to callee) 3042 // a3: new target (passed through to callee)
3043 __ LoadRoot(a5, Heap::kUndefinedValueRootIndex); 3043 __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
3044 __ dsll(a6, a2, kPointerSizeLog2); 3044 __ dsll(a6, a2, kPointerSizeLog2);
3045 __ Dsubu(a4, fp, Operand(a6)); 3045 __ Dsubu(a4, fp, Operand(a6));
3046 // Adjust for frame. 3046 // Adjust for frame.
3047 __ Dsubu(a4, a4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + 3047 __ Dsubu(a4, a4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
3048 2 * kPointerSize)); 3048 2 * kPointerSize));
3049 3049
3050 Label fill; 3050 Label fill;
3051 __ bind(&fill); 3051 __ bind(&fill);
3052 __ Dsubu(sp, sp, kPointerSize); 3052 __ Dsubu(sp, sp, kPointerSize);
3053 __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a4)); 3053 __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a4));
3054 __ sd(a5, MemOperand(sp)); 3054 __ Sd(a5, MemOperand(sp));
3055 } 3055 }
3056 3056
3057 // Call the entry point. 3057 // Call the entry point.
3058 __ bind(&invoke); 3058 __ bind(&invoke);
3059 __ mov(a0, a2); 3059 __ mov(a0, a2);
3060 // a0 : expected number of arguments 3060 // a0 : expected number of arguments
3061 // a1 : function (passed through to callee) 3061 // a1 : function (passed through to callee)
3062 // a3: new target (passed through to callee) 3062 // a3: new target (passed through to callee)
3063 __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); 3063 __ Ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
3064 __ Call(a4); 3064 __ Call(a4);
3065 3065
3066 // Store offset of return address for deoptimizer. 3066 // Store offset of return address for deoptimizer.
3067 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); 3067 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
3068 3068
3069 // Exit frame and return. 3069 // Exit frame and return.
3070 LeaveArgumentsAdaptorFrame(masm); 3070 LeaveArgumentsAdaptorFrame(masm);
3071 __ Ret(); 3071 __ Ret();
3072 3072
3073 // ------------------------------------------- 3073 // -------------------------------------------
3074 // Don't adapt arguments. 3074 // Don't adapt arguments.
3075 // ------------------------------------------- 3075 // -------------------------------------------
3076 __ bind(&dont_adapt_arguments); 3076 __ bind(&dont_adapt_arguments);
3077 __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); 3077 __ Ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
3078 __ Jump(a4); 3078 __ Jump(a4);
3079 3079
3080 __ bind(&stack_overflow); 3080 __ bind(&stack_overflow);
3081 { 3081 {
3082 FrameScope frame(masm, StackFrame::MANUAL); 3082 FrameScope frame(masm, StackFrame::MANUAL);
3083 __ CallRuntime(Runtime::kThrowStackOverflow); 3083 __ CallRuntime(Runtime::kThrowStackOverflow);
3084 __ break_(0xCC); 3084 __ break_(0xCC);
3085 } 3085 }
3086 } 3086 }
3087 3087
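A model of the two adaptor paths, with actual and expected as the argument counts: the visible "too few" path copies the receiver and the actual arguments, then pads with undefined up to the expected count; the "enough" path (partly elided above) analogously copies the receiver plus the expected arguments.

  // "Too few" path, as generated above:
  for (int i = 0; i <= actual; ++i)        // receiver + actual arguments
    push(caller_slot[i]);
  for (int k = actual; k < expected; ++k)  // pad the remainder
    push(undefined);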
(...skipping 22 matching lines...)
3110 __ Daddu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); 3110 __ Daddu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
3111 __ Jump(at); 3111 __ Jump(at);
3112 } 3112 }
3113 3113
3114 #undef __ 3114 #undef __
3115 3115
3116 } // namespace internal 3116 } // namespace internal
3117 } // namespace v8 3117 } // namespace v8
3118 3118
3119 #endif // V8_TARGET_ARCH_MIPS64 3119 #endif // V8_TARGET_ARCH_MIPS64