OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 302 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
313 // rbp[2]: function arguments | 313 // rbp[2]: function arguments |
314 // rbp[3]: receiver | 314 // rbp[3]: receiver |
315 // rbp[4]: function | 315 // rbp[4]: function |
316 static const int kArgumentsOffset = 2 * kPointerSize; | 316 static const int kArgumentsOffset = 2 * kPointerSize; |
317 static const int kReceiverOffset = 3 * kPointerSize; | 317 static const int kReceiverOffset = 3 * kPointerSize; |
318 static const int kFunctionOffset = 4 * kPointerSize; | 318 static const int kFunctionOffset = 4 * kPointerSize; |
319 __ push(Operand(rbp, kFunctionOffset)); | 319 __ push(Operand(rbp, kFunctionOffset)); |
320 __ push(Operand(rbp, kArgumentsOffset)); | 320 __ push(Operand(rbp, kArgumentsOffset)); |
321 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | 321 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); |
322 | 322 |
323 // Check the stack for overflow or a break request. | 323 // Check the stack for overflow. We are not trying to catch |
324 // We need to catch preemptions right here, otherwise an unlucky preemption | 324 // interruptions (e.g. debug break and preemption) here, so the "real stack |
325 // could show up as a failed apply. | 325 // limit" is checked. |
326 Label retry_preemption; | 326 Label okay; |
327 Label no_preemption; | 327 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); |
328 __ bind(&retry_preemption); | |
329 ExternalReference stack_guard_limit = | |
330 ExternalReference::address_of_stack_guard_limit(); | |
331 __ movq(kScratchRegister, stack_guard_limit); | |
332 __ movq(rcx, rsp); | 328 __ movq(rcx, rsp); |
333 __ subq(rcx, Operand(kScratchRegister, 0)); | 329 // Make rcx the space we have left. The stack might already be overflowed |
334 // rcx contains the difference between the stack limit and the stack top. | 330 // here which will cause rcx to become negative. |
335 // We use it below to check that there is enough room for the arguments. | 331 __ subq(rcx, kScratchRegister); |
336 __ j(above, &no_preemption); | |
337 | |
338 // Preemption! | |
339 // Because runtime functions always remove the receiver from the stack, we | |
340 // have to fake one to avoid underflowing the stack. | |
341 __ push(rax); | |
342 __ Push(Smi::FromInt(0)); | |
343 | |
344 // Do call to runtime routine. | |
345 __ CallRuntime(Runtime::kStackGuard, 1); | |
346 __ pop(rax); | |
347 __ jmp(&retry_preemption); | |
348 | |
349 __ bind(&no_preemption); | |
350 | |
351 Label okay; | |
352 // Make rdx the space we need for the array when it is unrolled onto the | 332 // Make rdx the space we need for the array when it is unrolled onto the |
353 // stack. | 333 // stack. |
354 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2); | 334 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2); |
| 335 // Check if the arguments will overflow the stack. |
355 __ cmpq(rcx, rdx); | 336 __ cmpq(rcx, rdx); |
356 __ j(greater, &okay); | 337 __ j(greater, &okay); // Signed comparison. |
357 | 338 |
358 // Too bad: Out of stack space. | 339 // Out of stack space. |
359 __ push(Operand(rbp, kFunctionOffset)); | 340 __ push(Operand(rbp, kFunctionOffset)); |
360 __ push(rax); | 341 __ push(rax); |
361 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); | 342 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); |
362 __ bind(&okay); | 343 __ bind(&okay); |
363 // End of stack check. | 344 // End of stack check. |
364 | 345 |
365 // Push current index and limit. | 346 // Push current index and limit. |
366 const int kLimitOffset = | 347 const int kLimitOffset = |
367 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize; | 348 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize; |
368 const int kIndexOffset = kLimitOffset - 1 * kPointerSize; | 349 const int kIndexOffset = kLimitOffset - 1 * kPointerSize; |
(...skipping 896 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1265 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | 1246 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { |
1266 Generate_JSEntryTrampolineHelper(masm, false); | 1247 Generate_JSEntryTrampolineHelper(masm, false); |
1267 } | 1248 } |
1268 | 1249 |
1269 | 1250 |
1270 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 1251 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
1271 Generate_JSEntryTrampolineHelper(masm, true); | 1252 Generate_JSEntryTrampolineHelper(masm, true); |
1272 } | 1253 } |
1273 | 1254 |
1274 } } // namespace v8::internal | 1255 } } // namespace v8::internal |
OLD | NEW |