OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
6 | 6 |
7 #include "src/ast/scopes.h" | 7 #include "src/ast/scopes.h" |
8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 396 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
407 EmitProfilingCounterReset(); | 407 EmitProfilingCounterReset(); |
408 | 408 |
409 __ Bind(&ok); | 409 __ Bind(&ok); |
410 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); | 410 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
411 // Record a mapping of the OSR id to this PC. This is used if the OSR | 411 // Record a mapping of the OSR id to this PC. This is used if the OSR |
412 // entry becomes the target of a bailout. We don't expect it to be, but | 412 // entry becomes the target of a bailout. We don't expect it to be, but |
413 // we want it to work if it is. | 413 // we want it to work if it is. |
414 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); | 414 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); |
415 } | 415 } |
416 | 416 |
// Emits the profiling-counter bookkeeping shared by the normal return
// sequence and (with is_tail_call == true) the tail-call path: decrement
// the counter by a weight derived from the code size, and if it goes
// negative, call the InterruptCheck builtin and reset the counter.
// When is_tail_call is true the result register x0 is not preserved
// around the builtin call, since the caller will not return here.
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    // Weight scales with the code emitted so far; kCodeSizeMultiplier / 2
    // rounds the division, and the result is clamped to
    // [1, kMaxBackEdgeWeight].
    int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  // Skip the interrupt check while the decremented counter is still
  // non-negative ("pl" = ARM64 plus-or-zero condition).
  __ B(pl, &ok);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ Push(x0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ Pop(x0);
  }
  EmitProfilingCounterReset();
  __ Bind(&ok);
}
417 | 441 |
418 void FullCodeGenerator::EmitReturnSequence() { | 442 void FullCodeGenerator::EmitReturnSequence() { |
419 Comment cmnt(masm_, "[ Return sequence"); | 443 Comment cmnt(masm_, "[ Return sequence"); |
420 | 444 |
421 if (return_label_.is_bound()) { | 445 if (return_label_.is_bound()) { |
422 __ B(&return_label_); | 446 __ B(&return_label_); |
423 | 447 |
424 } else { | 448 } else { |
425 __ Bind(&return_label_); | 449 __ Bind(&return_label_); |
426 if (FLAG_trace) { | 450 if (FLAG_trace) { |
427 // Push the return value on the stack as the parameter. | 451 // Push the return value on the stack as the parameter. |
428 // Runtime::TraceExit returns its parameter in x0. | 452 // Runtime::TraceExit returns its parameter in x0. |
429 __ Push(result_register()); | 453 __ Push(result_register()); |
430 __ CallRuntime(Runtime::kTraceExit); | 454 __ CallRuntime(Runtime::kTraceExit); |
431 DCHECK(x0.Is(result_register())); | 455 DCHECK(x0.Is(result_register())); |
432 } | 456 } |
433 // Pretend that the exit is a backwards jump to the entry. | 457 EmitProfilingCounterHandlingForReturnSequence(false); |
434 int weight = 1; | |
435 if (info_->ShouldSelfOptimize()) { | |
436 weight = FLAG_interrupt_budget / FLAG_self_opt_count; | |
437 } else { | |
438 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2; | |
439 weight = Min(kMaxBackEdgeWeight, | |
440 Max(1, distance / kCodeSizeMultiplier)); | |
441 } | |
442 EmitProfilingCounterDecrement(weight); | |
443 Label ok; | |
444 __ B(pl, &ok); | |
445 __ Push(x0); | |
446 __ Call(isolate()->builtins()->InterruptCheck(), | |
447 RelocInfo::CODE_TARGET); | |
448 __ Pop(x0); | |
449 EmitProfilingCounterReset(); | |
450 __ Bind(&ok); | |
451 | 458 |
452 SetReturnPosition(literal()); | 459 SetReturnPosition(literal()); |
453 const Register& current_sp = __ StackPointer(); | 460 const Register& current_sp = __ StackPointer(); |
454 // Nothing ensures 16 bytes alignment here. | 461 // Nothing ensures 16 bytes alignment here. |
455 DCHECK(!current_sp.Is(csp)); | 462 DCHECK(!current_sp.Is(csp)); |
456 __ Mov(current_sp, fp); | 463 __ Mov(current_sp, fp); |
457 __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex)); | 464 __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex)); |
458 // Drop the arguments and receiver and return. | 465 // Drop the arguments and receiver and return. |
459 // TODO(all): This implementation is overkill as it supports 2**31+1 | 466 // TODO(all): This implementation is overkill as it supports 2**31+1 |
460 // arguments, consider how to improve it without creating a security | 467 // arguments, consider how to improve it without creating a security |
(...skipping 2092 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2553 ASM_LOCATION("FullCodeGenerator::EmitCall"); | 2560 ASM_LOCATION("FullCodeGenerator::EmitCall"); |
2554 // Load the arguments. | 2561 // Load the arguments. |
2555 ZoneList<Expression*>* args = expr->arguments(); | 2562 ZoneList<Expression*>* args = expr->arguments(); |
2556 int arg_count = args->length(); | 2563 int arg_count = args->length(); |
2557 for (int i = 0; i < arg_count; i++) { | 2564 for (int i = 0; i < arg_count; i++) { |
2558 VisitForStackValue(args->at(i)); | 2565 VisitForStackValue(args->at(i)); |
2559 } | 2566 } |
2560 | 2567 |
2561 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); | 2568 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); |
2562 SetCallPosition(expr); | 2569 SetCallPosition(expr); |
2563 | 2570 if (expr->tail_call_mode() == TailCallMode::kAllow) { |
| 2571 if (FLAG_trace) { |
| 2572 __ CallRuntime(Runtime::kTraceTailCall); |
| 2573 } |
| 2574 // Update profiling counters before the tail call since we will |
| 2575 // not return to this function. |
| 2576 EmitProfilingCounterHandlingForReturnSequence(true); |
| 2577 } |
2564 Handle<Code> ic = | 2578 Handle<Code> ic = |
2565 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) | 2579 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) |
2566 .code(); | 2580 .code(); |
2567 __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot())); | 2581 __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot())); |
2568 __ Peek(x1, (arg_count + 1) * kXRegSize); | 2582 __ Peek(x1, (arg_count + 1) * kXRegSize); |
2569 // Don't assign a type feedback id to the IC, since type feedback is provided | 2583 // Don't assign a type feedback id to the IC, since type feedback is provided |
2570 // by the vector above. | 2584 // by the vector above. |
2571 CallIC(ic); | 2585 CallIC(ic); |
2572 | 2586 |
2573 RecordJSReturnSite(expr); | 2587 RecordJSReturnSite(expr); |
(...skipping 2135 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4709 } | 4723 } |
4710 | 4724 |
4711 return INTERRUPT; | 4725 return INTERRUPT; |
4712 } | 4726 } |
4713 | 4727 |
4714 | 4728 |
4715 } // namespace internal | 4729 } // namespace internal |
4716 } // namespace v8 | 4730 } // namespace v8 |
4717 | 4731 |
4718 #endif // V8_TARGET_ARCH_ARM64 | 4732 #endif // V8_TARGET_ARCH_ARM64 |
OLD | NEW |