OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
6 | 6 |
7 #include "src/ast/scopes.h" | 7 #include "src/ast/scopes.h" |
8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 393 matching lines...) |
404 EmitProfilingCounterReset(); | 404 EmitProfilingCounterReset(); |
405 | 405 |
406 __ bind(&ok); | 406 __ bind(&ok); |
407 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); | 407 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
408 // Record a mapping of the OSR id to this PC. This is used if the OSR | 408 // Record a mapping of the OSR id to this PC. This is used if the OSR |
409 // entry becomes the target of a bailout. We don't expect it to be, but | 409 // entry becomes the target of a bailout. We don't expect it to be, but |
410 // we want it to work if it is. | 410 // we want it to work if it is. |
411 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); | 411 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); |
412 } | 412 } |
413 | 413 |
| 414 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence( |
| 415 bool is_tail_call) { |
| 416 // Pretend that the exit is a backwards jump to the entry. |
| 417 int weight = 1; |
| 418 if (info_->ShouldSelfOptimize()) { |
| 419 weight = FLAG_interrupt_budget / FLAG_self_opt_count; |
| 420 } else { |
| 421 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2; |
| 422 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)); |
| 423 } |
| 424 EmitProfilingCounterDecrement(weight); |
| 425 Label ok; |
| 426 __ cmpi(r6, Operand::Zero()); |
| 427 __ bge(&ok); |
| 428 // Don't need to save result register if we are going to do a tail call. |
| 429 if (!is_tail_call) { |
| 430 __ push(r3); |
| 431 } |
| 432 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); |
| 433 if (!is_tail_call) { |
| 434 __ pop(r3); |
| 435 } |
| 436 EmitProfilingCounterReset(); |
| 437 __ bind(&ok); |
| 438 } |
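[Note] The weight computation in the new helper above is the usual full-codegen back-edge heuristic: treat the function exit as a backwards jump whose length is the current pc offset, so larger functions produce a larger profiling-counter decrement. A minimal standalone sketch of that arithmetic follows; the constant values and the ReturnSequenceWeight helper are assumptions for illustration only, not the PPC port's actual tuning.

// Sketch of the back-edge weight heuristic, under assumed constant values.
#include <algorithm>
#include <cstdio>

namespace {
const int kCodeSizeMultiplier = 200;  // assumed value, stands in for the PPC tuning constant
const int kMaxBackEdgeWeight = 127;   // assumed cap on a single counter decrement
}  // namespace

// Mirrors the non-self-optimizing branch above: the further the return is
// from the function entry (pc_offset), the larger the decrement, clamped to
// [1, kMaxBackEdgeWeight].
int ReturnSequenceWeight(int pc_offset) {
  int distance = pc_offset + kCodeSizeMultiplier / 2;
  return std::min(kMaxBackEdgeWeight, std::max(1, distance / kCodeSizeMultiplier));
}

int main() {
  const int offsets[] = {0, 500, 5000, 50000};
  for (int pc : offsets) {
    std::printf("pc_offset=%5d -> weight=%d\n", pc, ReturnSequenceWeight(pc));
  }
  return 0;
}

The effect is that bigger functions drain the interrupt budget faster on each return, so the interrupt check (and with it the optimization decision) tends to fire sooner for them.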
414 | 439 |
415 void FullCodeGenerator::EmitReturnSequence() { | 440 void FullCodeGenerator::EmitReturnSequence() { |
416 Comment cmnt(masm_, "[ Return sequence"); | 441 Comment cmnt(masm_, "[ Return sequence"); |
417 if (return_label_.is_bound()) { | 442 if (return_label_.is_bound()) { |
418 __ b(&return_label_); | 443 __ b(&return_label_); |
419 } else { | 444 } else { |
420 __ bind(&return_label_); | 445 __ bind(&return_label_); |
421 if (FLAG_trace) { | 446 if (FLAG_trace) { |
422 // Push the return value on the stack as the parameter. | 447 // Push the return value on the stack as the parameter. |
423 // Runtime::TraceExit returns its parameter in r3 | 448 // Runtime::TraceExit returns its parameter in r3 |
424 __ push(r3); | 449 __ push(r3); |
425 __ CallRuntime(Runtime::kTraceExit); | 450 __ CallRuntime(Runtime::kTraceExit); |
426 } | 451 } |
427 // Pretend that the exit is a backwards jump to the entry. | 452 EmitProfilingCounterHandlingForReturnSequence(false); |
428 int weight = 1; | |
429 if (info_->ShouldSelfOptimize()) { | |
430 weight = FLAG_interrupt_budget / FLAG_self_opt_count; | |
431 } else { | |
432 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2; | |
433 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)); | |
434 } | |
435 EmitProfilingCounterDecrement(weight); | |
436 Label ok; | |
437 __ cmpi(r6, Operand::Zero()); | |
438 __ bge(&ok); | |
439 __ push(r3); | |
440 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); | |
441 __ pop(r3); | |
442 EmitProfilingCounterReset(); | |
443 __ bind(&ok); | |
444 | 453 |
445 // Make sure that the constant pool is not emitted inside of the return | 454 // Make sure that the constant pool is not emitted inside of the return |
446 // sequence. | 455 // sequence. |
447 { | 456 { |
448 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 457 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
449 int32_t arg_count = info_->scope()->num_parameters() + 1; | 458 int32_t arg_count = info_->scope()->num_parameters() + 1; |
450 int32_t sp_delta = arg_count * kPointerSize; | 459 int32_t sp_delta = arg_count * kPointerSize; |
451 SetReturnPosition(literal()); | 460 SetReturnPosition(literal()); |
452 __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta); | 461 __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta); |
453 __ blr(); | 462 __ blr(); |
(...skipping 2280 matching lines...) |
2734 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { | 2743 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { |
2735 // Load the arguments. | 2744 // Load the arguments. |
2736 ZoneList<Expression*>* args = expr->arguments(); | 2745 ZoneList<Expression*>* args = expr->arguments(); |
2737 int arg_count = args->length(); | 2746 int arg_count = args->length(); |
2738 for (int i = 0; i < arg_count; i++) { | 2747 for (int i = 0; i < arg_count; i++) { |
2739 VisitForStackValue(args->at(i)); | 2748 VisitForStackValue(args->at(i)); |
2740 } | 2749 } |
2741 | 2750 |
2742 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); | 2751 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); |
2743 SetCallPosition(expr); | 2752 SetCallPosition(expr); |
| 2753 if (expr->tail_call_mode() == TailCallMode::kAllow) { |
| 2754 if (FLAG_trace) { |
| 2755 __ CallRuntime(Runtime::kTraceTailCall); |
| 2756 } |
| 2757 // Update profiling counters before the tail call since we will |
| 2758 // not return to this function. |
| 2759 EmitProfilingCounterHandlingForReturnSequence(true); |
| 2760 } |
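[Note] The new block above calls EmitProfilingCounterHandlingForReturnSequence(true) before emitting the call because, for a tail call, the caller's frame is gone and its normal return sequence never runs, so the back-edge accounting has to happen here. A small sketch of that ordering follows, under the assumption that the only thing that matters is whether the caller's return sequence will ever execute; EmitCallSketch, EmitReturnSketch and UpdateCounters are hypothetical names for illustration, not V8 APIs.

// Hypothetical sketch (not V8 code) of when the profiling counters are
// updated relative to a call site versus the return sequence.
#include <cstdio>

enum class TailCallMode { kAllow, kDisallow };

// Stands in for EmitProfilingCounterHandlingForReturnSequence().
void UpdateCounters(const char* where) {
  std::printf("counters updated %s\n", where);
}

void EmitCallSketch(TailCallMode mode) {
  if (mode == TailCallMode::kAllow) {
    UpdateCounters("before tail call");  // caller never returns here
  }
  std::printf("emit the call\n");
}

void EmitReturnSketch() {
  UpdateCounters("in return sequence");  // normal exit path pays here
}

int main() {
  EmitCallSketch(TailCallMode::kAllow);     // tail call: counters handled now
  EmitCallSketch(TailCallMode::kDisallow);  // normal call: nothing extra here
  EmitReturnSketch();                       // normal calls pay at return
  return 0;
}

This also explains the is_tail_call parameter in the shared helper: on the tail-call path the result register is not live afterwards, so the save/restore of r3 around the interrupt check can be skipped.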
2744 Handle<Code> ic = | 2761 Handle<Code> ic = |
2745 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) | 2762 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) |
2746 .code(); | 2763 .code(); |
2747 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot())); | 2764 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot())); |
2748 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); | 2765 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
2749 // Don't assign a type feedback id to the IC, since type feedback is provided | 2766 // Don't assign a type feedback id to the IC, since type feedback is provided |
2750 // by the vector above. | 2767 // by the vector above. |
2751 CallIC(ic); | 2768 CallIC(ic); |
2752 | 2769 |
2753 RecordJSReturnSite(expr); | 2770 RecordJSReturnSite(expr); |
(...skipping 1864 matching lines...) |
4618 return ON_STACK_REPLACEMENT; | 4635 return ON_STACK_REPLACEMENT; |
4619 } | 4636 } |
4620 | 4637 |
4621 DCHECK(interrupt_address == | 4638 DCHECK(interrupt_address == |
4622 isolate->builtins()->OsrAfterStackCheck()->entry()); | 4639 isolate->builtins()->OsrAfterStackCheck()->entry()); |
4623 return OSR_AFTER_STACK_CHECK; | 4640 return OSR_AFTER_STACK_CHECK; |
4624 } | 4641 } |
4625 } // namespace internal | 4642 } // namespace internal |
4626 } // namespace v8 | 4643 } // namespace v8 |
4627 #endif // V8_TARGET_ARCH_PPC | 4644 #endif // V8_TARGET_ARCH_PPC |