| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 323 matching lines...) |
| 334 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { | 334 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { |
| 335 __ li(a2, Operand(profiling_counter_)); | 335 __ li(a2, Operand(profiling_counter_)); |
| 336 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset)); | 336 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset)); |
| 337 __ Subu(a3, a3, Operand(Smi::FromInt(delta))); | 337 __ Subu(a3, a3, Operand(Smi::FromInt(delta))); |
| 338 __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset)); | 338 __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset)); |
| 339 } | 339 } |
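
Note on the decrement above: it subtracts directly on the Smi-tagged counter word, which is valid because a 32-bit Smi is just the integer value shifted left by one bit (tag bit 0), so subtracting two tagged values yields the correctly tagged difference. A minimal standalone sketch of that invariant (plain C++; the helper names are illustrative, not V8's API):

    #include <cstdint>

    // 32-bit Smi: payload in the upper 31 bits, tag bit 0 in the LSB.
    static int32_t SmiFromInt(int32_t v) { return v << 1; }
    static int32_t SmiToInt(int32_t s)   { return s >> 1; }

    // (a << 1) - (b << 1) == (a - b) << 1, so no untagging is needed;
    // this is why a single Subu on the tagged word suffices above.
    static int32_t SmiSub(int32_t a, int32_t b) { return a - b; }
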
| 340 | 340 |
| 341 | 341 |
| 342 void FullCodeGenerator::EmitProfilingCounterReset() { | 342 void FullCodeGenerator::EmitProfilingCounterReset() { |
| 343 int reset_value = FLAG_interrupt_budget; | 343 int reset_value = FLAG_interrupt_budget; |
| 344 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) { | |
| 345 // Self-optimization is a one-off thing: if it fails, don't try again. | |
| 346 reset_value = Smi::kMaxValue; | |
| 347 } | |
| 348 if (isolate()->IsDebuggerActive()) { | 344 if (isolate()->IsDebuggerActive()) { |
| 349 // Detect debug break requests as soon as possible. | 345 // Detect debug break requests as soon as possible. |
| 350 reset_value = FLAG_interrupt_budget >> 4; | 346 reset_value = FLAG_interrupt_budget >> 4; |
| 351 } | 347 } |
| 352 __ li(a2, Operand(profiling_counter_)); | 348 __ li(a2, Operand(profiling_counter_)); |
| 353 __ li(a3, Operand(Smi::FromInt(reset_value))); | 349 __ li(a3, Operand(Smi::FromInt(reset_value))); |
| 354 __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset)); | 350 __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset)); |
| 355 } | 351 } |
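
Note: the NEW side drops the self-optimization branch (which parked the counter at Smi::kMaxValue so a failed self-opt would never retry), leaving only the debugger case. A hedged restatement of the surviving logic, independent of the emitted MIPS code (parameter names are illustrative):

    // Reset-value choice: with a debugger attached, shrink the budget 16x
    // so debug break requests are detected sooner.
    static int ComputeResetValue(bool debugger_active, int interrupt_budget) {
      return debugger_active ? (interrupt_budget >> 4) : interrupt_budget;
    }
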
| 356 | 352 |
| 357 | 353 |
| 358 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, | 354 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, |
| 359 Label* back_edge_target) { | 355 Label* back_edge_target) { |
| 360 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need | 356 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need |
| 361 // to make sure it is constant. Branch may emit a skip-or-jump sequence | 357 // to make sure it is constant. Branch may emit a skip-or-jump sequence |
| 362 // instead of the normal Branch. It seems that the "skip" part of that | 358 // instead of the normal Branch. It seems that the "skip" part of that |
| 363 // sequence is about as long as this Branch would be so it is safe to ignore | 359 // sequence is about as long as this Branch would be so it is safe to ignore |
| 364 // that. | 360 // that. |
| 365 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 361 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 366 Comment cmnt(masm_, "[ Back edge bookkeeping"); | 362 Comment cmnt(masm_, "[ Back edge bookkeeping"); |
| 367 Label ok; | 363 Label ok; |
| 368 int weight = 1; | 364 ASSERT(back_edge_target->is_bound()); |
| 369 if (FLAG_weighted_back_edges) { | 365 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); |
| 370 ASSERT(back_edge_target->is_bound()); | 366 int weight = Min(kMaxBackEdgeWeight, |
| 371 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); | 367 Max(1, distance / kCodeSizeMultiplier)); |
| 372 weight = Min(kMaxBackEdgeWeight, | |
| 373 Max(1, distance / kCodeSizeMultiplier)); | |
| 374 } | |
| 375 EmitProfilingCounterDecrement(weight); | 368 EmitProfilingCounterDecrement(weight); |
| 376 __ slt(at, a3, zero_reg); | 369 __ slt(at, a3, zero_reg); |
| 377 __ beq(at, zero_reg, &ok); | 370 __ beq(at, zero_reg, &ok); |
| 378 // Call will emit a li t9 first, so it is safe to use the delay slot. | 371 // Call will emit a li t9 first, so it is safe to use the delay slot. |
| 379 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); | 372 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); |
| 380 // Record a mapping of this PC offset to the OSR id. This is used to find | 373 // Record a mapping of this PC offset to the OSR id. This is used to find |
| 381 // the AST id from the unoptimized code in order to use it as a key into | 374 // the AST id from the unoptimized code in order to use it as a key into |
| 382 // the deoptimization input data found in the optimized code. | 375 // the deoptimization input data found in the optimized code. |
| 383 RecordBackEdge(stmt->OsrEntryId()); | 376 RecordBackEdge(stmt->OsrEntryId()); |
| 384 EmitProfilingCounterReset(); | 377 EmitProfilingCounterReset(); |
| (...skipping 12 matching lines...) |
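
Note: with FLAG_weighted_back_edges removed, the distance-based weight above is now computed unconditionally. A sketch of the clamped formula with a worked example (both constants are per-architecture; the values below are placeholders, not the real MIPS ones):

    // weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier))
    static int BackEdgeWeight(int distance_in_bytes) {
      const int kCodeSizeMultiplier = 150;  // placeholder value
      const int kMaxBackEdgeWeight = 127;   // placeholder value
      int w = distance_in_bytes / kCodeSizeMultiplier;
      if (w < 1) w = 1;
      if (w > kMaxBackEdgeWeight) w = kMaxBackEdgeWeight;
      return w;
    }

Under these placeholder constants, a 100-byte loop body decrements the counter by 1 per back edge, while a 3000-byte body decrements it by 20, so larger loops exhaust the interrupt budget proportionally faster.
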
| 397 if (return_label_.is_bound()) { | 390 if (return_label_.is_bound()) { |
| 398 __ Branch(&return_label_); | 391 __ Branch(&return_label_); |
| 399 } else { | 392 } else { |
| 400 __ bind(&return_label_); | 393 __ bind(&return_label_); |
| 401 if (FLAG_trace) { | 394 if (FLAG_trace) { |
| 402 // Push the return value on the stack as the parameter. | 395 // Push the return value on the stack as the parameter. |
| 403 // Runtime::TraceExit returns its parameter in v0. | 396 // Runtime::TraceExit returns its parameter in v0. |
| 404 __ push(v0); | 397 __ push(v0); |
| 405 __ CallRuntime(Runtime::kTraceExit, 1); | 398 __ CallRuntime(Runtime::kTraceExit, 1); |
| 406 } | 399 } |
| 407 if (FLAG_interrupt_at_exit || FLAG_self_optimization) { | 400 // Pretend that the exit is a backwards jump to the entry. |
| 408 // Pretend that the exit is a backwards jump to the entry. | 401 int weight = 1; |
| 409 int weight = 1; | 402 if (info_->ShouldSelfOptimize()) { |
| 410 if (info_->ShouldSelfOptimize()) { | 403 weight = FLAG_interrupt_budget / FLAG_self_opt_count; |
| 411 weight = FLAG_interrupt_budget / FLAG_self_opt_count; | 404 } else { |
| 412 } else if (FLAG_weighted_back_edges) { | 405 int distance = masm_->pc_offset(); |
| 413 int distance = masm_->pc_offset(); | 406 weight = Min(kMaxBackEdgeWeight, |
| 414 weight = Min(kMaxBackEdgeWeight, | 407 Max(1, distance / kCodeSizeMultiplier)); |
| 415 Max(1, distance / kCodeSizeMultiplier)); | |
| 416 } | |
| 417 EmitProfilingCounterDecrement(weight); | |
| 418 Label ok; | |
| 419 __ Branch(&ok, ge, a3, Operand(zero_reg)); | |
| 420 __ push(v0); | |
| 421 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) { | |
| 422 __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
| 423 __ push(a2); | |
| 424 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1); | |
| 425 } else { | |
| 426 __ Call(isolate()->builtins()->InterruptCheck(), | |
| 427 RelocInfo::CODE_TARGET); | |
| 428 } | |
| 429 __ pop(v0); | |
| 430 EmitProfilingCounterReset(); | |
| 431 __ bind(&ok); | |
| 432 } | 408 } |
| 409 EmitProfilingCounterDecrement(weight); |
| 410 Label ok; |
| 411 __ Branch(&ok, ge, a3, Operand(zero_reg)); |
| 412 __ push(v0); |
| 413 __ Call(isolate()->builtins()->InterruptCheck(), |
| 414 RelocInfo::CODE_TARGET); |
| 415 __ pop(v0); |
| 416 EmitProfilingCounterReset(); |
| 417 __ bind(&ok); |
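
Note: the return path now performs this budget check unconditionally; the FLAG_interrupt_at_exit / FLAG_self_optimization guard and the direct Runtime::kOptimizeFunctionOnNextCall path are gone, and the exit is always treated as a back edge spanning the whole function body (distance = masm_->pc_offset()). A C-level restatement of the emitted control flow (all names illustrative, not V8's API):

    // Pseudo-implementation of the exit-time check emitted above.
    static int profiling_counter = 0;
    static const int kResetValue = 0x1000;  // placeholder budget

    static void CallInterruptCheck() { /* stands in for the builtin call */ }

    static void ExitBookkeeping(int weight) {
      profiling_counter -= weight;          // EmitProfilingCounterDecrement
      if (profiling_counter < 0) {          // Branch to &ok not taken: budget exhausted
        // v0 (the return value) is pushed/popped around the call in the
        // generated code.
        CallInterruptCheck();
        profiling_counter = kResetValue;    // EmitProfilingCounterReset
      }
      // label &ok: fall through to the actual return sequence.
    }
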
| 433 | 418 |
| 434 #ifdef DEBUG | 419 #ifdef DEBUG |
| 435 // Add a label for checking the size of the code used for returning. | 420 // Add a label for checking the size of the code used for returning. |
| 436 Label check_exit_codesize; | 421 Label check_exit_codesize; |
| 437 masm_->bind(&check_exit_codesize); | 422 masm_->bind(&check_exit_codesize); |
| 438 #endif | 423 #endif |
| 439 // Make sure that the constant pool is not emitted inside of the return | 424 // Make sure that the constant pool is not emitted inside of the return |
| 440 // sequence. | 425 // sequence. |
| 441 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 426 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 442 // Here we use masm_-> instead of the __ macro to avoid the code coverage | 427 // Here we use masm_-> instead of the __ macro to avoid the code coverage |
| (...skipping 4534 matching lines...) |
| 4977 Assembler::target_address_at(pc_immediate_load_address)) == | 4962 Assembler::target_address_at(pc_immediate_load_address)) == |
| 4978 reinterpret_cast<uint32_t>( | 4963 reinterpret_cast<uint32_t>( |
| 4979 isolate->builtins()->OsrAfterStackCheck()->entry())); | 4964 isolate->builtins()->OsrAfterStackCheck()->entry())); |
| 4980 return OSR_AFTER_STACK_CHECK; | 4965 return OSR_AFTER_STACK_CHECK; |
| 4981 } | 4966 } |
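
Note: this hunk appears to be the tail of BackEdgeTable::GetBackEdgeState, which inspects the call target currently patched at a back edge and reports which builtin is installed there. A hedged sketch of the overall dispatch shape (the enum values match full-codegen of this era; the classifier function itself is illustrative, not the real implementation):

    #include <cstdint>

    enum BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT, OSR_AFTER_STACK_CHECK };

    // Classify a back edge by the entry address patched into its call site.
    static BackEdgeState Classify(uint32_t target, uint32_t interrupt_entry,
                                  uint32_t osr_entry) {
      if (target == interrupt_entry) return INTERRUPT;
      if (target == osr_entry) return ON_STACK_REPLACEMENT;
      // Otherwise it must be the OsrAfterStackCheck builtin, which is what
      // the ASSERT above verifies before returning.
      return OSR_AFTER_STACK_CHECK;
    }
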
| 4982 | 4967 |
| 4983 | 4968 |
| 4984 } } // namespace v8::internal | 4969 } } // namespace v8::internal |
| 4985 | 4970 |
| 4986 #endif // V8_TARGET_ARCH_MIPS | 4971 #endif // V8_TARGET_ARCH_MIPS |