| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 316 matching lines...) |
| 327 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { | 327 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { |
| 328 __ mov(r2, Operand(profiling_counter_)); | 328 __ mov(r2, Operand(profiling_counter_)); |
| 329 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); | 329 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
| 330 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); | 330 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); |
| 331 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); | 331 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
| 332 } | 332 } |
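The sequence above decrements the profiling counter held in a heap cell. A minimal C++ model of its runtime effect, assuming the 32-bit ARM Smi encoding (one tag bit, so Smi::FromInt(v) is v << 1); the helper names are hypothetical, not V8 API:

    #include <cstdint>

    // Assumption: 1-bit Smi tag, as on 32-bit ARM.
    inline int32_t SmiFromInt(int32_t v) { return v << 1; }

    // Mirrors sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC): the
    // subtraction updates the N flag, and the subsequent `b pl` skips
    // the interrupt call while the counter is still non-negative.
    bool ProfilingBudgetExhausted(int32_t* cell, int delta) {
      *cell -= SmiFromInt(delta);
      return *cell < 0;
    }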
| 333 | 333 |
| 334 | 334 |
| 335 void FullCodeGenerator::EmitProfilingCounterReset() { | 335 void FullCodeGenerator::EmitProfilingCounterReset() { |
| 336 int reset_value = FLAG_interrupt_budget; | 336 int reset_value = FLAG_interrupt_budget; |
| 337 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) { | |
| 338 // Self-optimization is a one-off thing: if it fails, don't try again. | |
| 339 reset_value = Smi::kMaxValue; | |
| 340 } | |
| 341 if (isolate()->IsDebuggerActive()) { | 337 if (isolate()->IsDebuggerActive()) { |
| 342 // Detect debug break requests as soon as possible. | 338 // Detect debug break requests as soon as possible. |
| 343 reset_value = FLAG_interrupt_budget >> 4; | 339 reset_value = FLAG_interrupt_budget >> 4; |
| 344 } | 340 } |
| 345 __ mov(r2, Operand(profiling_counter_)); | 341 __ mov(r2, Operand(profiling_counter_)); |
| 346 __ mov(r3, Operand(Smi::FromInt(reset_value))); | 342 __ mov(r3, Operand(Smi::FromInt(reset_value))); |
| 347 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); | 343 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
| 348 } | 344 } |
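With the one-off self-optimization branch removed on the right-hand side, only the debugger case still adjusts the reset value. A hedged sketch of the remaining policy (the free function and its parameters are illustrative):

    // Sketch of the reset policy after this change: a 16x smaller
    // budget while the debugger is active, so debug break requests are
    // detected sooner; otherwise the full interrupt budget.
    int ComputeResetValue(int interrupt_budget, bool debugger_active) {
      return debugger_active ? interrupt_budget >> 4 : interrupt_budget;
    }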
| 349 | 345 |
| 350 | 346 |
| 351 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, | 347 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, |
| 352 Label* back_edge_target) { | 348 Label* back_edge_target) { |
| 353 Comment cmnt(masm_, "[ Back edge bookkeeping"); | 349 Comment cmnt(masm_, "[ Back edge bookkeeping"); |
| 354 // Block literal pools whilst emitting back edge code. | 350 // Block literal pools whilst emitting back edge code. |
| 355 Assembler::BlockConstPoolScope block_const_pool(masm_); | 351 Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 356 Label ok; | 352 Label ok; |
| 357 | 353 |
| 358 int weight = 1; | 354 ASSERT(back_edge_target->is_bound()); |
| 359 if (FLAG_weighted_back_edges) { | 355 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); |
| 360 ASSERT(back_edge_target->is_bound()); | 356 int weight = Min(kMaxBackEdgeWeight, |
| 361 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); | 357 Max(1, distance / kCodeSizeMultiplier)); |
| 362 weight = Min(kMaxBackEdgeWeight, | |
| 363 Max(1, distance / kCodeSizeMultiplier)); | |
| 364 } | |
| 365 EmitProfilingCounterDecrement(weight); | 358 EmitProfilingCounterDecrement(weight); |
| 366 __ b(pl, &ok); | 359 __ b(pl, &ok); |
| 367 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); | 360 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); |
| 368 | 361 |
| 369 // Record a mapping of this PC offset to the OSR id. This is used to find | 362 // Record a mapping of this PC offset to the OSR id. This is used to find |
| 370 // the AST id from the unoptimized code in order to use it as a key into | 363 // the AST id from the unoptimized code in order to use it as a key into |
| 371 // the deoptimization input data found in the optimized code. | 364 // the deoptimization input data found in the optimized code. |
| 372 RecordBackEdge(stmt->OsrEntryId()); | 365 RecordBackEdge(stmt->OsrEntryId()); |
| 373 | 366 |
| 374 EmitProfilingCounterReset(); | 367 EmitProfilingCounterReset(); |
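On the right-hand side the back-edge weight is now computed unconditionally from the code distance since the loop target. A worked sketch of the clamp, with made-up numbers (the real kMaxBackEdgeWeight and kCodeSizeMultiplier are defined elsewhere in full-codegen):

    #include <algorithm>

    int BackEdgeWeight(int distance, int max_weight, int size_multiplier) {
      // Larger loop bodies drain the counter faster, clamped to
      // the range [1, max_weight].
      return std::min(max_weight, std::max(1, distance / size_multiplier));
    }
    // Illustrative values only: distance 480, multiplier 149 -> weight 3.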
| (...skipping 12 matching lines...) |
| 387 if (return_label_.is_bound()) { | 380 if (return_label_.is_bound()) { |
| 388 __ b(&return_label_); | 381 __ b(&return_label_); |
| 389 } else { | 382 } else { |
| 390 __ bind(&return_label_); | 383 __ bind(&return_label_); |
| 391 if (FLAG_trace) { | 384 if (FLAG_trace) { |
| 392 // Push the return value on the stack as the parameter. | 385 // Push the return value on the stack as the parameter. |
| 393 // Runtime::TraceExit returns its parameter in r0. | 386 // Runtime::TraceExit returns its parameter in r0. |
| 394 __ push(r0); | 387 __ push(r0); |
| 395 __ CallRuntime(Runtime::kTraceExit, 1); | 388 __ CallRuntime(Runtime::kTraceExit, 1); |
| 396 } | 389 } |
| 397 if (FLAG_interrupt_at_exit || FLAG_self_optimization) { | 390 // Pretend that the exit is a backwards jump to the entry. |
| 398 // Pretend that the exit is a backwards jump to the entry. | 391 int weight = 1; |
| 399 int weight = 1; | 392 if (info_->ShouldSelfOptimize()) { |
| 400 if (info_->ShouldSelfOptimize()) { | 393 weight = FLAG_interrupt_budget / FLAG_self_opt_count; |
| 401 weight = FLAG_interrupt_budget / FLAG_self_opt_count; | 394 } else { |
| 402 } else if (FLAG_weighted_back_edges) { | 395 int distance = masm_->pc_offset(); |
| 403 int distance = masm_->pc_offset(); | 396 weight = Min(kMaxBackEdgeWeight, |
| 404 weight = Min(kMaxBackEdgeWeight, | 397 Max(1, distance / kCodeSizeMultiplier)); |
| 405 Max(1, distance / kCodeSizeMultiplier)); | |
| 406 } | |
| 407 EmitProfilingCounterDecrement(weight); | |
| 408 Label ok; | |
| 409 __ b(pl, &ok); | |
| 410 __ push(r0); | |
| 411 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) { | |
| 412 __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
| 413 __ push(r2); | |
| 414 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1); | |
| 415 } else { | |
| 416 __ Call(isolate()->builtins()->InterruptCheck(), | |
| 417 RelocInfo::CODE_TARGET); | |
| 418 } | |
| 419 __ pop(r0); | |
| 420 EmitProfilingCounterReset(); | |
| 421 __ bind(&ok); | |
| 422 } | 398 } |
| 399 EmitProfilingCounterDecrement(weight); |
| 400 Label ok; |
| 401 __ b(pl, &ok); |
| 402 __ push(r0); |
| 403 __ Call(isolate()->builtins()->InterruptCheck(), |
| 404 RelocInfo::CODE_TARGET); |
| 405 __ pop(r0); |
| 406 EmitProfilingCounterReset(); |
| 407 __ bind(&ok); |
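The function exit now always runs this bookkeeping, treating the return as a backwards jump to the entry; the old FLAG_interrupt_at_exit, FLAG_weighted_back_edges, and FLAG_direct_self_opt guards are gone. A hedged sketch of the resulting weight policy (the signature is illustrative, not the real one):

    #include <algorithm>

    int ExitWeight(bool should_self_optimize, int interrupt_budget,
                   int self_opt_count, int pc_offset,
                   int max_weight, int size_multiplier) {
      if (should_self_optimize) {
        // As in the hunk: FLAG_interrupt_budget / FLAG_self_opt_count.
        return interrupt_budget / self_opt_count;
      }
      // Otherwise weight by the amount of code emitted so far.
      return std::min(max_weight, std::max(1, pc_offset / size_multiplier));
    }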
| 423 | 408 |
| 424 #ifdef DEBUG | 409 #ifdef DEBUG |
| 425 // Add a label for checking the size of the code used for returning. | 410 // Add a label for checking the size of the code used for returning. |
| 426 Label check_exit_codesize; | 411 Label check_exit_codesize; |
| 427 masm_->bind(&check_exit_codesize); | 412 masm_->bind(&check_exit_codesize); |
| 428 #endif | 413 #endif |
| 429 // Make sure that the constant pool is not emitted inside of the return | 414 // Make sure that the constant pool is not emitted inside of the return |
| 430 // sequence. | 415 // sequence. |
| 431 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 416 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 432 // Here we use masm_-> instead of the __ macro to avoid the code coverage | 417 // Here we use masm_-> instead of the __ macro to avoid the code coverage |
| (...skipping 4495 matching lines...) |
| 4928 ASSERT(Memory::uint32_at(interrupt_address_pointer) == | 4913 ASSERT(Memory::uint32_at(interrupt_address_pointer) == |
| 4929 reinterpret_cast<uint32_t>( | 4914 reinterpret_cast<uint32_t>( |
| 4930 isolate->builtins()->OsrAfterStackCheck()->entry())); | 4915 isolate->builtins()->OsrAfterStackCheck()->entry())); |
| 4931 return OSR_AFTER_STACK_CHECK; | 4916 return OSR_AFTER_STACK_CHECK; |
| 4932 } | 4917 } |
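This fragment is the tail of the back-edge state check: the ASSERT confirms the call site has been redirected to the OsrAfterStackCheck builtin. A hedged sketch of the dispatch it completes, assuming the patcher classifies a back edge by which builtin entry the patched call targets (the enum value OSR_AFTER_STACK_CHECK appears in the fragment; the helper and its parameters are assumptions):

    #include <cassert>
    #include <cstdint>

    enum BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT, OSR_AFTER_STACK_CHECK };

    BackEdgeState ClassifyBackEdge(uint32_t target,
                                   uint32_t interrupt_entry,
                                   uint32_t osr_entry,
                                   uint32_t osr_after_check_entry) {
      if (target == interrupt_entry) return INTERRUPT;
      if (target == osr_entry) return ON_STACK_REPLACEMENT;
      // Matches the ASSERT in the fragment above (standard assert used
      // here in place of V8's ASSERT macro).
      assert(target == osr_after_check_entry);
      return OSR_AFTER_STACK_CHECK;
    }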
| 4933 | 4918 |
| 4934 | 4919 |
| 4935 } } // namespace v8::internal | 4920 } } // namespace v8::internal |
| 4936 | 4921 |
| 4937 #endif // V8_TARGET_ARCH_ARM | 4922 #endif // V8_TARGET_ARCH_ARM |