| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 178 matching lines...) | |
| 189 // any back edge in any unoptimized frame will trigger on-stack | 189 // any back edge in any unoptimized frame will trigger on-stack |
| 190 // replacement for that frame. | 190 // replacement for that frame. |
| 191 if (FLAG_trace_osr) { | 191 if (FLAG_trace_osr) { |
| 192 PrintF("[patching stack checks in "); | 192 PrintF("[patching stack checks in "); |
| 193 function->PrintName(); | 193 function->PrintName(); |
| 194 PrintF(" for on-stack replacement]\n"); | 194 PrintF(" for on-stack replacement]\n"); |
| 195 } | 195 } |
| 196 | 196 |
| 197 // Get the stack check stub code object to match against. We aren't | 197 // Get the stack check stub code object to match against. We aren't |
| 198 // prepared to generate it, but we don't expect to have to. | 198 // prepared to generate it, but we don't expect to have to. |
| 199 bool found_code = false; | |
| 200 Code* stack_check_code = NULL; | 199 Code* stack_check_code = NULL; |
| 201 if (FLAG_count_based_interrupts) { | 200 InterruptStub interrupt_stub; |
| 202 InterruptStub interrupt_stub; | 201 bool found_code = interrupt_stub.FindCodeInCache(&stack_check_code, isolate_); |
| 203 found_code = interrupt_stub.FindCodeInCache(&stack_check_code, isolate_); | |
| 204 } else // NOLINT | |
| 205 { // NOLINT | |
| 206 StackCheckStub check_stub; | |
| 207 found_code = check_stub.FindCodeInCache(&stack_check_code, isolate_); | |
| 208 } | |
| 209 if (found_code) { | 202 if (found_code) { |
| 210 Code* replacement_code = | 203 Code* replacement_code = |
| 211 isolate_->builtins()->builtin(Builtins::kOnStackReplacement); | 204 isolate_->builtins()->builtin(Builtins::kOnStackReplacement); |
| 212 Code* unoptimized_code = shared->code(); | 205 Code* unoptimized_code = shared->code(); |
| 213 Deoptimizer::PatchStackCheckCode(unoptimized_code, | 206 Deoptimizer::PatchStackCheckCode(unoptimized_code, |
| 214 stack_check_code, | 207 stack_check_code, |
| 215 replacement_code); | 208 replacement_code); |
| 216 } | 209 } |
| 217 } | 210 } |
| 218 | 211 |
| (...skipping 153 matching lines...) | |
| 372 // Add the collected functions as samples. It's important not to do | 365 // Add the collected functions as samples. It's important not to do |
| 373 // this as part of collecting them because this will interfere with | 366 // this as part of collecting them because this will interfere with |
| 374 // the sample lookup in case of recursive functions. | 367 // the sample lookup in case of recursive functions. |
| 375 for (int i = 0; i < sample_count; i++) { | 368 for (int i = 0; i < sample_count; i++) { |
| 376 AddSample(samples[i], kSamplerFrameWeight[i]); | 369 AddSample(samples[i], kSamplerFrameWeight[i]); |
| 377 } | 370 } |
| 378 } | 371 } |
| 379 } | 372 } |
| 380 | 373 |
| 381 | 374 |
| 382 void RuntimeProfiler::NotifyTick() { | |
| 383 if (FLAG_count_based_interrupts) return; | |
| 384 isolate_->stack_guard()->RequestRuntimeProfilerTick(); | |
| 385 } | |
| 386 | |
| 387 | |
| 388 void RuntimeProfiler::SetUp() { | 375 void RuntimeProfiler::SetUp() { |
| 389 ASSERT(has_been_globally_set_up_); | 376 ASSERT(has_been_globally_set_up_); |
| 390 if (!FLAG_watch_ic_patching) { | 377 if (!FLAG_watch_ic_patching) { |
| 391 ClearSampleBuffer(); | 378 ClearSampleBuffer(); |
| 392 } | 379 } |
| 393 // If the ticker hasn't already started, make sure to do so to get | 380 // If the ticker hasn't already started, make sure to do so to get |
| 394 // the ticks for the runtime profiler. | 381 // the ticks for the runtime profiler. |
| 395 if (IsEnabled()) isolate_->logger()->EnsureTickerStarted(); | 382 if (IsEnabled()) isolate_->logger()->EnsureTickerStarted(); |
| 396 } | 383 } |
| 397 | 384 |
| (...skipping 101 matching lines...) | |
| 499 | 486 |
| 500 bool RuntimeProfilerRateLimiter::SuspendIfNecessary() { | 487 bool RuntimeProfilerRateLimiter::SuspendIfNecessary() { |
| 501 if (!RuntimeProfiler::IsSomeIsolateInJS()) { | 488 if (!RuntimeProfiler::IsSomeIsolateInJS()) { |
| 502 return RuntimeProfiler::WaitForSomeIsolateToEnterJS(); | 489 return RuntimeProfiler::WaitForSomeIsolateToEnterJS(); |
| 503 } | 490 } |
| 504 return false; | 491 return false; |
| 505 } | 492 } |
| 506 | 493 |
| 507 | 494 |
| 508 } } // namespace v8::internal | 495 } } // namespace v8::internal |
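
The net effect of the hunks above is that the runtime profiler no longer branches on FLAG_count_based_interrupts: the tick-based NotifyTick path and the StackCheckStub fallback are gone, so on-stack replacement always looks up the already-generated InterruptStub code and, only if it is found in the cache, patches the unoptimized code to jump to the OnStackReplacement builtin. A minimal standalone sketch of that single-path control flow, using hypothetical mock types (FakeCode, FakeInterruptStub, and the global code_cache are illustrative stand-ins, not V8 internals):

```cpp
// Sketch only, NOT the real V8 code: look up an already-generated stub in a
// cache and, only if it is present, patch the unoptimized code. All types
// here are assumed mock stand-ins for the real Code, InterruptStub, etc.
#include <cstdio>
#include <map>
#include <string>

struct FakeCode { std::string name; };              // stands in for Code*

static std::map<std::string, FakeCode> code_cache;  // stands in for the stub cache

struct FakeInterruptStub {
  // Mirrors the FindCodeInCache(...) pattern: succeed only if the stub was
  // generated earlier; never generate it on demand here.
  bool FindCodeInCache(FakeCode* out) const {
    auto it = code_cache.find("InterruptStub");
    if (it == code_cache.end()) return false;
    *out = it->second;
    return true;
  }
};

static void PatchStackCheckCode(FakeCode* unoptimized,
                                const FakeCode& stack_check,
                                const FakeCode& replacement) {
  // Placeholder for Deoptimizer::PatchStackCheckCode: rewrite the back-edge
  // stack checks so they enter OSR instead of calling the interrupt stub.
  std::printf("patching %s: %s -> %s\n", unoptimized->name.c_str(),
              stack_check.name.c_str(), replacement.name.c_str());
}

static void AttemptOnStackReplacement(FakeCode* unoptimized_code) {
  // After the change there is a single path: always try the interrupt stub.
  FakeInterruptStub interrupt_stub;
  FakeCode stack_check_code;
  if (interrupt_stub.FindCodeInCache(&stack_check_code)) {
    FakeCode replacement_code{"OnStackReplacement builtin"};
    PatchStackCheckCode(unoptimized_code, stack_check_code, replacement_code);
  }
}

int main() {
  FakeCode unoptimized{"f (unoptimized)"};
  AttemptOnStackReplacement(&unoptimized);   // cache empty: nothing is patched
  code_cache["InterruptStub"] = FakeCode{"InterruptStub code"};
  AttemptOnStackReplacement(&unoptimized);   // stub cached: code gets patched
  return 0;
}
```

The design point the sketch tries to capture is the same one the original comment states: the caller is not prepared to generate the stub, so a cache miss simply skips the patching step rather than forcing stub generation.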