| OLD | NEW | 
|     1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |     1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 
|     2 // Redistribution and use in source and binary forms, with or without |     2 // Redistribution and use in source and binary forms, with or without | 
|     3 // modification, are permitted provided that the following conditions are |     3 // modification, are permitted provided that the following conditions are | 
|     4 // met: |     4 // met: | 
|     5 // |     5 // | 
|     6 //     * Redistributions of source code must retain the above copyright |     6 //     * Redistributions of source code must retain the above copyright | 
|     7 //       notice, this list of conditions and the following disclaimer. |     7 //       notice, this list of conditions and the following disclaimer. | 
|     8 //     * Redistributions in binary form must reproduce the above |     8 //     * Redistributions in binary form must reproduce the above | 
|     9 //       copyright notice, this list of conditions and the following |     9 //       copyright notice, this list of conditions and the following | 
|    10 //       disclaimer in the documentation and/or other materials provided |    10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 120 matching lines...) | 
|   131 }; |   131 }; | 
|   132  |   132  | 
|   133 bool Profiler::paused_ = false; |   133 bool Profiler::paused_ = false; | 
|   134  |   134  | 
|   135  |   135  | 
|   136 // |   136 // | 
|   137 // StackTracer implementation |   137 // StackTracer implementation | 
|   138 // |   138 // | 
|   139 void StackTracer::Trace(TickSample* sample) { |   139 void StackTracer::Trace(TickSample* sample) { | 
|   140   // Assuming that stack grows from lower addresses |   140   // Assuming that stack grows from lower addresses | 
|   141   if (sample->sp < sample->fp && sample->fp < low_stack_bound_) { |   141   if (sample->state != GC | 
 |   142       && (sample->sp < sample->fp && sample->fp < low_stack_bound_)) { | 
|   142     sample->InitStack(1); |   143     sample->InitStack(1); | 
|   143     sample->stack[0] = Memory::Address_at( |   144     sample->stack[0] = Memory::Address_at( | 
|   144         (Address)(sample->fp + StandardFrameConstants::kCallerPCOffset)); |   145         (Address)(sample->fp + StandardFrameConstants::kCallerPCOffset)); | 
|   145   } else { |   146   } else { | 
|   146     // FP seems to be in some intermediate state, better discard this sample |   147     // GC is running or FP seems to be in some intermediate state, | 
 |   148     // better discard this sample | 
|   147     sample->InitStack(0); |   149     sample->InitStack(0); | 
|   148   } |   150   } | 
|   149 } |   151 } | 
|   150  |   152  | 
|   151  |   153  | 
|   152 // |   154 // | 
|   153 // Ticker used to provide ticks to the profiler and the sliding state |   155 // Ticker used to provide ticks to the profiler and the sliding state | 
|   154 // window. |   156 // window. | 
|   155 // |   157 // | 
|   156 class Ticker: public Sampler { |   158 class Ticker: public Sampler { | 
| (...skipping 977 matching lines...) | 
|  1134   if (FLAG_log_state_changes) { |  1136   if (FLAG_log_state_changes) { | 
|  1135     LOG(UncheckedStringEvent("Leaving", StateToString(state_))); |  1137     LOG(UncheckedStringEvent("Leaving", StateToString(state_))); | 
|  1136     if (previous_) { |  1138     if (previous_) { | 
|  1137       LOG(UncheckedStringEvent("To", StateToString(previous_->state_))); |  1139       LOG(UncheckedStringEvent("To", StateToString(previous_->state_))); | 
|  1138     } |  1140     } | 
|  1139   } |  1141   } | 
|  1140 } |  1142 } | 
|  1141 #endif |  1143 #endif | 
|  1142  |  1144  | 
|  1143 } }  // namespace v8::internal |  1145 } }  // namespace v8::internal | 
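
For context, a minimal sketch (not part of the patch) of what the modified StackTracer::Trace above does after this change: stack capture is skipped both when the sampled VM state is GC and when the frame pointer fails the sanity check for a downward-growing stack.

    // Sketch only, simplified from the patched StackTracer::Trace shown above.
    // Assumes TickSample exposes state, sp, fp, stack[] and InitStack(), as in the diff.
    void StackTracer::Trace(TickSample* sample) {
      const bool in_gc = (sample->state == GC);
      const bool fp_looks_sane =
          sample->sp < sample->fp && sample->fp < low_stack_bound_;
      if (!in_gc && fp_looks_sane) {
        // Record the caller's PC, read from the caller-PC slot of the current frame.
        sample->InitStack(1);
        sample->stack[0] = Memory::Address_at(
            (Address)(sample->fp + StandardFrameConstants::kCallerPCOffset));
      } else {
        // During GC (or with an inconsistent FP) the frame walk is unsafe; discard the sample.
        sample->InitStack(0);
      }
    }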