| Index: runtime/vm/isolate.h
|
| diff --git a/runtime/vm/isolate.h b/runtime/vm/isolate.h
|
| index 8d2508fbd9461f131fad072bc8bd4f1390a5f9a8..350e39016819fa2a318d10fb4c19460e5cff9faa 100644
|
| --- a/runtime/vm/isolate.h
|
| +++ b/runtime/vm/isolate.h
|
| @@ -487,30 +487,40 @@ class Isolate : public BaseIsolate {
|
| all_classes_finalized_ = value;
|
| }
|
|
|
| - static const uint32_t kInvalidGen = 0;
|
| + // Returns true while at least one top-level parsing operation is in progress.
|
| + bool IsTopLevelParsing() {
|
| + const intptr_t value =
|
| + AtomicOperations::LoadRelaxedIntPtr(&top_level_parsing_count_);
|
| + ASSERT(value >= 0);
|
| + return value > 0;
|
| + }
|
|
|
| - void IncrCHAInvalidationGen() {
|
| - cha_invalidation_gen_++;
|
| - if (cha_invalidation_gen_ == kInvalidGen) cha_invalidation_gen_++;
|
| + void IncrTopLevelParsingCount() {
|
| + AtomicOperations::IncrementBy(&top_level_parsing_count_, 1);
|
| + }
|
| + void DecrTopLevelParsingCount() {
|
| + AtomicOperations::DecrementBy(&top_level_parsing_count_, 1);
|
| }
|
| - void ResetCHAInvalidationGen() { cha_invalidation_gen_ = kInvalidGen; }
|
| - uint32_t cha_invalidation_gen() const { return cha_invalidation_gen_; }
|
|
|
| + static const uint32_t kInvalidGen = 0;
|
|
|
| void IncrFieldInvalidationGen() {
|
| - field_invalidation_gen_++;
|
| - if (field_invalidation_gen_ == kInvalidGen) field_invalidation_gen_++;
|
| + AtomicOperations::IncrementBy(&field_invalidation_gen_, 1);
|
| + if (field_invalidation_gen_ == kInvalidGen) {
|
| + AtomicOperations::IncrementBy(&field_invalidation_gen_, 1);
|
| + }
|
| }
|
| + intptr_t field_invalidation_gen() const { return field_invalidation_gen_; }
|
|
|
| - void ResetFieldInvalidationGen() { field_invalidation_gen_ = kInvalidGen; }
|
| - uint32_t field_invalidation_gen() const { return field_invalidation_gen_; }
|
| -
|
| - void IncrPrefixInvalidationGen() {
|
| - prefix_invalidation_gen_++;
|
| - if (prefix_invalidation_gen_ == kInvalidGen) prefix_invalidation_gen_++;
|
| + void IncrLoadingInvalidationGen() {
|
| + AtomicOperations::IncrementBy(&loading_invalidation_gen_, 1);
|
| + if (loading_invalidation_gen_ == kInvalidGen) {
|
| + AtomicOperations::IncrementBy(&loading_invalidation_gen_, 1);
|
| + }
|
| + }
|
| + intptr_t loading_invalidation_gen() {
|
| + return AtomicOperations::LoadRelaxedIntPtr(&loading_invalidation_gen_);
|
| }
|
| - void ResetPrefixInvalidationGen() { prefix_invalidation_gen_ = kInvalidGen; }
|
| - uint32_t prefix_invalidation_gen() const { return prefix_invalidation_gen_; }
|
|
|
| // Used by background compiler which field became boxed and must trigger
|
| // deoptimization in the mutator thread.
|
| @@ -730,9 +740,9 @@ class Isolate : public BaseIsolate {
|
| // Invalidation generations; used to track events occuring in parallel
|
| // to background compilation. The counters may overflow, which is OK
|
| // since we check for equality to detect if an event occured.
|
| - uint32_t cha_invalidation_gen_;
|
| - uint32_t field_invalidation_gen_;
|
| - uint32_t prefix_invalidation_gen_;
|
| + intptr_t field_invalidation_gen_;
|
| + intptr_t loading_invalidation_gen_;
|
| + intptr_t top_level_parsing_count_;
|
|
|
| // Protect access to boxed_field_list_.
|
| Mutex* boxed_field_list_mutex_;
|
|
|