OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1513 matching lines...)
1524 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length); | 1524 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length); |
1525 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length, | 1525 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length, |
1526 PretenureFlag pretenure); | 1526 PretenureFlag pretenure); |
1527 | 1527 |
1528 // Predicate that governs global pre-tenuring decisions based on observed | 1528 // Predicate that governs global pre-tenuring decisions based on observed |
1529 // promotion rates of previous collections. | 1529 // promotion rates of previous collections. |
1530 inline bool ShouldGloballyPretenure() { | 1530 inline bool ShouldGloballyPretenure() { |
1531 return new_space_high_promotion_mode_active_; | 1531 return new_space_high_promotion_mode_active_; |
1532 } | 1532 } |
1533 | 1533 |
| 1534 inline Address* NewSpaceHighPromotionModeActiveAddress() { |
| 1535 return reinterpret_cast<Address*>(&new_space_high_promotion_mode_active_); |
| 1536 } |
| 1537 |
1534 inline intptr_t PromotedTotalSize() { | 1538 inline intptr_t PromotedTotalSize() { |
1535 return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize(); | 1539 return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize(); |
1536 } | 1540 } |
1537 | 1541 |
1538 // True if we have reached the allocation limit in the old generation that | 1542 // True if we have reached the allocation limit in the old generation that |
1539 // should force the next GC (caused normally) to be a full one. | 1543 // should force the next GC (caused normally) to be a full one. |
1540 inline bool OldGenerationPromotionLimitReached() { | 1544 inline bool OldGenerationPromotionLimitReached() { |
1541 return PromotedTotalSize() > old_gen_promotion_limit_; | 1545 return PromotedTotalSize() > old_gen_promotion_limit_; |
1542 } | 1546 } |
1543 | 1547 |
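Note: OldGenerationPromotionLimitReached() is the check that turns an already scheduled GC into a full one. A hedged sketch of how such a predicate is typically consumed when choosing the collector; the names are illustrative, not the V8 implementation:

    // Escalate to a full collection once promoted data has crossed the
    // old-generation limit, since a scavenge cannot shrink old space.
    enum class Collector { kScavenge, kMarkCompact };

    inline Collector SelectCollector(bool promotion_limit_reached) {
      return promotion_limit_reached ? Collector::kMarkCompact
                                     : Collector::kScavenge;
    }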
(...skipping 58 matching lines...)
1602 STATIC_CHECK(kNullValueRootIndex == Internals::kNullValueRootIndex); | 1606 STATIC_CHECK(kNullValueRootIndex == Internals::kNullValueRootIndex); |
1603 STATIC_CHECK(kTrueValueRootIndex == Internals::kTrueValueRootIndex); | 1607 STATIC_CHECK(kTrueValueRootIndex == Internals::kTrueValueRootIndex); |
1604 STATIC_CHECK(kFalseValueRootIndex == Internals::kFalseValueRootIndex); | 1608 STATIC_CHECK(kFalseValueRootIndex == Internals::kFalseValueRootIndex); |
1605 STATIC_CHECK(kempty_stringRootIndex == Internals::kEmptyStringRootIndex); | 1609 STATIC_CHECK(kempty_stringRootIndex == Internals::kEmptyStringRootIndex); |
1606 | 1610 |
1607 // Generated code can embed direct references to non-writable roots if | 1611 // Generated code can embed direct references to non-writable roots if |
1608 // they are in new space. | 1612 // they are in new space. |
1609 static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index); | 1613 static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index); |
1610 | 1614 |
1611 MUST_USE_RESULT MaybeObject* NumberToString( | 1615 MUST_USE_RESULT MaybeObject* NumberToString( |
1612 Object* number, bool check_number_string_cache = true); | 1616 Object* number, bool check_number_string_cache = true, |
| 1617 PretenureFlag pretenure = NOT_TENURED); |
1613 MUST_USE_RESULT MaybeObject* Uint32ToString( | 1618 MUST_USE_RESULT MaybeObject* Uint32ToString( |
1614 uint32_t value, bool check_number_string_cache = true); | 1619 uint32_t value, bool check_number_string_cache = true); |
1615 | 1620 |
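Note: NumberToString() now also takes a PretenureFlag with a NOT_TENURED default, so existing call sites keep compiling while a caller that knows the resulting string is long-lived can allocate it in old space up front. A hypothetical call site (assuming a Heap* named heap and an Object* named number; not taken from this CL):

    // Skip the number-string cache and pretenure a string that is expected
    // to survive many scavenges.
    MaybeObject* maybe_string =
        heap->NumberToString(number,
                             false,     // check_number_string_cache
                             TENURED);  // pretenure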
1616 Map* MapForExternalArrayType(ExternalArrayType array_type); | 1621 Map* MapForExternalArrayType(ExternalArrayType array_type); |
1617 RootListIndex RootIndexForExternalArrayType( | 1622 RootListIndex RootIndexForExternalArrayType( |
1618 ExternalArrayType array_type); | 1623 ExternalArrayType array_type); |
1619 | 1624 |
1620 void RecordStats(HeapStats* stats, bool take_snapshot = false); | 1625 void RecordStats(HeapStats* stats, bool take_snapshot = false); |
1621 | 1626 |
1622 // Copy block of memory from src to dst. Size of block should be aligned | 1627 // Copy block of memory from src to dst. Size of block should be aligned |
(...skipping 346 matching lines...)
1969 // remain until the next failure and garbage collection. | 1974 // remain until the next failure and garbage collection. |
1970 int allocation_timeout_; | 1975 int allocation_timeout_; |
1971 | 1976 |
1972 // Do we expect to be able to handle allocation failure at this | 1977 // Do we expect to be able to handle allocation failure at this |
1973 // time? | 1978 // time? |
1974 bool disallow_allocation_failure_; | 1979 bool disallow_allocation_failure_; |
1975 #endif // DEBUG | 1980 #endif // DEBUG |
1976 | 1981 |
1977 // Indicates that the new space should be kept small due to high promotion | 1982 // Indicates that the new space should be kept small due to high promotion |
1978 // rates caused by the mutator allocating a lot of long-lived objects. | 1983 // rates caused by the mutator allocating a lot of long-lived objects. |
1979 bool new_space_high_promotion_mode_active_; | 1984 intptr_t new_space_high_promotion_mode_active_; |
1980 | 1985 |
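Note: the flag itself is widened from bool to intptr_t, presumably so that the word-sized accesses made through the address accessor above stay within the flag; a one-byte bool read or written through a word-sized pointer would also touch the neighbouring bytes. A small self-contained illustration of that assumption (hypothetical names, not the V8 code):

    #include <cstdint>

    struct PromotionMode {
      // Stored as a full word so that a word-sized load or store through a
      // raw pointer covers exactly this field and nothing else.
      intptr_t active;
    };

    inline bool ReadFlagAsWord(const PromotionMode* mode) {
      const intptr_t* raw = &mode->active;  // analogous to the Address* above
      return *raw != 0;
    }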
1981 // Limit that triggers a global GC on the next (normally caused) GC. This | 1986 // Limit that triggers a global GC on the next (normally caused) GC. This |
1982 // is checked when we have already decided to do a GC to help determine | 1987 // is checked when we have already decided to do a GC to help determine |
1983 // which collector to invoke. | 1988 // which collector to invoke. |
1984 intptr_t old_gen_promotion_limit_; | 1989 intptr_t old_gen_promotion_limit_; |
1985 | 1990 |
1986 // Limit that triggers a global GC as soon as is reasonable. This is | 1991 // Limit that triggers a global GC as soon as is reasonable. This is |
1987 // checked before expanding a paged space in the old generation and on | 1992 // checked before expanding a paged space in the old generation and on |
1988 // every allocation in large object space. | 1993 // every allocation in large object space. |
1989 intptr_t old_gen_allocation_limit_; | 1994 intptr_t old_gen_allocation_limit_; |
(...skipping 1078 matching lines...)
3068 AssertNoAllocation no_alloc; // i.e. no gc allowed. | 3073 AssertNoAllocation no_alloc; // i.e. no gc allowed. |
3069 | 3074 |
3070 private: | 3075 private: |
3071 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 3076 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
3072 }; | 3077 }; |
3073 #endif // DEBUG | 3078 #endif // DEBUG |
3074 | 3079 |
3075 } } // namespace v8::internal | 3080 } } // namespace v8::internal |
3076 | 3081 |
3077 #endif // V8_HEAP_H_ | 3082 #endif // V8_HEAP_H_ |