Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 251 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 262 static OldSpace* code_space() { return code_space_; } | 262 static OldSpace* code_space() { return code_space_; } |
| 263 static MapSpace* map_space() { return map_space_; } | 263 static MapSpace* map_space() { return map_space_; } |
| 264 static CellSpace* cell_space() { return cell_space_; } | 264 static CellSpace* cell_space() { return cell_space_; } |
| 265 static LargeObjectSpace* lo_space() { return lo_space_; } | 265 static LargeObjectSpace* lo_space() { return lo_space_; } |
| 266 | 266 |
| 267 static bool always_allocate() { return always_allocate_scope_depth_ != 0; } | 267 static bool always_allocate() { return always_allocate_scope_depth_ != 0; } |
| 268 static Address always_allocate_scope_depth_address() { | 268 static Address always_allocate_scope_depth_address() { |
| 269 return reinterpret_cast<Address>(&always_allocate_scope_depth_); | 269 return reinterpret_cast<Address>(&always_allocate_scope_depth_); |
| 270 } | 270 } |
| 271 static bool linear_allocation() { | 271 static bool linear_allocation() { |
| 272 return linear_allocation_scope_depth_ != 0; | 272 return linear_allocation_scope_depth_ != 0; |
| 273 } | 273 } |
| 274 | 274 |
| 275 static Address* NewSpaceAllocationTopAddress() { | 275 static Address* NewSpaceAllocationTopAddress() { |
| 276 return new_space_.allocation_top_address(); | 276 return new_space_.allocation_top_address(); |
| 277 } | 277 } |
| 278 static Address* NewSpaceAllocationLimitAddress() { | 278 static Address* NewSpaceAllocationLimitAddress() { |
| 279 return new_space_.allocation_limit_address(); | 279 return new_space_.allocation_limit_address(); |
| 280 } | 280 } |
| 281 | 281 |
| 282 // Uncommit unused semi space. | 282 // Uncommit unused semi space. |
| (...skipping 544 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 827 // Allocate uninitialized fixed array (pretenure == NON_TENURE). | 827 // Allocate uninitialized fixed array (pretenure == NON_TENURE). |
| 828 static Object* AllocateRawFixedArray(int length); | 828 static Object* AllocateRawFixedArray(int length); |
| 829 | 829 |
| 830 // True if we have reached the allocation limit in the old generation that | 830 // True if we have reached the allocation limit in the old generation that |
| 831 // should force the next GC (caused normally) to be a full one. | 831 // should force the next GC (caused normally) to be a full one. |
| 832 static bool OldGenerationPromotionLimitReached() { | 832 static bool OldGenerationPromotionLimitReached() { |
| 833 return (PromotedSpaceSize() + PromotedExternalMemorySize()) | 833 return (PromotedSpaceSize() + PromotedExternalMemorySize()) |
| 834 > old_gen_promotion_limit_; | 834 > old_gen_promotion_limit_; |
| 835 } | 835 } |
| 836 | 836 |
| 837 static intptr_t OldGenerationSpaceAvailable() { | |
| 838 return old_gen_allocation_limit_ - | |
| 839 (PromotedSpaceSize() + PromotedExternalMemorySize()); | |
| 840 } | |
| 841 | |
| 837 // True if we have reached the allocation limit in the old generation that | 842 // True if we have reached the allocation limit in the old generation that |
| 838 // should artificially cause a GC right now. | 843 // should artificially cause a GC right now. |
| 839 static bool OldGenerationAllocationLimitReached() { | 844 static bool OldGenerationAllocationLimitReached() { |
| 840 return (PromotedSpaceSize() + PromotedExternalMemorySize()) | 845 return OldGenerationSpaceAvailable() < 0; |
| 841 > old_gen_allocation_limit_; | |
| 842 } | 846 } |
| 843 | 847 |
| 844 // Can be called when the embedding application is idle. | 848 // Can be called when the embedding application is idle. |
| 845 static bool IdleNotification(); | 849 static bool IdleNotification(); |
| 846 | 850 |
| 847 // Declare all the root indices. | 851 // Declare all the root indices. |
| 848 enum RootListIndex { | 852 enum RootListIndex { |
| 849 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex, | 853 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex, |
| 850 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION) | 854 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION) |
| 851 #undef ROOT_INDEX_DECLARATION | 855 #undef ROOT_INDEX_DECLARATION |
| (...skipping 193 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1045 // Performs a major collection in the whole heap. | 1049 // Performs a major collection in the whole heap. |
| 1046 static void MarkCompact(GCTracer* tracer); | 1050 static void MarkCompact(GCTracer* tracer); |
| 1047 | 1051 |
| 1048 // Code to be run before and after mark-compact. | 1052 // Code to be run before and after mark-compact. |
| 1049 static void MarkCompactPrologue(bool is_compacting); | 1053 static void MarkCompactPrologue(bool is_compacting); |
| 1050 static void MarkCompactEpilogue(bool is_compacting); | 1054 static void MarkCompactEpilogue(bool is_compacting); |
| 1051 | 1055 |
| 1052 // Helper function used by CopyObject to copy a source object to an | 1056 // Helper function used by CopyObject to copy a source object to an |
| 1053 // allocated target object and update the forwarding pointer in the source | 1057 // allocated target object and update the forwarding pointer in the source |
| 1054 // object. Returns the target object. | 1058 // object. Returns the target object. |
| 1055 static HeapObject* MigrateObject(HeapObject* source, | 1059 static inline HeapObject* MigrateObject(HeapObject* source, |
| 1056 HeapObject* target, | 1060 HeapObject* target, |
|
Mads Ager (chromium)
2010/01/12 14:17:49
Missing space on this line and the next.
| |
| 1057 int size); | 1061 int size); |
| 1058 | 1062 |
| 1059 // Helper function that governs the promotion policy from new space to | 1063 // Helper function that governs the promotion policy from new space to |
| 1060 // old. If the object's old address lies below the new space's age | 1064 // old. If the object's old address lies below the new space's age |
| 1061 // mark or if we've already filled the bottom 1/16th of the to space, | 1065 // mark or if we've already filled the bottom 1/16th of the to space, |
| 1062 // we try to promote this object. | 1066 // we try to promote this object. |
| 1063 static inline bool ShouldBePromoted(Address old_address, int object_size); | 1067 static inline bool ShouldBePromoted(Address old_address, int object_size); |
| 1064 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) | 1068 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) |
| 1065 // Record the copy of an object in the NewSpace's statistics. | 1069 // Record the copy of an object in the NewSpace's statistics. |
| 1066 static void RecordCopiedObject(HeapObject* obj); | 1070 static void RecordCopiedObject(HeapObject* obj); |
| 1067 | 1071 |
| (...skipping 608 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1676 | 1680 |
| 1677 // To speed up scavenge collections new space strings are kept | 1681 // To speed up scavenge collections new space strings are kept |
| 1678 // separate from old space strings. | 1682 // separate from old space strings. |
| 1679 static List<Object*> new_space_strings_; | 1683 static List<Object*> new_space_strings_; |
| 1680 static List<Object*> old_space_strings_; | 1684 static List<Object*> old_space_strings_; |
| 1681 }; | 1685 }; |
| 1682 | 1686 |
| 1683 } } // namespace v8::internal | 1687 } } // namespace v8::internal |
| 1684 | 1688 |
| 1685 #endif // V8_HEAP_H_ | 1689 #endif // V8_HEAP_H_ |
| OLD | NEW |