Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(395)

Side by Side Diff: src/spaces.cc

Issue 227533006: Synchronize store buffer processing and concurrent sweeping. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/objects-inl.h ('k') | src/store-buffer.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 1999 matching lines...) Expand 10 before | Expand all | Expand 10 after
2010 ASSERT(IsAligned(size_in_bytes, kPointerSize)); 2010 ASSERT(IsAligned(size_in_bytes, kPointerSize));
2011 2011
2012 // We write a map and possibly size information to the block. If the block 2012 // We write a map and possibly size information to the block. If the block
2013 // is big enough to be a FreeSpace with at least one extra word (the next 2013 // is big enough to be a FreeSpace with at least one extra word (the next
2014 // pointer), we set its map to be the free space map and its size to an 2014 // pointer), we set its map to be the free space map and its size to an
2015 // appropriate array length for the desired size from HeapObject::Size(). 2015 // appropriate array length for the desired size from HeapObject::Size().
 2016 // If the block is too small (e.g., one or two words) to hold both a size 2016 // If the block is too small (e.g., one or two words) to hold both a size
2017 // field and a next pointer, we give it a filler map that gives it the 2017 // field and a next pointer, we give it a filler map that gives it the
2018 // correct size. 2018 // correct size.
2019 if (size_in_bytes > FreeSpace::kHeaderSize) { 2019 if (size_in_bytes > FreeSpace::kHeaderSize) {
2020 set_map_no_write_barrier(heap->raw_unchecked_free_space_map());
2021 // Can't use FreeSpace::cast because it fails during deserialization. 2020 // Can't use FreeSpace::cast because it fails during deserialization.
2021 // We have to set the size first with a release store before we store
2022 // the map because a concurrent store buffer scan on scavenge must not
2023 // observe a map with an invalid size.
2022 FreeSpace* this_as_free_space = reinterpret_cast<FreeSpace*>(this); 2024 FreeSpace* this_as_free_space = reinterpret_cast<FreeSpace*>(this);
2023 this_as_free_space->set_size(size_in_bytes); 2025 this_as_free_space->nobarrier_set_size(size_in_bytes);
2026 synchronized_set_map_no_write_barrier(heap->raw_unchecked_free_space_map());
2024 } else if (size_in_bytes == kPointerSize) { 2027 } else if (size_in_bytes == kPointerSize) {
2025 set_map_no_write_barrier(heap->raw_unchecked_one_pointer_filler_map()); 2028 set_map_no_write_barrier(heap->raw_unchecked_one_pointer_filler_map());
2026 } else if (size_in_bytes == 2 * kPointerSize) { 2029 } else if (size_in_bytes == 2 * kPointerSize) {
2027 set_map_no_write_barrier(heap->raw_unchecked_two_pointer_filler_map()); 2030 set_map_no_write_barrier(heap->raw_unchecked_two_pointer_filler_map());
2028 } else { 2031 } else {
2029 UNREACHABLE(); 2032 UNREACHABLE();
2030 } 2033 }
2031 // We would like to ASSERT(Size() == size_in_bytes) but this would fail during 2034 // We would like to ASSERT(Size() == size_in_bytes) but this would fail during
2032 // deserialization because the free space map is not done yet. 2035 // deserialization because the free space map is not done yet.
2033 } 2036 }
(...skipping 23 matching lines...) Expand all
2057 } 2060 }
2058 2061
2059 2062
2060 void FreeListNode::set_next(FreeListNode* next) { 2063 void FreeListNode::set_next(FreeListNode* next) {
2061 ASSERT(IsFreeListNode(this)); 2064 ASSERT(IsFreeListNode(this));
2062 // While we are booting the VM the free space map will actually be null. So 2065 // While we are booting the VM the free space map will actually be null. So
2063 // we have to make sure that we don't try to use it for anything at that 2066 // we have to make sure that we don't try to use it for anything at that
2064 // stage. 2067 // stage.
2065 if (map() == GetHeap()->raw_unchecked_free_space_map()) { 2068 if (map() == GetHeap()->raw_unchecked_free_space_map()) {
2066 ASSERT(map() == NULL || Size() >= kNextOffset + kPointerSize); 2069 ASSERT(map() == NULL || Size() >= kNextOffset + kPointerSize);
2067 Memory::Address_at(address() + kNextOffset) = 2070 NoBarrier_Store(reinterpret_cast<AtomicWord*>(address() + kNextOffset),
2068 reinterpret_cast<Address>(next); 2071 reinterpret_cast<AtomicWord>(next));
2069 } else { 2072 } else {
2070 Memory::Address_at(address() + kPointerSize) = 2073 NoBarrier_Store(reinterpret_cast<AtomicWord*>(address() + kPointerSize),
2071 reinterpret_cast<Address>(next); 2074 reinterpret_cast<AtomicWord>(next));
2072 } 2075 }
2073 } 2076 }
2074 2077
2075 2078
2076 intptr_t FreeListCategory::Concatenate(FreeListCategory* category) { 2079 intptr_t FreeListCategory::Concatenate(FreeListCategory* category) {
2077 intptr_t free_bytes = 0; 2080 intptr_t free_bytes = 0;
2078 if (category->top() != NULL) { 2081 if (category->top() != NULL) {
2079 // This is safe (not going to deadlock) since Concatenate operations 2082 // This is safe (not going to deadlock) since Concatenate operations
2080 // are never performed on the same free lists at the same time in 2083 // are never performed on the same free lists at the same time in
2081 // reverse order. 2084 // reverse order.
(...skipping 1117 matching lines...) Expand 10 before | Expand all | Expand 10 after
3199 object->ShortPrint(); 3202 object->ShortPrint();
3200 PrintF("\n"); 3203 PrintF("\n");
3201 } 3204 }
3202 printf(" --------------------------------------\n"); 3205 printf(" --------------------------------------\n");
3203 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); 3206 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes());
3204 } 3207 }
3205 3208
3206 #endif // DEBUG 3209 #endif // DEBUG
3207 3210
3208 } } // namespace v8::internal 3211 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/objects-inl.h ('k') | src/store-buffer.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698