Diff: src/objects.cc

Issue 1004623003: Revert of Reland of Remove slots that point to unboxed doubles from the StoreBuffer/SlotsBuffer. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 9 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include <iomanip>
 #include <sstream>

 #include "src/v8.h"

 #include "src/accessors.h"
(...skipping 1904 matching lines...)
     // For slow-to-fast migrations JSObject::TransformToFastProperties()
     // must be used instead.
     CHECK(new_map->is_dictionary_map());

     // Slow-to-slow migration is trivial.
     object->set_map(*new_map);
   }
 }

-// Returns true if during migration from |old_map| to |new_map| "tagged"
-// inobject fields are going to be replaced with unboxed double fields.
-static bool ShouldClearSlotsRecorded(Map* old_map, Map* new_map,
-                                     int new_number_of_fields) {
-  DisallowHeapAllocation no_gc;
-  int inobject = new_map->inobject_properties();
-  DCHECK(inobject <= old_map->inobject_properties());
-
-  int limit = Min(inobject, new_number_of_fields);
-  for (int i = 0; i < limit; i++) {
-    FieldIndex index = FieldIndex::ForPropertyIndex(new_map, i);
-    if (new_map->IsUnboxedDoubleField(index) &&
-        !old_map->IsUnboxedDoubleField(index)) {
-      return true;
-    }
-  }
-  return false;
-}
-
-
-static void RemoveOldToOldSlotsRecorded(Heap* heap, JSObject* object,
-                                        FieldIndex index) {
-  DisallowHeapAllocation no_gc;
-
-  Object* old_value = object->RawFastPropertyAt(index);
-  if (old_value->IsHeapObject()) {
-    HeapObject* ho = HeapObject::cast(old_value);
-    if (heap->InNewSpace(ho)) {
-      // At this point there must be no old-to-new slots recorded for this
-      // object.
-      SLOW_DCHECK(
-          !heap->store_buffer()->CellIsInStoreBuffer(reinterpret_cast<Address>(
-              HeapObject::RawField(object, index.offset()))));
-    } else {
-      Page* p = Page::FromAddress(reinterpret_cast<Address>(ho));
-      if (p->IsEvacuationCandidate()) {
-        Object** slot = HeapObject::RawField(object, index.offset());
-        SlotsBuffer::RemoveSlot(p->slots_buffer(), slot);
-      }
-    }
-  }
-}
-
-
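The two helpers removed above exist because of a GC hazard: once a "tagged" in-object field is re-typed as an unboxed double, any slot still recorded for it in the StoreBuffer or in a page's SlotsBuffer would later be visited as if it held a tagged pointer, even though it now holds raw double bits. The standalone sketch below only illustrates that ordering requirement; it is not V8 code, and every name in it (RememberedSet, Field, RetypeToUnboxedDouble) is invented for the illustration.

// Illustrative sketch only -- not V8 code. RememberedSet, Field and
// RetypeToUnboxedDouble are invented stand-ins for the StoreBuffer/SlotsBuffer,
// an in-object field, and the migration step.
#include <cstdint>
#include <set>

using Address = uintptr_t;

// Stand-in for the StoreBuffer/SlotsBuffer: a set of slot addresses that the
// GC will later visit and treat as locations holding tagged pointers.
struct RememberedSet {
  std::set<Address> slots;
  void Record(Address slot) { slots.insert(slot); }
  void Remove(Address slot) { slots.erase(slot); }
};

// An in-object field that is either a tagged pointer or a raw (unboxed)
// double, depending on the object's current map.
union Field {
  void* tagged;    // may have been recorded while the field was tagged
  double unboxed;  // raw bits; must never be interpreted as a pointer
};

// When migration re-types the field from tagged to unboxed double, the slot
// has to be dropped from the remembered set *before* the raw double is
// written; otherwise the GC would dereference the double's bit pattern.
void RetypeToUnboxedDouble(Field* field, double value, RememberedSet* set) {
  set->Remove(reinterpret_cast<Address>(field));
  field->unboxed = value;
}

In the patch being reverted, the same ordering was enforced by MigrateFastToFast calling StoreBuffer::RemoveSlots and RemoveOldToOldSlotsRecorded before writing the raw double, as shown further down in this diff.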
 // To migrate a fast instance to a fast map:
 // - First check whether the instance needs to be rewritten. If not, simply
 //   change the map.
 // - Otherwise, allocate a fixed array large enough to hold all fields, in
 //   addition to unused space.
 // - Copy all existing properties in, in the following order: backing store
 //   properties, unused fields, inobject properties.
 // - If all allocation succeeded, commit the state atomically:
 //   * Copy inobject properties from the backing store back into the object.
 //   * Trim the difference in instance size of the object. This also cleanly
(...skipping 133 matching lines...)
       value = isolate->factory()->uninitialized_value();
     }
     int target_index = new_descriptors->GetFieldIndex(i) - inobject;
     if (target_index < 0) target_index += total_size;
     array->set(target_index, *value);
   }

   // From here on we cannot fail and we shouldn't GC anymore.
   DisallowHeapAllocation no_allocation;

-  Heap* heap = isolate->heap();
-
-  // If we are going to put an unboxed double to the field that used to
-  // contain HeapObject we should ensure that this slot is removed from
-  // both StoreBuffer and respective SlotsBuffer.
-  bool clear_slots_recorded =
-      FLAG_unbox_double_fields && !heap->InNewSpace(object->address()) &&
-      ShouldClearSlotsRecorded(*old_map, *new_map, number_of_fields);
-  if (clear_slots_recorded) {
-    Address obj_address = object->address();
-    Address start_address = obj_address + JSObject::kHeaderSize;
-    Address end_address = obj_address + old_map->instance_size();
-    heap->store_buffer()->RemoveSlots(start_address, end_address);
-  }
-
   // Copy (real) inobject properties. If necessary, stop at number_of_fields to
   // avoid overwriting |one_pointer_filler_map|.
   int limit = Min(inobject, number_of_fields);
   for (int i = 0; i < limit; i++) {
     FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
     Object* value = array->get(external + i);
+    // Can't use JSObject::FastPropertyAtPut() because proper map was not set
+    // yet.
     if (new_map->IsUnboxedDoubleField(index)) {
       DCHECK(value->IsMutableHeapNumber());
-      if (clear_slots_recorded && !old_map->IsUnboxedDoubleField(index)) {
-        RemoveOldToOldSlotsRecorded(heap, *object, index);
-      }
       object->RawFastDoublePropertyAtPut(index,
                                          HeapNumber::cast(value)->value());
     } else {
       object->RawFastPropertyAtPut(index, value);
     }
   }

+  Heap* heap = isolate->heap();
+
   // If there are properties in the new backing store, trim it to the correct
   // size and install the backing store into the object.
   if (external > 0) {
     heap->RightTrimFixedArray<Heap::FROM_MUTATOR>(*array, inobject);
     object->set_properties(*array);
   }

   // Create filler object past the new instance size.
   int new_instance_size = new_map->instance_size();
   int instance_size_delta = old_map->instance_size() - new_instance_size;
(...skipping 14986 matching lines...)
                                                CompilationInfo* info) {
   Handle<DependentCode> codes = DependentCode::InsertCompilationInfo(
       handle(cell->dependent_code(), info->isolate()),
       DependentCode::kPropertyCellChangedGroup, info->object_wrapper());
   if (*codes != cell->dependent_code()) cell->set_dependent_code(*codes);
   info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add(
       cell, info->zone());
 }

 } }  // namespace v8::internal
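To make the removed MigrateFastToFast hunk above more concrete: it cleared every recorded slot in the address range from object + JSObject::kHeaderSize up to object + old_map->instance_size(), i.e. the whole in-object field area of the old layout, before any field could be overwritten with raw double bits. The self-contained sketch below only walks through that arithmetic with a toy remembered set; the pointer size, header size, and field count are assumptions made for this example, not values taken from the CL.

// Illustrative arithmetic only -- not V8 code. kPointerSize, kHeaderSize and
// the number of in-object fields are assumed values for this sketch.
#include <cassert>
#include <cstdint>
#include <set>

int main() {
  std::set<uintptr_t> recorded_slots;          // stand-in for the StoreBuffer
  const uintptr_t object_address = 0x100000;   // hypothetical object start
  const int kPointerSize = 8;
  const int kHeaderSize = 3 * kPointerSize;    // assumed header size
  const int old_instance_size = kHeaderSize + 4 * kPointerSize;  // 4 old fields

  // Pretend the write barrier recorded a slot for the third in-object field.
  recorded_slots.insert(object_address + kHeaderSize + 2 * kPointerSize);

  // The removed code called StoreBuffer::RemoveSlots(start, end) over the
  // whole in-object area of the *old* map, which covers that slot.
  const uintptr_t start = object_address + kHeaderSize;
  const uintptr_t end = object_address + old_instance_size;
  for (auto it = recorded_slots.lower_bound(start);
       it != recorded_slots.end() && *it < end;) {
    it = recorded_slots.erase(it);
  }
  assert(recorded_slots.empty());
  return 0;
}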