OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 135 matching lines...) | |
146 } | 146 } |
147 | 147 |
148 | 148 |
149 static void CopyObjectToObjectElements(FixedArray* from, | 149 static void CopyObjectToObjectElements(FixedArray* from, |
150 ElementsKind from_kind, | 150 ElementsKind from_kind, |
151 uint32_t from_start, | 151 uint32_t from_start, |
152 FixedArray* to, | 152 FixedArray* to, |
153 ElementsKind to_kind, | 153 ElementsKind to_kind, |
154 uint32_t to_start, | 154 uint32_t to_start, |
155 int raw_copy_size) { | 155 int raw_copy_size) { |
156 ASSERT(to->map() != HEAP->fixed_cow_array_map()); | 156 ASSERT(to->map() != HEAP->fixed_cow_array_map()); |
Michael Starzinger
2012/11/27 11:53:56
We are assuming that this function does not allocate …
Toon Verwaest
2012/11/27 11:59:30
Done.
| |
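For context on the thread above: the CL replaces a DEBUG-only assertion loop (which merely checked that the trailing slots already held the hole) with unconditional initialization, writing the hole into every slot past the copied region via MemsetPointer. Below is a minimal standalone sketch of that pattern, using simplified stand-in types rather than the real v8::internal classes.

// Sketch of the hole-initialization pattern applied in this CL.
// Object, the_hole, and MemsetPointer are simplified stand-ins.
#include <algorithm>

namespace sketch {

struct Object {};        // stand-in for v8::internal::Object
static Object the_hole;  // stand-in for heap->the_hole_value()

// Analogous to v8's MemsetPointer: fill |length| pointer-sized slots
// starting at |dest| with |value|.
inline void MemsetPointer(Object** dest, Object* value, int length) {
  std::fill(dest, dest + length, value);
}

// Unconditionally write the hole into every slot past the copied region
// [to_start, to_start + copy_size), instead of asserting in debug builds
// that those slots are already holes.
void InitializeTailWithHoles(Object** data, int array_length,
                             int to_start, int copy_size) {
  int start = to_start + copy_size;
  int length = array_length - start;
  if (length > 0) {
    MemsetPointer(data + start, &the_hole, length);
  }
}

}  // namespace sketch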
157 int copy_size = raw_copy_size; | 157 int copy_size = raw_copy_size; |
158 if (raw_copy_size < 0) { | 158 if (raw_copy_size < 0) { |
159 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 159 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || |
160 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 160 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
161 copy_size = Min(from->length() - from_start, | 161 copy_size = Min(from->length() - from_start, |
162 to->length() - to_start); | 162 to->length() - to_start); |
163 #ifdef DEBUG | |
164 // FAST_*_ELEMENTS arrays cannot be uninitialized. Ensure they are already | |
165 // marked with the hole. | |
166 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 163 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
167 for (int i = to_start + copy_size; i < to->length(); ++i) { | 164 int start = to_start + copy_size; |
168 ASSERT(to->get(i)->IsTheHole()); | 165 int length = to->length() - start; |
166 if (length > 0) { | |
167 Heap* heap = from->GetHeap(); | |
168 MemsetPointer(to->data_start() + start, heap->the_hole_value(), length); | |
169 } | 169 } |
170 } | 170 } |
171 #endif | |
172 } | 171 } |
173 ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() && | 172 ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() && |
174 (copy_size + static_cast<int>(from_start)) <= from->length()); | 173 (copy_size + static_cast<int>(from_start)) <= from->length()); |
175 if (copy_size == 0) return; | 174 if (copy_size == 0) return; |
176 ASSERT(IsFastSmiOrObjectElementsKind(from_kind)); | 175 ASSERT(IsFastSmiOrObjectElementsKind(from_kind)); |
177 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); | 176 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); |
178 Address to_address = to->address() + FixedArray::kHeaderSize; | 177 Address to_address = to->address() + FixedArray::kHeaderSize; |
179 Address from_address = from->address() + FixedArray::kHeaderSize; | 178 Address from_address = from->address() + FixedArray::kHeaderSize; |
180 CopyWords(reinterpret_cast<Object**>(to_address) + to_start, | 179 CopyWords(reinterpret_cast<Object**>(to_address) + to_start, |
181 reinterpret_cast<Object**>(from_address) + from_start, | 180 reinterpret_cast<Object**>(from_address) + from_start, |
(...skipping 10 matching lines...) | |
192 } | 191 } |
193 } | 192 } |
194 | 193 |
195 | 194 |
196 static void CopyDictionaryToObjectElements(SeededNumberDictionary* from, | 195 static void CopyDictionaryToObjectElements(SeededNumberDictionary* from, |
197 uint32_t from_start, | 196 uint32_t from_start, |
198 FixedArray* to, | 197 FixedArray* to, |
199 ElementsKind to_kind, | 198 ElementsKind to_kind, |
200 uint32_t to_start, | 199 uint32_t to_start, |
201 int raw_copy_size) { | 200 int raw_copy_size) { |
202 int copy_size = raw_copy_size; | 201 int copy_size = raw_copy_size; |
Michael Starzinger
2012/11/27 11:53:56
Likewise.
Toon Verwaest
2012/11/27 11:59:30
Done.
| |
203 Heap* heap = from->GetHeap(); | 202 Heap* heap = from->GetHeap(); |
204 if (raw_copy_size < 0) { | 203 if (raw_copy_size < 0) { |
205 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 204 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || |
206 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 205 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
207 copy_size = from->max_number_key() + 1 - from_start; | 206 copy_size = from->max_number_key() + 1 - from_start; |
208 #ifdef DEBUG | |
209 // Fast object arrays cannot be uninitialized. Ensure they are already | |
210 // marked with the hole. | |
211 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 207 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
212 for (int i = to_start + copy_size; i < to->length(); ++i) { | 208 int start = to_start + copy_size; |
213 ASSERT(to->get(i)->IsTheHole()); | 209 int length = to->length() - start; |
210 if (length > 0) { | |
211 MemsetPointer(to->data_start() + start, | |
212 heap->the_hole_value(), length); | |
214 } | 213 } |
215 } | 214 } |
216 #endif | |
217 } | 215 } |
218 ASSERT(to != from); | 216 ASSERT(to != from); |
219 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); | 217 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); |
220 if (copy_size == 0) return; | 218 if (copy_size == 0) return; |
221 uint32_t to_length = to->length(); | 219 uint32_t to_length = to->length(); |
222 if (to_start + copy_size > to_length) { | 220 if (to_start + copy_size > to_length) { |
223 copy_size = to_length - to_start; | 221 copy_size = to_length - to_start; |
224 } | 222 } |
225 for (int i = 0; i < copy_size; i++) { | 223 for (int i = 0; i < copy_size; i++) { |
226 int entry = from->FindEntry(i + from_start); | 224 int entry = from->FindEntry(i + from_start); |
(...skipping 23 matching lines...) | |
250 ElementsKind to_kind, | 248 ElementsKind to_kind, |
251 uint32_t to_start, | 249 uint32_t to_start, |
252 int raw_copy_size) { | 250 int raw_copy_size) { |
253 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); | 251 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); |
254 int copy_size = raw_copy_size; | 252 int copy_size = raw_copy_size; |
255 if (raw_copy_size < 0) { | 253 if (raw_copy_size < 0) { |
256 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 254 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || |
257 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 255 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
258 copy_size = Min(from->length() - from_start, | 256 copy_size = Min(from->length() - from_start, |
259 to->length() - to_start); | 257 to->length() - to_start); |
260 #ifdef DEBUG | |
261 // FAST_*_ELEMENTS arrays cannot be uninitialized. Ensure they are already | |
262 // marked with the hole. | |
263 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 258 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
264 for (int i = to_start + copy_size; i < to->length(); ++i) { | 259 // Also initialize the area that will be copied over since HeapNumber |
265 ASSERT(to->get(i)->IsTheHole()); | 260 // allocation below can cause an incremental marking step, requiring all |
261 // existing heap objects to be properly initialized. | |
262 int start = to_start; | |
263 int length = to->length() - start; | |
264 if (length > 0) { | |
265 Heap* heap = from->GetHeap(); | |
266 MemsetPointer(to->data_start() + start, heap->the_hole_value(), length); | |
266 } | 267 } |
267 } | 268 } |
268 #endif | |
269 } | 269 } |
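Worth noting why this variant initializes from to_start rather than to_start + copy_size, as the new comment explains: the copy loop below allocates a HeapNumber for each element, any of those allocations can start an incremental marking step, and that step may scan the destination array, so even the slots about to be overwritten must already hold valid pointers. A standalone sketch of the hazard follows, with hypothetical names rather than the real V8 API.

// Sketch: an allocating copy loop is GC-safe only if the destination is
// fully initialized first. All names here are hypothetical stand-ins.
#include <cstddef>
#include <vector>

namespace sketch {

struct Object {};
struct HeapNumber : Object {
  explicit HeapNumber(double v) : value(v) {}
  double value;
};
static Object the_hole;

// Stand-in for an allocation that may run a GC step; a real incremental
// marking step could visit every slot of every live array.
HeapNumber* AllocateHeapNumber(double v) { return new HeapNumber(v); }

void CopyDoublesToObjects(const std::vector<double>& from,
                          std::vector<Object*>* to, size_t to_start) {
  // Pre-fill the entire destination range with the hole, including slots
  // the loop below will overwrite: a GC scan triggered inside
  // AllocateHeapNumber must only ever see valid pointers.
  for (size_t i = to_start; i < to->size(); ++i) (*to)[i] = &the_hole;
  for (size_t i = 0; i < from.size(); ++i) {
    (*to)[to_start + i] = AllocateHeapNumber(from[i]);  // may trigger GC
  }
}

}  // namespace sketch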
270 ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() && | 270 ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() && |
271 (copy_size + static_cast<int>(from_start)) <= from->length()); | 271 (copy_size + static_cast<int>(from_start)) <= from->length()); |
272 if (copy_size == 0) return from; | 272 if (copy_size == 0) return from; |
273 for (int i = 0; i < copy_size; ++i) { | 273 for (int i = 0; i < copy_size; ++i) { |
274 if (IsFastSmiElementsKind(to_kind)) { | 274 if (IsFastSmiElementsKind(to_kind)) { |
275 UNIMPLEMENTED(); | 275 UNIMPLEMENTED(); |
276 return Failure::Exception(); | 276 return Failure::Exception(); |
277 } else { | 277 } else { |
278 MaybeObject* maybe_value = from->get(i + from_start); | 278 MaybeObject* maybe_value = from->get(i + from_start); |
(...skipping 1576 matching lines...) | |
1855 if (!maybe_obj->To(&new_backing_store)) return maybe_obj; | 1855 if (!maybe_obj->To(&new_backing_store)) return maybe_obj; |
1856 new_backing_store->set(0, length); | 1856 new_backing_store->set(0, length); |
1857 { MaybeObject* result = array->SetContent(new_backing_store); | 1857 { MaybeObject* result = array->SetContent(new_backing_store); |
1858 if (result->IsFailure()) return result; | 1858 if (result->IsFailure()) return result; |
1859 } | 1859 } |
1860 return array; | 1860 return array; |
1861 } | 1861 } |
1862 | 1862 |
1863 | 1863 |
1864 } } // namespace v8::internal | 1864 } } // namespace v8::internal |