| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/arguments.h" | 7 #include "src/arguments.h" |
| 8 #include "src/conversions.h" | 8 #include "src/conversions.h" |
| 9 #include "src/elements.h" | 9 #include "src/elements.h" |
| 10 #include "src/objects.h" | 10 #include "src/objects.h" |
| (...skipping 135 matching lines...) |
| 146 HandleVector<Object>(NULL, 0))); | 146 HandleVector<Object>(NULL, 0))); |
| 147 } | 147 } |
| 148 | 148 |
| 149 | 149 |
| 150 static void CopyObjectToObjectElements(FixedArrayBase* from_base, | 150 static void CopyObjectToObjectElements(FixedArrayBase* from_base, |
| 151 ElementsKind from_kind, | 151 ElementsKind from_kind, |
| 152 uint32_t from_start, | 152 uint32_t from_start, |
| 153 FixedArrayBase* to_base, | 153 FixedArrayBase* to_base, |
| 154 ElementsKind to_kind, uint32_t to_start, | 154 ElementsKind to_kind, uint32_t to_start, |
| 155 int raw_copy_size) { | 155 int raw_copy_size) { |
| 156 ASSERT(to_base->map() != | 156 DCHECK(to_base->map() != |
| 157 from_base->GetIsolate()->heap()->fixed_cow_array_map()); | 157 from_base->GetIsolate()->heap()->fixed_cow_array_map()); |
| 158 DisallowHeapAllocation no_allocation; | 158 DisallowHeapAllocation no_allocation; |
| 159 int copy_size = raw_copy_size; | 159 int copy_size = raw_copy_size; |
| 160 if (raw_copy_size < 0) { | 160 if (raw_copy_size < 0) { |
| 161 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 161 DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || |
| 162 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 162 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
| 163 copy_size = Min(from_base->length() - from_start, | 163 copy_size = Min(from_base->length() - from_start, |
| 164 to_base->length() - to_start); | 164 to_base->length() - to_start); |
| 165 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 165 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
| 166 int start = to_start + copy_size; | 166 int start = to_start + copy_size; |
| 167 int length = to_base->length() - start; | 167 int length = to_base->length() - start; |
| 168 if (length > 0) { | 168 if (length > 0) { |
| 169 Heap* heap = from_base->GetHeap(); | 169 Heap* heap = from_base->GetHeap(); |
| 170 MemsetPointer(FixedArray::cast(to_base)->data_start() + start, | 170 MemsetPointer(FixedArray::cast(to_base)->data_start() + start, |
| 171 heap->the_hole_value(), length); | 171 heap->the_hole_value(), length); |
| 172 } | 172 } |
| 173 } | 173 } |
| 174 } | 174 } |
| 175 ASSERT((copy_size + static_cast<int>(to_start)) <= to_base->length() && | 175 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() && |
| 176 (copy_size + static_cast<int>(from_start)) <= from_base->length()); | 176 (copy_size + static_cast<int>(from_start)) <= from_base->length()); |
| 177 if (copy_size == 0) return; | 177 if (copy_size == 0) return; |
| 178 FixedArray* from = FixedArray::cast(from_base); | 178 FixedArray* from = FixedArray::cast(from_base); |
| 179 FixedArray* to = FixedArray::cast(to_base); | 179 FixedArray* to = FixedArray::cast(to_base); |
| 180 ASSERT(IsFastSmiOrObjectElementsKind(from_kind)); | 180 DCHECK(IsFastSmiOrObjectElementsKind(from_kind)); |
| 181 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); | 181 DCHECK(IsFastSmiOrObjectElementsKind(to_kind)); |
| 182 Address to_address = to->address() + FixedArray::kHeaderSize; | 182 Address to_address = to->address() + FixedArray::kHeaderSize; |
| 183 Address from_address = from->address() + FixedArray::kHeaderSize; | 183 Address from_address = from->address() + FixedArray::kHeaderSize; |
| 184 CopyWords(reinterpret_cast<Object**>(to_address) + to_start, | 184 CopyWords(reinterpret_cast<Object**>(to_address) + to_start, |
| 185 reinterpret_cast<Object**>(from_address) + from_start, | 185 reinterpret_cast<Object**>(from_address) + from_start, |
| 186 static_cast<size_t>(copy_size)); | 186 static_cast<size_t>(copy_size)); |
| 187 if (IsFastObjectElementsKind(from_kind) && | 187 if (IsFastObjectElementsKind(from_kind) && |
| 188 IsFastObjectElementsKind(to_kind)) { | 188 IsFastObjectElementsKind(to_kind)) { |
| 189 Heap* heap = from->GetHeap(); | 189 Heap* heap = from->GetHeap(); |
| 190 if (!heap->InNewSpace(to)) { | 190 if (!heap->InNewSpace(to)) { |
| 191 heap->RecordWrites(to->address(), | 191 heap->RecordWrites(to->address(), |
| 192 to->OffsetOfElementAt(to_start), | 192 to->OffsetOfElementAt(to_start), |
| 193 copy_size); | 193 copy_size); |
| 194 } | 194 } |
| 195 heap->incremental_marking()->RecordWrites(to); | 195 heap->incremental_marking()->RecordWrites(to); |
| 196 } | 196 } |
| 197 } | 197 } |
| 198 | 198 |
| 199 | 199 |
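Every Copy*Elements helper in this file shares the same convention for raw_copy_size: a non-negative value is taken verbatim, while the sentinels kCopyToEnd and kCopyToEndAndInitializeToHole tell the helper to derive the size itself and, in the latter case, to fill the destination tail with the hole. A minimal standalone sketch of that convention follows; the constants, the kHole marker, and the std::vector backing stores are illustrative stand-ins, not V8's types.

    #include <algorithm>
    #include <cassert>
    #include <vector>

    // Illustrative stand-ins for ElementsAccessor::kCopyToEnd and
    // ElementsAccessor::kCopyToEndAndInitializeToHole.
    constexpr int kCopyToEnd = -1;
    constexpr int kCopyToEndAndInitializeToHole = -2;
    constexpr long kHole = -1;  // stand-in for the-hole

    // Mirrors how the helpers resolve raw_copy_size: negative sentinels mean
    // "copy to the end", and the ...AndInitializeToHole variant also holes out
    // whatever is left of the destination.
    void CopyElementsSketch(const std::vector<long>& from, int from_start,
                            std::vector<long>& to, int to_start,
                            int raw_copy_size) {
      int copy_size = raw_copy_size;
      if (raw_copy_size < 0) {
        assert(raw_copy_size == kCopyToEnd ||
               raw_copy_size == kCopyToEndAndInitializeToHole);
        copy_size = std::min(static_cast<int>(from.size()) - from_start,
                             static_cast<int>(to.size()) - to_start);
      }
      assert(copy_size >= 0 &&
             from_start + copy_size <= static_cast<int>(from.size()) &&
             to_start + copy_size <= static_cast<int>(to.size()));
      if (raw_copy_size == kCopyToEndAndInitializeToHole) {
        // Everything in the destination past the copied region becomes a hole.
        std::fill(to.begin() + to_start + copy_size, to.end(), kHole);
      }
      std::copy(from.begin() + from_start,
                from.begin() + from_start + copy_size,
                to.begin() + to_start);
    }

This is the size-resolution step that CopyObjectToObjectElements above and the dictionary/double variants below all perform before doing their kind-specific copy.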
| 200 static void CopyDictionaryToObjectElements( | 200 static void CopyDictionaryToObjectElements( |
| 201 FixedArrayBase* from_base, uint32_t from_start, FixedArrayBase* to_base, | 201 FixedArrayBase* from_base, uint32_t from_start, FixedArrayBase* to_base, |
| 202 ElementsKind to_kind, uint32_t to_start, int raw_copy_size) { | 202 ElementsKind to_kind, uint32_t to_start, int raw_copy_size) { |
| 203 DisallowHeapAllocation no_allocation; | 203 DisallowHeapAllocation no_allocation; |
| 204 SeededNumberDictionary* from = SeededNumberDictionary::cast(from_base); | 204 SeededNumberDictionary* from = SeededNumberDictionary::cast(from_base); |
| 205 int copy_size = raw_copy_size; | 205 int copy_size = raw_copy_size; |
| 206 Heap* heap = from->GetHeap(); | 206 Heap* heap = from->GetHeap(); |
| 207 if (raw_copy_size < 0) { | 207 if (raw_copy_size < 0) { |
| 208 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 208 DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || |
| 209 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 209 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
| 210 copy_size = from->max_number_key() + 1 - from_start; | 210 copy_size = from->max_number_key() + 1 - from_start; |
| 211 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 211 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
| 212 int start = to_start + copy_size; | 212 int start = to_start + copy_size; |
| 213 int length = to_base->length() - start; | 213 int length = to_base->length() - start; |
| 214 if (length > 0) { | 214 if (length > 0) { |
| 215 Heap* heap = from->GetHeap(); | 215 Heap* heap = from->GetHeap(); |
| 216 MemsetPointer(FixedArray::cast(to_base)->data_start() + start, | 216 MemsetPointer(FixedArray::cast(to_base)->data_start() + start, |
| 217 heap->the_hole_value(), length); | 217 heap->the_hole_value(), length); |
| 218 } | 218 } |
| 219 } | 219 } |
| 220 } | 220 } |
| 221 ASSERT(to_base != from_base); | 221 DCHECK(to_base != from_base); |
| 222 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); | 222 DCHECK(IsFastSmiOrObjectElementsKind(to_kind)); |
| 223 if (copy_size == 0) return; | 223 if (copy_size == 0) return; |
| 224 FixedArray* to = FixedArray::cast(to_base); | 224 FixedArray* to = FixedArray::cast(to_base); |
| 225 uint32_t to_length = to->length(); | 225 uint32_t to_length = to->length(); |
| 226 if (to_start + copy_size > to_length) { | 226 if (to_start + copy_size > to_length) { |
| 227 copy_size = to_length - to_start; | 227 copy_size = to_length - to_start; |
| 228 } | 228 } |
| 229 for (int i = 0; i < copy_size; i++) { | 229 for (int i = 0; i < copy_size; i++) { |
| 230 int entry = from->FindEntry(i + from_start); | 230 int entry = from->FindEntry(i + from_start); |
| 231 if (entry != SeededNumberDictionary::kNotFound) { | 231 if (entry != SeededNumberDictionary::kNotFound) { |
| 232 Object* value = from->ValueAt(entry); | 232 Object* value = from->ValueAt(entry); |
| 233 ASSERT(!value->IsTheHole()); | 233 DCHECK(!value->IsTheHole()); |
| 234 to->set(i + to_start, value, SKIP_WRITE_BARRIER); | 234 to->set(i + to_start, value, SKIP_WRITE_BARRIER); |
| 235 } else { | 235 } else { |
| 236 to->set_the_hole(i + to_start); | 236 to->set_the_hole(i + to_start); |
| 237 } | 237 } |
| 238 } | 238 } |
| 239 if (IsFastObjectElementsKind(to_kind)) { | 239 if (IsFastObjectElementsKind(to_kind)) { |
| 240 if (!heap->InNewSpace(to)) { | 240 if (!heap->InNewSpace(to)) { |
| 241 heap->RecordWrites(to->address(), | 241 heap->RecordWrites(to->address(), |
| 242 to->OffsetOfElementAt(to_start), | 242 to->OffsetOfElementAt(to_start), |
| 243 copy_size); | 243 copy_size); |
| 244 } | 244 } |
| 245 heap->incremental_marking()->RecordWrites(to); | 245 heap->incremental_marking()->RecordWrites(to); |
| 246 } | 246 } |
| 247 } | 247 } |
| 248 | 248 |
| 249 | 249 |
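CopyDictionaryToObjectElements above has a different inner loop: for every destination index it looks the key up in the number dictionary and writes either the stored value or the hole. A simplified standalone equivalent over an std::unordered_map, where the map and kHole are stand-ins for SeededNumberDictionary and the-hole rather than V8 API:

    #include <cstdint>
    #include <unordered_map>
    #include <vector>

    constexpr long kHole = -1;  // stand-in for the-hole

    // Same shape as the FindEntry() loop: present keys are copied, absent ones
    // become holes, and copy_size is clamped to the destination length first.
    void CopyDictionaryToFlat(const std::unordered_map<uint32_t, long>& from,
                              uint32_t from_start, std::vector<long>& to,
                              uint32_t to_start, uint32_t copy_size) {
      if (to_start + copy_size > to.size()) {
        copy_size = static_cast<uint32_t>(to.size() - to_start);
      }
      for (uint32_t i = 0; i < copy_size; i++) {
        auto it = from.find(i + from_start);
        to[i + to_start] = (it != from.end()) ? it->second : kHole;
      }
    }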
| 250 static void CopyDoubleToObjectElements(Handle<FixedArrayBase> from_base, | 250 static void CopyDoubleToObjectElements(Handle<FixedArrayBase> from_base, |
| 251 uint32_t from_start, | 251 uint32_t from_start, |
| 252 Handle<FixedArrayBase> to_base, | 252 Handle<FixedArrayBase> to_base, |
| 253 ElementsKind to_kind, | 253 ElementsKind to_kind, |
| 254 uint32_t to_start, | 254 uint32_t to_start, |
| 255 int raw_copy_size) { | 255 int raw_copy_size) { |
| 256 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); | 256 DCHECK(IsFastSmiOrObjectElementsKind(to_kind)); |
| 257 int copy_size = raw_copy_size; | 257 int copy_size = raw_copy_size; |
| 258 if (raw_copy_size < 0) { | 258 if (raw_copy_size < 0) { |
| 259 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 259 DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || |
| 260 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 260 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
| 261 copy_size = Min(from_base->length() - from_start, | 261 copy_size = Min(from_base->length() - from_start, |
| 262 to_base->length() - to_start); | 262 to_base->length() - to_start); |
| 263 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 263 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
| 264 // Also initialize the area that will be copied over since HeapNumber | 264 // Also initialize the area that will be copied over since HeapNumber |
| 265 // allocation below can cause an incremental marking step, requiring all | 265 // allocation below can cause an incremental marking step, requiring all |
| 266 // existing heap objects to be properly initialized. | 266 // existing heap objects to be properly initialized. |
| 267 int start = to_start; | 267 int start = to_start; |
| 268 int length = to_base->length() - start; | 268 int length = to_base->length() - start; |
| 269 if (length > 0) { | 269 if (length > 0) { |
| 270 Heap* heap = from_base->GetHeap(); | 270 Heap* heap = from_base->GetHeap(); |
| 271 MemsetPointer(FixedArray::cast(*to_base)->data_start() + start, | 271 MemsetPointer(FixedArray::cast(*to_base)->data_start() + start, |
| 272 heap->the_hole_value(), length); | 272 heap->the_hole_value(), length); |
| 273 } | 273 } |
| 274 } | 274 } |
| 275 } | 275 } |
| 276 ASSERT((copy_size + static_cast<int>(to_start)) <= to_base->length() && | 276 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() && |
| 277 (copy_size + static_cast<int>(from_start)) <= from_base->length()); | 277 (copy_size + static_cast<int>(from_start)) <= from_base->length()); |
| 278 if (copy_size == 0) return; | 278 if (copy_size == 0) return; |
| 279 Isolate* isolate = from_base->GetIsolate(); | 279 Isolate* isolate = from_base->GetIsolate(); |
| 280 Handle<FixedDoubleArray> from = Handle<FixedDoubleArray>::cast(from_base); | 280 Handle<FixedDoubleArray> from = Handle<FixedDoubleArray>::cast(from_base); |
| 281 Handle<FixedArray> to = Handle<FixedArray>::cast(to_base); | 281 Handle<FixedArray> to = Handle<FixedArray>::cast(to_base); |
| 282 for (int i = 0; i < copy_size; ++i) { | 282 for (int i = 0; i < copy_size; ++i) { |
| 283 HandleScope scope(isolate); | 283 HandleScope scope(isolate); |
| 284 if (IsFastSmiElementsKind(to_kind)) { | 284 if (IsFastSmiElementsKind(to_kind)) { |
| 285 UNIMPLEMENTED(); | 285 UNIMPLEMENTED(); |
| 286 } else { | 286 } else { |
| 287 ASSERT(IsFastObjectElementsKind(to_kind)); | 287 DCHECK(IsFastObjectElementsKind(to_kind)); |
| 288 Handle<Object> value = FixedDoubleArray::get(from, i + from_start); | 288 Handle<Object> value = FixedDoubleArray::get(from, i + from_start); |
| 289 to->set(i + to_start, *value, UPDATE_WRITE_BARRIER); | 289 to->set(i + to_start, *value, UPDATE_WRITE_BARRIER); |
| 290 } | 290 } |
| 291 } | 291 } |
| 292 } | 292 } |
| 293 | 293 |
| 294 | 294 |
| 295 static void CopyDoubleToDoubleElements(FixedArrayBase* from_base, | 295 static void CopyDoubleToDoubleElements(FixedArrayBase* from_base, |
| 296 uint32_t from_start, | 296 uint32_t from_start, |
| 297 FixedArrayBase* to_base, | 297 FixedArrayBase* to_base, |
| 298 uint32_t to_start, int raw_copy_size) { | 298 uint32_t to_start, int raw_copy_size) { |
| 299 DisallowHeapAllocation no_allocation; | 299 DisallowHeapAllocation no_allocation; |
| 300 int copy_size = raw_copy_size; | 300 int copy_size = raw_copy_size; |
| 301 if (raw_copy_size < 0) { | 301 if (raw_copy_size < 0) { |
| 302 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 302 DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || |
| 303 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 303 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
| 304 copy_size = Min(from_base->length() - from_start, | 304 copy_size = Min(from_base->length() - from_start, |
| 305 to_base->length() - to_start); | 305 to_base->length() - to_start); |
| 306 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 306 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
| 307 for (int i = to_start + copy_size; i < to_base->length(); ++i) { | 307 for (int i = to_start + copy_size; i < to_base->length(); ++i) { |
| 308 FixedDoubleArray::cast(to_base)->set_the_hole(i); | 308 FixedDoubleArray::cast(to_base)->set_the_hole(i); |
| 309 } | 309 } |
| 310 } | 310 } |
| 311 } | 311 } |
| 312 ASSERT((copy_size + static_cast<int>(to_start)) <= to_base->length() && | 312 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() && |
| 313 (copy_size + static_cast<int>(from_start)) <= from_base->length()); | 313 (copy_size + static_cast<int>(from_start)) <= from_base->length()); |
| 314 if (copy_size == 0) return; | 314 if (copy_size == 0) return; |
| 315 FixedDoubleArray* from = FixedDoubleArray::cast(from_base); | 315 FixedDoubleArray* from = FixedDoubleArray::cast(from_base); |
| 316 FixedDoubleArray* to = FixedDoubleArray::cast(to_base); | 316 FixedDoubleArray* to = FixedDoubleArray::cast(to_base); |
| 317 Address to_address = to->address() + FixedDoubleArray::kHeaderSize; | 317 Address to_address = to->address() + FixedDoubleArray::kHeaderSize; |
| 318 Address from_address = from->address() + FixedDoubleArray::kHeaderSize; | 318 Address from_address = from->address() + FixedDoubleArray::kHeaderSize; |
| 319 to_address += kDoubleSize * to_start; | 319 to_address += kDoubleSize * to_start; |
| 320 from_address += kDoubleSize * from_start; | 320 from_address += kDoubleSize * from_start; |
| 321 int words_per_double = (kDoubleSize / kPointerSize); | 321 int words_per_double = (kDoubleSize / kPointerSize); |
| 322 CopyWords(reinterpret_cast<Object**>(to_address), | 322 CopyWords(reinterpret_cast<Object**>(to_address), |
| 323 reinterpret_cast<Object**>(from_address), | 323 reinterpret_cast<Object**>(from_address), |
| 324 static_cast<size_t>(words_per_double * copy_size)); | 324 static_cast<size_t>(words_per_double * copy_size)); |
| 325 } | 325 } |
| 326 | 326 |
| 327 | 327 |
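CopyDoubleToDoubleElements above never boxes values: it computes how many pointer-sized words a double occupies (two on 32-bit targets, one on 64-bit) and block-copies words_per_double * copy_size words. A small standalone sketch of the same arithmetic, with the size constants taken from the host compiler rather than V8's own definitions:

    #include <cstddef>
    #include <cstring>
    #include <vector>

    // Assumed analogues of kDoubleSize and kPointerSize.
    constexpr size_t kDoubleSize = sizeof(double);   // 8
    constexpr size_t kPointerSize = sizeof(void*);   // 4 or 8, per target

    // Copies copy_size doubles as raw words, the way the real helper does.
    void CopyDoublesRaw(const std::vector<double>& from, size_t from_start,
                        std::vector<double>& to, size_t to_start,
                        size_t copy_size) {
      const size_t words_per_double = kDoubleSize / kPointerSize;
      const size_t bytes = words_per_double * copy_size * kPointerSize;
      std::memcpy(to.data() + to_start, from.data() + from_start, bytes);
    }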
| 328 static void CopySmiToDoubleElements(FixedArrayBase* from_base, | 328 static void CopySmiToDoubleElements(FixedArrayBase* from_base, |
| 329 uint32_t from_start, | 329 uint32_t from_start, |
| 330 FixedArrayBase* to_base, uint32_t to_start, | 330 FixedArrayBase* to_base, uint32_t to_start, |
| 331 int raw_copy_size) { | 331 int raw_copy_size) { |
| 332 DisallowHeapAllocation no_allocation; | 332 DisallowHeapAllocation no_allocation; |
| 333 int copy_size = raw_copy_size; | 333 int copy_size = raw_copy_size; |
| 334 if (raw_copy_size < 0) { | 334 if (raw_copy_size < 0) { |
| 335 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 335 DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || |
| 336 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 336 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
| 337 copy_size = from_base->length() - from_start; | 337 copy_size = from_base->length() - from_start; |
| 338 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 338 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
| 339 for (int i = to_start + copy_size; i < to_base->length(); ++i) { | 339 for (int i = to_start + copy_size; i < to_base->length(); ++i) { |
| 340 FixedDoubleArray::cast(to_base)->set_the_hole(i); | 340 FixedDoubleArray::cast(to_base)->set_the_hole(i); |
| 341 } | 341 } |
| 342 } | 342 } |
| 343 } | 343 } |
| 344 ASSERT((copy_size + static_cast<int>(to_start)) <= to_base->length() && | 344 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() && |
| 345 (copy_size + static_cast<int>(from_start)) <= from_base->length()); | 345 (copy_size + static_cast<int>(from_start)) <= from_base->length()); |
| 346 if (copy_size == 0) return; | 346 if (copy_size == 0) return; |
| 347 FixedArray* from = FixedArray::cast(from_base); | 347 FixedArray* from = FixedArray::cast(from_base); |
| 348 FixedDoubleArray* to = FixedDoubleArray::cast(to_base); | 348 FixedDoubleArray* to = FixedDoubleArray::cast(to_base); |
| 349 Object* the_hole = from->GetHeap()->the_hole_value(); | 349 Object* the_hole = from->GetHeap()->the_hole_value(); |
| 350 for (uint32_t from_end = from_start + static_cast<uint32_t>(copy_size); | 350 for (uint32_t from_end = from_start + static_cast<uint32_t>(copy_size); |
| 351 from_start < from_end; from_start++, to_start++) { | 351 from_start < from_end; from_start++, to_start++) { |
| 352 Object* hole_or_smi = from->get(from_start); | 352 Object* hole_or_smi = from->get(from_start); |
| 353 if (hole_or_smi == the_hole) { | 353 if (hole_or_smi == the_hole) { |
| 354 to->set_the_hole(to_start); | 354 to->set_the_hole(to_start); |
| 355 } else { | 355 } else { |
| 356 to->set(to_start, Smi::cast(hole_or_smi)->value()); | 356 to->set(to_start, Smi::cast(hole_or_smi)->value()); |
| 357 } | 357 } |
| 358 } | 358 } |
| 359 } | 359 } |
| 360 | 360 |
| 361 | 361 |
| 362 static void CopyPackedSmiToDoubleElements(FixedArrayBase* from_base, | 362 static void CopyPackedSmiToDoubleElements(FixedArrayBase* from_base, |
| 363 uint32_t from_start, | 363 uint32_t from_start, |
| 364 FixedArrayBase* to_base, | 364 FixedArrayBase* to_base, |
| 365 uint32_t to_start, int packed_size, | 365 uint32_t to_start, int packed_size, |
| 366 int raw_copy_size) { | 366 int raw_copy_size) { |
| 367 DisallowHeapAllocation no_allocation; | 367 DisallowHeapAllocation no_allocation; |
| 368 int copy_size = raw_copy_size; | 368 int copy_size = raw_copy_size; |
| 369 uint32_t to_end; | 369 uint32_t to_end; |
| 370 if (raw_copy_size < 0) { | 370 if (raw_copy_size < 0) { |
| 371 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 371 DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || |
| 372 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 372 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
| 373 copy_size = packed_size - from_start; | 373 copy_size = packed_size - from_start; |
| 374 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 374 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
| 375 to_end = to_base->length(); | 375 to_end = to_base->length(); |
| 376 for (uint32_t i = to_start + copy_size; i < to_end; ++i) { | 376 for (uint32_t i = to_start + copy_size; i < to_end; ++i) { |
| 377 FixedDoubleArray::cast(to_base)->set_the_hole(i); | 377 FixedDoubleArray::cast(to_base)->set_the_hole(i); |
| 378 } | 378 } |
| 379 } else { | 379 } else { |
| 380 to_end = to_start + static_cast<uint32_t>(copy_size); | 380 to_end = to_start + static_cast<uint32_t>(copy_size); |
| 381 } | 381 } |
| 382 } else { | 382 } else { |
| 383 to_end = to_start + static_cast<uint32_t>(copy_size); | 383 to_end = to_start + static_cast<uint32_t>(copy_size); |
| 384 } | 384 } |
| 385 ASSERT(static_cast<int>(to_end) <= to_base->length()); | 385 DCHECK(static_cast<int>(to_end) <= to_base->length()); |
| 386 ASSERT(packed_size >= 0 && packed_size <= copy_size); | 386 DCHECK(packed_size >= 0 && packed_size <= copy_size); |
| 387 ASSERT((copy_size + static_cast<int>(to_start)) <= to_base->length() && | 387 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() && |
| 388 (copy_size + static_cast<int>(from_start)) <= from_base->length()); | 388 (copy_size + static_cast<int>(from_start)) <= from_base->length()); |
| 389 if (copy_size == 0) return; | 389 if (copy_size == 0) return; |
| 390 FixedArray* from = FixedArray::cast(from_base); | 390 FixedArray* from = FixedArray::cast(from_base); |
| 391 FixedDoubleArray* to = FixedDoubleArray::cast(to_base); | 391 FixedDoubleArray* to = FixedDoubleArray::cast(to_base); |
| 392 for (uint32_t from_end = from_start + static_cast<uint32_t>(packed_size); | 392 for (uint32_t from_end = from_start + static_cast<uint32_t>(packed_size); |
| 393 from_start < from_end; from_start++, to_start++) { | 393 from_start < from_end; from_start++, to_start++) { |
| 394 Object* smi = from->get(from_start); | 394 Object* smi = from->get(from_start); |
| 395 ASSERT(!smi->IsTheHole()); | 395 DCHECK(!smi->IsTheHole()); |
| 396 to->set(to_start, Smi::cast(smi)->value()); | 396 to->set(to_start, Smi::cast(smi)->value()); |
| 397 } | 397 } |
| 398 } | 398 } |
| 399 | 399 |
| 400 | 400 |
| 401 static void CopyObjectToDoubleElements(FixedArrayBase* from_base, | 401 static void CopyObjectToDoubleElements(FixedArrayBase* from_base, |
| 402 uint32_t from_start, | 402 uint32_t from_start, |
| 403 FixedArrayBase* to_base, | 403 FixedArrayBase* to_base, |
| 404 uint32_t to_start, int raw_copy_size) { | 404 uint32_t to_start, int raw_copy_size) { |
| 405 DisallowHeapAllocation no_allocation; | 405 DisallowHeapAllocation no_allocation; |
| 406 int copy_size = raw_copy_size; | 406 int copy_size = raw_copy_size; |
| 407 if (raw_copy_size < 0) { | 407 if (raw_copy_size < 0) { |
| 408 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 408 DCHECK(raw_copy_size == ElementsAccessor::kCopyToEnd || |
| 409 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 409 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
| 410 copy_size = from_base->length() - from_start; | 410 copy_size = from_base->length() - from_start; |
| 411 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 411 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
| 412 for (int i = to_start + copy_size; i < to_base->length(); ++i) { | 412 for (int i = to_start + copy_size; i < to_base->length(); ++i) { |
| 413 FixedDoubleArray::cast(to_base)->set_the_hole(i); | 413 FixedDoubleArray::cast(to_base)->set_the_hole(i); |
| 414 } | 414 } |
| 415 } | 415 } |
| 416 } | 416 } |
| 417 ASSERT((copy_size + static_cast<int>(to_start)) <= to_base->length() && | 417 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() && |
| 418 (copy_size + static_cast<int>(from_start)) <= from_base->length()); | 418 (copy_size + static_cast<int>(from_start)) <= from_base->length()); |
| 419 if (copy_size == 0) return; | 419 if (copy_size == 0) return; |
| 420 FixedArray* from = FixedArray::cast(from_base); | 420 FixedArray* from = FixedArray::cast(from_base); |
| 421 FixedDoubleArray* to = FixedDoubleArray::cast(to_base); | 421 FixedDoubleArray* to = FixedDoubleArray::cast(to_base); |
| 422 Object* the_hole = from->GetHeap()->the_hole_value(); | 422 Object* the_hole = from->GetHeap()->the_hole_value(); |
| 423 for (uint32_t from_end = from_start + copy_size; | 423 for (uint32_t from_end = from_start + copy_size; |
| 424 from_start < from_end; from_start++, to_start++) { | 424 from_start < from_end; from_start++, to_start++) { |
| 425 Object* hole_or_object = from->get(from_start); | 425 Object* hole_or_object = from->get(from_start); |
| 426 if (hole_or_object == the_hole) { | 426 if (hole_or_object == the_hole) { |
| 427 to->set_the_hole(to_start); | 427 to->set_the_hole(to_start); |
| 428 } else { | 428 } else { |
| 429 to->set(to_start, hole_or_object->Number()); | 429 to->set(to_start, hole_or_object->Number()); |
| 430 } | 430 } |
| 431 } | 431 } |
| 432 } | 432 } |
| 433 | 433 |
| 434 | 434 |
| 435 static void CopyDictionaryToDoubleElements(FixedArrayBase* from_base, | 435 static void CopyDictionaryToDoubleElements(FixedArrayBase* from_base, |
| 436 uint32_t from_start, | 436 uint32_t from_start, |
| 437 FixedArrayBase* to_base, | 437 FixedArrayBase* to_base, |
| 438 uint32_t to_start, | 438 uint32_t to_start, |
| 439 int raw_copy_size) { | 439 int raw_copy_size) { |
| 440 DisallowHeapAllocation no_allocation; | 440 DisallowHeapAllocation no_allocation; |
| 441 SeededNumberDictionary* from = SeededNumberDictionary::cast(from_base); | 441 SeededNumberDictionary* from = SeededNumberDictionary::cast(from_base); |
| 442 int copy_size = raw_copy_size; | 442 int copy_size = raw_copy_size; |
| 443 if (copy_size < 0) { | 443 if (copy_size < 0) { |
| 444 ASSERT(copy_size == ElementsAccessor::kCopyToEnd || | 444 DCHECK(copy_size == ElementsAccessor::kCopyToEnd || |
| 445 copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 445 copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
| 446 copy_size = from->max_number_key() + 1 - from_start; | 446 copy_size = from->max_number_key() + 1 - from_start; |
| 447 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 447 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
| 448 for (int i = to_start + copy_size; i < to_base->length(); ++i) { | 448 for (int i = to_start + copy_size; i < to_base->length(); ++i) { |
| 449 FixedDoubleArray::cast(to_base)->set_the_hole(i); | 449 FixedDoubleArray::cast(to_base)->set_the_hole(i); |
| 450 } | 450 } |
| 451 } | 451 } |
| 452 } | 452 } |
| 453 if (copy_size == 0) return; | 453 if (copy_size == 0) return; |
| 454 FixedDoubleArray* to = FixedDoubleArray::cast(to_base); | 454 FixedDoubleArray* to = FixedDoubleArray::cast(to_base); |
| (...skipping 279 matching lines...) |
| 734 UNREACHABLE(); | 734 UNREACHABLE(); |
| 735 } | 735 } |
| 736 | 736 |
| 737 virtual void CopyElements( | 737 virtual void CopyElements( |
| 738 Handle<FixedArrayBase> from, | 738 Handle<FixedArrayBase> from, |
| 739 uint32_t from_start, | 739 uint32_t from_start, |
| 740 ElementsKind from_kind, | 740 ElementsKind from_kind, |
| 741 Handle<FixedArrayBase> to, | 741 Handle<FixedArrayBase> to, |
| 742 uint32_t to_start, | 742 uint32_t to_start, |
| 743 int copy_size) V8_FINAL V8_OVERRIDE { | 743 int copy_size) V8_FINAL V8_OVERRIDE { |
| 744 ASSERT(!from.is_null()); | 744 DCHECK(!from.is_null()); |
| 745 ElementsAccessorSubclass::CopyElementsImpl( | 745 ElementsAccessorSubclass::CopyElementsImpl( |
| 746 from, from_start, to, from_kind, to_start, kPackedSizeNotKnown, | 746 from, from_start, to, from_kind, to_start, kPackedSizeNotKnown, |
| 747 copy_size); | 747 copy_size); |
| 748 } | 748 } |
| 749 | 749 |
| 750 virtual void CopyElements( | 750 virtual void CopyElements( |
| 751 JSObject* from_holder, | 751 JSObject* from_holder, |
| 752 uint32_t from_start, | 752 uint32_t from_start, |
| 753 ElementsKind from_kind, | 753 ElementsKind from_kind, |
| 754 Handle<FixedArrayBase> to, | 754 Handle<FixedArrayBase> to, |
| (...skipping 13 matching lines...) |
| 768 ElementsAccessorSubclass::CopyElementsImpl( | 768 ElementsAccessorSubclass::CopyElementsImpl( |
| 769 from, from_start, to, from_kind, to_start, packed_size, copy_size); | 769 from, from_start, to, from_kind, to_start, packed_size, copy_size); |
| 770 } | 770 } |
| 771 | 771 |
| 772 virtual MaybeHandle<FixedArray> AddElementsToFixedArray( | 772 virtual MaybeHandle<FixedArray> AddElementsToFixedArray( |
| 773 Handle<Object> receiver, | 773 Handle<Object> receiver, |
| 774 Handle<JSObject> holder, | 774 Handle<JSObject> holder, |
| 775 Handle<FixedArray> to, | 775 Handle<FixedArray> to, |
| 776 Handle<FixedArrayBase> from) V8_FINAL V8_OVERRIDE { | 776 Handle<FixedArrayBase> from) V8_FINAL V8_OVERRIDE { |
| 777 int len0 = to->length(); | 777 int len0 = to->length(); |
| 778 #ifdef ENABLE_SLOW_ASSERTS | 778 #ifdef ENABLE_SLOW_DCHECKS |
| 779 if (FLAG_enable_slow_asserts) { | 779 if (FLAG_enable_slow_asserts) { |
| 780 for (int i = 0; i < len0; i++) { | 780 for (int i = 0; i < len0; i++) { |
| 781 ASSERT(!to->get(i)->IsTheHole()); | 781 DCHECK(!to->get(i)->IsTheHole()); |
| 782 } | 782 } |
| 783 } | 783 } |
| 784 #endif | 784 #endif |
| 785 | 785 |
| 786 // Optimize if 'other' is empty. | 786 // Optimize if 'other' is empty. |
| 787 // We cannot optimize if 'this' is empty, as other may have holes. | 787 // We cannot optimize if 'this' is empty, as other may have holes. |
| 788 uint32_t len1 = ElementsAccessorSubclass::GetCapacityImpl(from); | 788 uint32_t len1 = ElementsAccessorSubclass::GetCapacityImpl(from); |
| 789 if (len1 == 0) return to; | 789 if (len1 == 0) return to; |
| 790 | 790 |
| 791 Isolate* isolate = from->GetIsolate(); | 791 Isolate* isolate = from->GetIsolate(); |
| 792 | 792 |
| 793 // Compute how many elements are not in other. | 793 // Compute how many elements are not in other. |
| 794 uint32_t extra = 0; | 794 uint32_t extra = 0; |
| 795 for (uint32_t y = 0; y < len1; y++) { | 795 for (uint32_t y = 0; y < len1; y++) { |
| 796 uint32_t key = ElementsAccessorSubclass::GetKeyForIndexImpl(from, y); | 796 uint32_t key = ElementsAccessorSubclass::GetKeyForIndexImpl(from, y); |
| 797 if (ElementsAccessorSubclass::HasElementImpl( | 797 if (ElementsAccessorSubclass::HasElementImpl( |
| 798 receiver, holder, key, from)) { | 798 receiver, holder, key, from)) { |
| 799 Handle<Object> value; | 799 Handle<Object> value; |
| 800 ASSIGN_RETURN_ON_EXCEPTION( | 800 ASSIGN_RETURN_ON_EXCEPTION( |
| 801 isolate, value, | 801 isolate, value, |
| 802 ElementsAccessorSubclass::GetImpl(receiver, holder, key, from), | 802 ElementsAccessorSubclass::GetImpl(receiver, holder, key, from), |
| 803 FixedArray); | 803 FixedArray); |
| 804 | 804 |
| 805 ASSERT(!value->IsTheHole()); | 805 DCHECK(!value->IsTheHole()); |
| 806 if (!HasKey(to, value)) { | 806 if (!HasKey(to, value)) { |
| 807 extra++; | 807 extra++; |
| 808 } | 808 } |
| 809 } | 809 } |
| 810 } | 810 } |
| 811 | 811 |
| 812 if (extra == 0) return to; | 812 if (extra == 0) return to; |
| 813 | 813 |
| 814 // Allocate the result | 814 // Allocate the result |
| 815 Handle<FixedArray> result = isolate->factory()->NewFixedArray(len0 + extra); | 815 Handle<FixedArray> result = isolate->factory()->NewFixedArray(len0 + extra); |
| 816 | 816 |
| 817 // Fill in the content | 817 // Fill in the content |
| 818 { | 818 { |
| 819 DisallowHeapAllocation no_gc; | 819 DisallowHeapAllocation no_gc; |
| 820 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); | 820 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); |
| 821 for (int i = 0; i < len0; i++) { | 821 for (int i = 0; i < len0; i++) { |
| 822 Object* e = to->get(i); | 822 Object* e = to->get(i); |
| 823 ASSERT(e->IsString() || e->IsNumber()); | 823 DCHECK(e->IsString() || e->IsNumber()); |
| 824 result->set(i, e, mode); | 824 result->set(i, e, mode); |
| 825 } | 825 } |
| 826 } | 826 } |
| 827 // Fill in the extra values. | 827 // Fill in the extra values. |
| 828 uint32_t index = 0; | 828 uint32_t index = 0; |
| 829 for (uint32_t y = 0; y < len1; y++) { | 829 for (uint32_t y = 0; y < len1; y++) { |
| 830 uint32_t key = | 830 uint32_t key = |
| 831 ElementsAccessorSubclass::GetKeyForIndexImpl(from, y); | 831 ElementsAccessorSubclass::GetKeyForIndexImpl(from, y); |
| 832 if (ElementsAccessorSubclass::HasElementImpl( | 832 if (ElementsAccessorSubclass::HasElementImpl( |
| 833 receiver, holder, key, from)) { | 833 receiver, holder, key, from)) { |
| 834 Handle<Object> value; | 834 Handle<Object> value; |
| 835 ASSIGN_RETURN_ON_EXCEPTION( | 835 ASSIGN_RETURN_ON_EXCEPTION( |
| 836 isolate, value, | 836 isolate, value, |
| 837 ElementsAccessorSubclass::GetImpl(receiver, holder, key, from), | 837 ElementsAccessorSubclass::GetImpl(receiver, holder, key, from), |
| 838 FixedArray); | 838 FixedArray); |
| 839 if (!value->IsTheHole() && !HasKey(to, value)) { | 839 if (!value->IsTheHole() && !HasKey(to, value)) { |
| 840 result->set(len0 + index, *value); | 840 result->set(len0 + index, *value); |
| 841 index++; | 841 index++; |
| 842 } | 842 } |
| 843 } | 843 } |
| 844 } | 844 } |
| 845 ASSERT(extra == index); | 845 DCHECK(extra == index); |
| 846 return result; | 846 return result; |
| 847 } | 847 } |
| 848 | 848 |
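The #ifdef ENABLE_SLOW_DCHECKS block at the top of AddElementsToFixedArray shows the usual pattern for expensive invariant checks: they are compiled in only under a build-time define and then still gated by a runtime flag. A generic sketch of that double gate, with ENABLE_SLOW_CHECKS and g_enable_slow_checks as made-up names standing in for the real define and FLAG_enable_slow_asserts:

    #include <cassert>
    #include <vector>

    static bool g_enable_slow_checks = false;  // stand-in runtime flag

    // O(n) sanity check that is too slow to run unconditionally: it exists only
    // when the build defines ENABLE_SLOW_CHECKS and runs only when the flag is on.
    template <typename T>
    void CheckNoHoles(const std::vector<T>& elements, const T& hole) {
    #ifdef ENABLE_SLOW_CHECKS
      if (g_enable_slow_checks) {
        for (const T& e : elements) {
          assert(e != hole);  // analogous to DCHECK(!to->get(i)->IsTheHole())
        }
      }
    #else
      (void)elements;
      (void)hole;
    #endif
    }

Release builds never pay for the loop, and debug builds can still opt in per run.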
| 849 protected: | 849 protected: |
| 850 static uint32_t GetCapacityImpl(Handle<FixedArrayBase> backing_store) { | 850 static uint32_t GetCapacityImpl(Handle<FixedArrayBase> backing_store) { |
| 851 return backing_store->length(); | 851 return backing_store->length(); |
| 852 } | 852 } |
| 853 | 853 |
| 854 virtual uint32_t GetCapacity(Handle<FixedArrayBase> backing_store) | 854 virtual uint32_t GetCapacity(Handle<FixedArrayBase> backing_store) |
| 855 V8_FINAL V8_OVERRIDE { | 855 V8_FINAL V8_OVERRIDE { |
| (...skipping 85 matching lines...) |
| 941 uint32_t new_capacity = length > min ? length : min; | 941 uint32_t new_capacity = length > min ? length : min; |
| 942 FastElementsAccessorSubclass::SetFastElementsCapacityAndLength( | 942 FastElementsAccessorSubclass::SetFastElementsCapacityAndLength( |
| 943 array, new_capacity, length); | 943 array, new_capacity, length); |
| 944 JSObject::ValidateElements(array); | 944 JSObject::ValidateElements(array); |
| 945 return length_object; | 945 return length_object; |
| 946 } | 946 } |
| 947 | 947 |
| 948 static Handle<Object> DeleteCommon(Handle<JSObject> obj, | 948 static Handle<Object> DeleteCommon(Handle<JSObject> obj, |
| 949 uint32_t key, | 949 uint32_t key, |
| 950 JSReceiver::DeleteMode mode) { | 950 JSReceiver::DeleteMode mode) { |
| 951 ASSERT(obj->HasFastSmiOrObjectElements() || | 951 DCHECK(obj->HasFastSmiOrObjectElements() || |
| 952 obj->HasFastDoubleElements() || | 952 obj->HasFastDoubleElements() || |
| 953 obj->HasFastArgumentsElements()); | 953 obj->HasFastArgumentsElements()); |
| 954 Isolate* isolate = obj->GetIsolate(); | 954 Isolate* isolate = obj->GetIsolate(); |
| 955 Heap* heap = obj->GetHeap(); | 955 Heap* heap = obj->GetHeap(); |
| 956 Handle<FixedArrayBase> elements(obj->elements()); | 956 Handle<FixedArrayBase> elements(obj->elements()); |
| 957 if (*elements == heap->empty_fixed_array()) { | 957 if (*elements == heap->empty_fixed_array()) { |
| 958 return isolate->factory()->true_value(); | 958 return isolate->factory()->true_value(); |
| 959 } | 959 } |
| 960 Handle<BackingStore> backing_store = Handle<BackingStore>::cast(elements); | 960 Handle<BackingStore> backing_store = Handle<BackingStore>::cast(elements); |
| 961 bool is_sloppy_arguments_elements_map = | 961 bool is_sloppy_arguments_elements_map = |
| (...skipping 59 matching lines...) |
| 1021 } | 1021 } |
| 1022 return !Handle<BackingStore>::cast(backing_store)->is_the_hole(key); | 1022 return !Handle<BackingStore>::cast(backing_store)->is_the_hole(key); |
| 1023 } | 1023 } |
| 1024 | 1024 |
| 1025 static void ValidateContents(Handle<JSObject> holder, int length) { | 1025 static void ValidateContents(Handle<JSObject> holder, int length) { |
| 1026 #if DEBUG | 1026 #if DEBUG |
| 1027 Isolate* isolate = holder->GetIsolate(); | 1027 Isolate* isolate = holder->GetIsolate(); |
| 1028 HandleScope scope(isolate); | 1028 HandleScope scope(isolate); |
| 1029 Handle<FixedArrayBase> elements(holder->elements(), isolate); | 1029 Handle<FixedArrayBase> elements(holder->elements(), isolate); |
| 1030 Map* map = elements->map(); | 1030 Map* map = elements->map(); |
| 1031 ASSERT((IsFastSmiOrObjectElementsKind(KindTraits::Kind) && | 1031 DCHECK((IsFastSmiOrObjectElementsKind(KindTraits::Kind) && |
| 1032 (map == isolate->heap()->fixed_array_map() || | 1032 (map == isolate->heap()->fixed_array_map() || |
| 1033 map == isolate->heap()->fixed_cow_array_map())) || | 1033 map == isolate->heap()->fixed_cow_array_map())) || |
| 1034 (IsFastDoubleElementsKind(KindTraits::Kind) == | 1034 (IsFastDoubleElementsKind(KindTraits::Kind) == |
| 1035 ((map == isolate->heap()->fixed_array_map() && length == 0) || | 1035 ((map == isolate->heap()->fixed_array_map() && length == 0) || |
| 1036 map == isolate->heap()->fixed_double_array_map()))); | 1036 map == isolate->heap()->fixed_double_array_map()))); |
| 1037 DisallowHeapAllocation no_gc; | 1037 DisallowHeapAllocation no_gc; |
| 1038 for (int i = 0; i < length; i++) { | 1038 for (int i = 0; i < length; i++) { |
| 1039 HandleScope scope(isolate); | 1039 HandleScope scope(isolate); |
| 1040 Handle<BackingStore> backing_store = Handle<BackingStore>::cast(elements); | 1040 Handle<BackingStore> backing_store = Handle<BackingStore>::cast(elements); |
| 1041 ASSERT((!IsFastSmiElementsKind(KindTraits::Kind) || | 1041 DCHECK((!IsFastSmiElementsKind(KindTraits::Kind) || |
| 1042 BackingStore::get(backing_store, i)->IsSmi()) || | 1042 BackingStore::get(backing_store, i)->IsSmi()) || |
| 1043 (IsFastHoleyElementsKind(KindTraits::Kind) == | 1043 (IsFastHoleyElementsKind(KindTraits::Kind) == |
| 1044 backing_store->is_the_hole(i))); | 1044 backing_store->is_the_hole(i))); |
| 1045 } | 1045 } |
| 1046 #endif | 1046 #endif |
| 1047 } | 1047 } |
| 1048 }; | 1048 }; |
| 1049 | 1049 |
| 1050 | 1050 |
| 1051 static inline ElementsKind ElementsKindForArray(Handle<FixedArrayBase> array) { | 1051 static inline ElementsKind ElementsKindForArray(Handle<FixedArrayBase> array) { |
| (...skipping 560 matching lines...) |
| 1612 Handle<JSObject> obj, | 1612 Handle<JSObject> obj, |
| 1613 uint32_t key, | 1613 uint32_t key, |
| 1614 Handle<FixedArrayBase> parameters) { | 1614 Handle<FixedArrayBase> parameters) { |
| 1615 Isolate* isolate = obj->GetIsolate(); | 1615 Isolate* isolate = obj->GetIsolate(); |
| 1616 Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(parameters); | 1616 Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(parameters); |
| 1617 Handle<Object> probe = GetParameterMapArg(obj, parameter_map, key); | 1617 Handle<Object> probe = GetParameterMapArg(obj, parameter_map, key); |
| 1618 if (!probe->IsTheHole()) { | 1618 if (!probe->IsTheHole()) { |
| 1619 DisallowHeapAllocation no_gc; | 1619 DisallowHeapAllocation no_gc; |
| 1620 Context* context = Context::cast(parameter_map->get(0)); | 1620 Context* context = Context::cast(parameter_map->get(0)); |
| 1621 int context_index = Handle<Smi>::cast(probe)->value(); | 1621 int context_index = Handle<Smi>::cast(probe)->value(); |
| 1622 ASSERT(!context->get(context_index)->IsTheHole()); | 1622 DCHECK(!context->get(context_index)->IsTheHole()); |
| 1623 return handle(context->get(context_index), isolate); | 1623 return handle(context->get(context_index), isolate); |
| 1624 } else { | 1624 } else { |
| 1625 // Object is not mapped, defer to the arguments. | 1625 // Object is not mapped, defer to the arguments. |
| 1626 Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1)), | 1626 Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1)), |
| 1627 isolate); | 1627 isolate); |
| 1628 Handle<Object> result; | 1628 Handle<Object> result; |
| 1629 ASSIGN_RETURN_ON_EXCEPTION( | 1629 ASSIGN_RETURN_ON_EXCEPTION( |
| 1630 isolate, result, | 1630 isolate, result, |
| 1631 ElementsAccessor::ForArray(arguments)->Get( | 1631 ElementsAccessor::ForArray(arguments)->Get( |
| 1632 receiver, obj, key, arguments), | 1632 receiver, obj, key, arguments), |
| 1633 Object); | 1633 Object); |
| 1634 // Elements of the arguments object in slow mode might be slow aliases. | 1634 // Elements of the arguments object in slow mode might be slow aliases. |
| 1635 if (result->IsAliasedArgumentsEntry()) { | 1635 if (result->IsAliasedArgumentsEntry()) { |
| 1636 DisallowHeapAllocation no_gc; | 1636 DisallowHeapAllocation no_gc; |
| 1637 AliasedArgumentsEntry* entry = AliasedArgumentsEntry::cast(*result); | 1637 AliasedArgumentsEntry* entry = AliasedArgumentsEntry::cast(*result); |
| 1638 Context* context = Context::cast(parameter_map->get(0)); | 1638 Context* context = Context::cast(parameter_map->get(0)); |
| 1639 int context_index = entry->aliased_context_slot(); | 1639 int context_index = entry->aliased_context_slot(); |
| 1640 ASSERT(!context->get(context_index)->IsTheHole()); | 1640 DCHECK(!context->get(context_index)->IsTheHole()); |
| 1641 return handle(context->get(context_index), isolate); | 1641 return handle(context->get(context_index), isolate); |
| 1642 } else { | 1642 } else { |
| 1643 return result; | 1643 return result; |
| 1644 } | 1644 } |
| 1645 } | 1645 } |
| 1646 } | 1646 } |
| 1647 | 1647 |
| 1648 MUST_USE_RESULT static PropertyAttributes GetAttributesImpl( | 1648 MUST_USE_RESULT static PropertyAttributes GetAttributesImpl( |
| 1649 Handle<Object> receiver, | 1649 Handle<Object> receiver, |
| 1650 Handle<JSObject> obj, | 1650 Handle<JSObject> obj, |
| (...skipping 180 matching lines...) |
| 1831 | 1831 |
| 1832 // Fast case: The new length fits into a Smi. | 1832 // Fast case: The new length fits into a Smi. |
| 1833 Handle<Object> smi_length; | 1833 Handle<Object> smi_length; |
| 1834 | 1834 |
| 1835 if (Object::ToSmi(isolate, length).ToHandle(&smi_length) && | 1835 if (Object::ToSmi(isolate, length).ToHandle(&smi_length) && |
| 1836 smi_length->IsSmi()) { | 1836 smi_length->IsSmi()) { |
| 1837 const int value = Handle<Smi>::cast(smi_length)->value(); | 1837 const int value = Handle<Smi>::cast(smi_length)->value(); |
| 1838 if (value >= 0) { | 1838 if (value >= 0) { |
| 1839 Handle<Object> new_length = ElementsAccessorSubclass:: | 1839 Handle<Object> new_length = ElementsAccessorSubclass:: |
| 1840 SetLengthWithoutNormalize(backing_store, array, smi_length, value); | 1840 SetLengthWithoutNormalize(backing_store, array, smi_length, value); |
| 1841 ASSERT(!new_length.is_null()); | 1841 DCHECK(!new_length.is_null()); |
| 1842 | 1842 |
| 1843 // even though the proposed length was a smi, new_length could | 1843 // even though the proposed length was a smi, new_length could |
| 1844 // still be a heap number because SetLengthWithoutNormalize doesn't | 1844 // still be a heap number because SetLengthWithoutNormalize doesn't |
| 1845 // allow the array length property to drop below the index of | 1845 // allow the array length property to drop below the index of |
| 1846 // non-deletable elements. | 1846 // non-deletable elements. |
| 1847 ASSERT(new_length->IsSmi() || new_length->IsHeapNumber() || | 1847 DCHECK(new_length->IsSmi() || new_length->IsHeapNumber() || |
| 1848 new_length->IsUndefined()); | 1848 new_length->IsUndefined()); |
| 1849 if (new_length->IsSmi()) { | 1849 if (new_length->IsSmi()) { |
| 1850 array->set_length(*Handle<Smi>::cast(new_length)); | 1850 array->set_length(*Handle<Smi>::cast(new_length)); |
| 1851 return array; | 1851 return array; |
| 1852 } else if (new_length->IsHeapNumber()) { | 1852 } else if (new_length->IsHeapNumber()) { |
| 1853 array->set_length(*new_length); | 1853 array->set_length(*new_length); |
| 1854 return array; | 1854 return array; |
| 1855 } | 1855 } |
| 1856 } else { | 1856 } else { |
| 1857 return ThrowArrayLengthRangeError(isolate); | 1857 return ThrowArrayLengthRangeError(isolate); |
| 1858 } | 1858 } |
| 1859 } | 1859 } |
| 1860 | 1860 |
| 1861 // Slow case: The new length does not fit into a Smi or conversion | 1861 // Slow case: The new length does not fit into a Smi or conversion |
| 1862 // to slow elements is needed for other reasons. | 1862 // to slow elements is needed for other reasons. |
| 1863 if (length->IsNumber()) { | 1863 if (length->IsNumber()) { |
| 1864 uint32_t value; | 1864 uint32_t value; |
| 1865 if (length->ToArrayIndex(&value)) { | 1865 if (length->ToArrayIndex(&value)) { |
| 1866 Handle<SeededNumberDictionary> dictionary = | 1866 Handle<SeededNumberDictionary> dictionary = |
| 1867 JSObject::NormalizeElements(array); | 1867 JSObject::NormalizeElements(array); |
| 1868 ASSERT(!dictionary.is_null()); | 1868 DCHECK(!dictionary.is_null()); |
| 1869 | 1869 |
| 1870 Handle<Object> new_length = DictionaryElementsAccessor:: | 1870 Handle<Object> new_length = DictionaryElementsAccessor:: |
| 1871 SetLengthWithoutNormalize(dictionary, array, length, value); | 1871 SetLengthWithoutNormalize(dictionary, array, length, value); |
| 1872 ASSERT(!new_length.is_null()); | 1872 DCHECK(!new_length.is_null()); |
| 1873 | 1873 |
| 1874 ASSERT(new_length->IsNumber()); | 1874 DCHECK(new_length->IsNumber()); |
| 1875 array->set_length(*new_length); | 1875 array->set_length(*new_length); |
| 1876 return array; | 1876 return array; |
| 1877 } else { | 1877 } else { |
| 1878 return ThrowArrayLengthRangeError(isolate); | 1878 return ThrowArrayLengthRangeError(isolate); |
| 1879 } | 1879 } |
| 1880 } | 1880 } |
| 1881 | 1881 |
| 1882 // Fall-back case: The new length is not a number so make the array | 1882 // Fall-back case: The new length is not a number so make the array |
| 1883 // size one and set only element to length. | 1883 // size one and set only element to length. |
| 1884 Handle<FixedArray> new_backing_store = isolate->factory()->NewFixedArray(1); | 1884 Handle<FixedArray> new_backing_store = isolate->factory()->NewFixedArray(1); |
| (...skipping 89 matching lines...) |
| 1974 UNREACHABLE(); | 1974 UNREACHABLE(); |
| 1975 break; | 1975 break; |
| 1976 } | 1976 } |
| 1977 | 1977 |
| 1978 array->set_elements(*elms); | 1978 array->set_elements(*elms); |
| 1979 array->set_length(Smi::FromInt(number_of_elements)); | 1979 array->set_length(Smi::FromInt(number_of_elements)); |
| 1980 return array; | 1980 return array; |
| 1981 } | 1981 } |
| 1982 | 1982 |
| 1983 } } // namespace v8::internal | 1983 } } // namespace v8::internal |
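The change itself is mechanical: every ASSERT, SLOW_ASSERT and ENABLE_SLOW_ASSERTS spelling in elements.cc becomes the corresponding DCHECK form, with no intended change in behaviour. When a rename like this has to land incrementally across a large tree, one common approach is a transitional shim such as the sketch below; the header name and its include are assumptions for illustration, not something this CL adds.

    // Hypothetical transitional header (not part of this CL). It lets files
    // that still use the old spellings compile while the tree migrates, and is
    // deleted once the rename is complete.
    #ifndef V8_CHECKS_COMPAT_H_
    #define V8_CHECKS_COMPAT_H_

    #include "src/base/logging.h"  // assumed location of DCHECK for this sketch

    #define ASSERT(condition) DCHECK(condition)
    #define ASSERT_EQ(a, b) DCHECK_EQ(a, b)
    #define SLOW_ASSERT(condition) SLOW_DCHECK(condition)

    #endif  // V8_CHECKS_COMPAT_H_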