OLD | NEW |
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 282 matching lines...)
293 Heap::RecordWrites(dst->address(), dst->OffsetOfElementAt(dst_index), len); | 293 Heap::RecordWrites(dst->address(), dst->OffsetOfElementAt(dst_index), len); |
294 } | 294 } |
295 } | 295 } |
296 | 296 |
297 | 297 |
298 static void FillWithHoles(FixedArray* dst, int from, int to) { | 298 static void FillWithHoles(FixedArray* dst, int from, int to) { |
299 MemsetPointer(dst->data_start() + from, Heap::the_hole_value(), to - from); | 299 MemsetPointer(dst->data_start() + from, Heap::the_hole_value(), to - from); |
300 } | 300 } |
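A hypothetical use of FillWithHoles, assuming a backing store logically shrunk
from length 8 to 5 (values illustrative only, not part of the patch):

    // Mark slots 5..7 as holes. A hole reads as an absent element (the lookup
    // falls through to the prototype chain) rather than as undefined.
    FillWithHoles(elms, 5, 8);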
301 | 301 |
302 | 302 |
| 303 static FixedArray* LeftTrimFixedArray(FixedArray* elms) { |
| 304 // For now this trick is applied only to fixed arrays in new space. |
| 305 // In large object space an object's start must coincide with the |
| 306 // chunk start, so the trick is not applicable there. |
| 307 // In old space we do not use this trick, to avoid dealing with |
| 308 // remembered sets. |
| 309 ASSERT(Heap::new_space()->Contains(elms)); |
| 310 |
| 311 STATIC_ASSERT(FixedArray::kMapOffset == 0); |
| 312 STATIC_ASSERT(FixedArray::kLengthOffset == kPointerSize); |
| 313 STATIC_ASSERT(FixedArray::kHeaderSize == 2 * kPointerSize); |
| 314 |
| 315 Object** former_start = HeapObject::RawField(elms, 0); |
| 316 |
| 317 const int len = elms->length(); |
| 318 |
| 319 // Technically in new space this write might be omitted (except in |
| 320 // debug mode, which iterates through the heap), but to play it |
| 321 // safe we still do it. |
| 322 former_start[0] = Heap::raw_unchecked_one_pointer_filler_map(); |
| 323 |
| 324 former_start[1] = Heap::fixed_array_map(); |
| 325 former_start[2] = reinterpret_cast<Object*>(len - 1); |
| 326 |
| 327 ASSERT_EQ(elms->address() + kPointerSize, (elms + kPointerSize)->address()); |
| 328 return elms + kPointerSize; |
| 329 } |
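A word-level sketch of what the trick above does, for a FixedArray of length 3
in new space (offsets in words; values illustrative only):

    // word 0: fixed_array_map    ->  one_pointer_filler_map
    // word 1: length = 3         ->  fixed_array_map    <- returned array
    // word 2: element[0]         ->  length = 2         (element[0] is dropped)
    // word 3: element[1]         ->  element[0]
    // word 4: element[2]         ->  element[1]

The one-word filler keeps the heap iterable. Note that the returned
elms + kPointerSize advances by exactly one word only because these object
classes carry no fields, so sizeof(FixedArray) is 1; the ASSERT_EQ checks that
assumption in debug builds.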
| 330 |
| 331 |
| 332 static FixedArray* LeftTrimFixedArray(FixedArray* elms, int to_trim) { |
| 333 // For now this trick is applied only to fixed arrays in new space. |
| 334 // In large object space an object's start must coincide with the |
| 335 // chunk start, so the trick is not applicable there. |
| 336 // In old space we do not use this trick, to avoid dealing with |
| 337 // remembered sets. |
| 338 ASSERT(Heap::new_space()->Contains(elms)); |
| 339 |
| 340 STATIC_ASSERT(FixedArray::kMapOffset == 0); |
| 341 STATIC_ASSERT(FixedArray::kLengthOffset == kPointerSize); |
| 342 STATIC_ASSERT(FixedArray::kHeaderSize == 2 * kPointerSize); |
| 343 |
| 344 Object** former_start = HeapObject::RawField(elms, 0); |
| 345 |
| 346 const int len = elms->length(); |
| 347 |
| 348 // Technically in new space these writes might be omitted (except in |
| 349 // debug mode, which iterates through the heap), but to play it |
| 350 // safe we still do them. |
| 351 if (to_trim == 1) { |
| 352 former_start[0] = Heap::raw_unchecked_one_pointer_filler_map(); |
| 353 } else if (to_trim == 2) { |
| 354 former_start[0] = Heap::raw_unchecked_two_pointer_filler_map(); |
| 355 } else { |
| 356 former_start[0] = Heap::raw_unchecked_byte_array_map(); |
| 357 ByteArray* as_byte_array = reinterpret_cast<ByteArray*>(elms); |
| 358 as_byte_array->set_length(ByteArray::LengthFor(to_trim * kPointerSize)); |
| 359 } |
| 360 |
| 361 former_start[to_trim] = Heap::fixed_array_map(); |
| 362 former_start[to_trim + 1] = reinterpret_cast<Object*>(len - to_trim); |
| 363 |
| 364 ASSERT_EQ(elms->address() + to_trim * kPointerSize, |
| 365 (elms + to_trim * kPointerSize)->address()); |
| 366 return elms + to_trim * kPointerSize; |
| 367 } |
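The generalized version covers the vacated words with a one-pointer filler, a
two-pointer filler, or (for larger trims) a fake ByteArray of the matching
size, so heap iteration still sees a valid object. A hypothetical call site,
not part of this patch (names illustrative):

    // Drop the first two elements of a new-space backing store. Skipping the
    // write barrier is safe here: the trimmed array is still in new space.
    FixedArray* trimmed = LeftTrimFixedArray(elms, 2);
    array->set_elements(trimmed, SKIP_WRITE_BARRIER);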
| 368 |
| 369 |
303 static bool ArrayPrototypeHasNoElements() { | 370 static bool ArrayPrototypeHasNoElements() { |
304 // This method depends on the non-writability of Object and Array | 371 // This method depends on the non-writability of Object and Array |
305 // prototype fields. | 372 // prototype fields. |
306 Context* global_context = Top::context()->global_context(); | 373 Context* global_context = Top::context()->global_context(); |
307 // Array.prototype | 374 // Array.prototype |
308 JSObject* proto = | 375 JSObject* proto = |
309 JSObject::cast(global_context->array_function()->prototype()); | 376 JSObject::cast(global_context->array_function()->prototype()); |
310 if (proto->elements() != Heap::empty_fixed_array()) return false; | 377 if (proto->elements() != Heap::empty_fixed_array()) return false; |
311 // Hidden prototype | 378 // Hidden prototype |
312 proto = JSObject::cast(proto->GetPrototype()); | 379 proto = JSObject::cast(proto->GetPrototype()); |
(...skipping 126 matching lines...)
439 // Remember to check the prototype chain. | 506 // Remember to check the prototype chain. |
440 JSFunction* array_function = | 507 JSFunction* array_function = |
441 Top::context()->global_context()->array_function(); | 508 Top::context()->global_context()->array_function(); |
442 JSObject* prototype = JSObject::cast(array_function->prototype()); | 509 JSObject* prototype = JSObject::cast(array_function->prototype()); |
443 top = prototype->GetElement(len - 1); | 510 top = prototype->GetElement(len - 1); |
444 | 511 |
445 return top; | 512 return top; |
446 } | 513 } |
447 | 514 |
448 | 515 |
449 static FixedArray* LeftTrimFixedArray(FixedArray* elms) { | |
450 // For now this trick is only applied to fixed arrays in new space. | |
451 // In large object space the object's start must coincide with chunk | |
452 // and thus the trick is just not applicable. | |
453 // In old space we do not use this trick to avoid dealing with | |
454 // remembered sets. | |
455 ASSERT(Heap::new_space()->Contains(elms)); | |
456 | |
457 Object** former_map = | |
458 HeapObject::RawField(elms, FixedArray::kMapOffset); | |
459 Object** former_length = | |
460 HeapObject::RawField(elms, FixedArray::kLengthOffset); | |
461 Object** former_first = | |
462 HeapObject::RawField(elms, FixedArray::kHeaderSize); | |
463 // Check that we don't forget to copy all the bits. | |
464 STATIC_ASSERT(FixedArray::kMapOffset + 2 * kPointerSize | |
465 == FixedArray::kHeaderSize); | |
466 | |
467 int len = elms->length(); | |
468 | |
469 *former_first = reinterpret_cast<Object*>(len - 1); | |
470 *former_length = Heap::fixed_array_map(); | |
471 // Technically in new space this write might be omitted (except for | |
472 // debug mode which iterates through the heap), but to play safer | |
473 // we still do it. | |
474 *former_map = Heap::raw_unchecked_one_pointer_filler_map(); | |
475 | |
476 ASSERT(elms->address() + kPointerSize == (elms + kPointerSize)->address()); | |
477 return elms + kPointerSize; | |
478 } | |
479 | |
480 | |
481 BUILTIN(ArrayShift) { | 516 BUILTIN(ArrayShift) { |
482 Object* receiver = *args.receiver(); | 517 Object* receiver = *args.receiver(); |
483 FixedArray* elms = NULL; | 518 FixedArray* elms = NULL; |
484 if (!IsJSArrayWithFastElements(receiver, &elms) | 519 if (!IsJSArrayWithFastElements(receiver, &elms) |
485 || !ArrayPrototypeHasNoElements()) { | 520 || !ArrayPrototypeHasNoElements()) { |
486 return CallJsBuiltin("ArrayShift", args); | 521 return CallJsBuiltin("ArrayShift", args); |
487 } | 522 } |
488 JSArray* array = JSArray::cast(receiver); | 523 JSArray* array = JSArray::cast(receiver); |
489 ASSERT(array->HasFastElements()); | 524 ASSERT(array->HasFastElements()); |
490 | 525 |
(...skipping 220 matching lines...)
711 // Set the length. | 746 // Set the length. |
712 result_array->set_length(Smi::FromInt(actual_delete_count)); | 747 result_array->set_length(Smi::FromInt(actual_delete_count)); |
713 } | 748 } |
714 | 749 |
715 int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0; | 750 int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0; |
716 | 751 |
717 int new_length = len - actual_delete_count + item_count; | 752 int new_length = len - actual_delete_count + item_count; |
718 | 753 |
719 if (item_count < actual_delete_count) { | 754 if (item_count < actual_delete_count) { |
720 // Shrink the array. | 755 // Shrink the array. |
721 AssertNoAllocation no_gc; | 756 const bool trim_array = Heap::new_space()->Contains(elms) && |
722 MoveElements(&no_gc, | 757 ((actual_start + item_count) < |
723 elms, actual_start + item_count, | 758 (len - actual_delete_count - actual_start)); |
724 elms, actual_start + actual_delete_count, | 759 if (trim_array) { |
725 (len - actual_delete_count - actual_start)); | 760 const int delta = actual_delete_count - item_count; |
726 FillWithHoles(elms, new_length, len); | 761 |
| 762 if (actual_start > 0) { |
| 763 Object** start = elms->data_start(); |
| 764 memmove(start + delta, start, actual_start * kPointerSize); |
| 765 } |
| 766 |
| 767 elms = LeftTrimFixedArray(elms, delta); |
| 768 array->set_elements(elms, SKIP_WRITE_BARRIER); |
| 769 } else { |
| 770 AssertNoAllocation no_gc; |
| 771 MoveElements(&no_gc, |
| 772 elms, actual_start + item_count, |
| 773 elms, actual_start + actual_delete_count, |
| 774 (len - actual_delete_count - actual_start)); |
| 775 FillWithHoles(elms, new_length, len); |
| 776 } |
727 } else if (item_count > actual_delete_count) { | 777 } else if (item_count > actual_delete_count) { |
728 // Currently fixed arrays cannot grow too big, so | 778 // Currently fixed arrays cannot grow too big, so |
729 // we should never hit this case. | 779 // we should never hit this case. |
730 ASSERT((item_count - actual_delete_count) <= (Smi::kMaxValue - len)); | 780 ASSERT((item_count - actual_delete_count) <= (Smi::kMaxValue - len)); |
731 | 781 |
732 // Check if the array needs to grow. | 782 // Check if the array needs to grow. |
733 if (new_length > elms->length()) { | 783 if (new_length > elms->length()) { |
734 // New backing storage is needed. | 784 // New backing storage is needed. |
735 int capacity = new_length + (new_length >> 1) + 16; | 785 int capacity = new_length + (new_length >> 1) + 16; |
736 Object* obj = Heap::AllocateUninitializedFixedArray(capacity); | 786 Object* obj = Heap::AllocateUninitializedFixedArray(capacity); |
(...skipping 745 matching lines...)
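A worked example of the trim-vs-move choice in the ArraySplice shrink branch
above (all numbers illustrative only):

    // len = 100, actual_start = 2, actual_delete_count = 10, item_count = 0
    // guard: (2 + 0) < (100 - 10 - 2)  =>  2 < 88  =>  trim_array is true
    //        (provided elms is in new space)
    // delta = 10: memmove the 2-element prefix right by 10 words, then
    // LeftTrimFixedArray(elms, 10), instead of moving the 88-element tail.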
1482 if (entry->contains(pc)) { | 1532 if (entry->contains(pc)) { |
1483 return names_[i]; | 1533 return names_[i]; |
1484 } | 1534 } |
1485 } | 1535 } |
1486 } | 1536 } |
1487 return NULL; | 1537 return NULL; |
1488 } | 1538 } |
1489 | 1539 |
1490 | 1540 |
1491 } } // namespace v8::internal | 1541 } } // namespace v8::internal |