Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 318 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 329 } | 329 } |
| 330 | 330 |
| 331 | 331 |
| 332 static FixedArray* LeftTrimFixedArray(FixedArray* elms, int to_trim) { | 332 static FixedArray* LeftTrimFixedArray(FixedArray* elms, int to_trim) { |
| 333 // For now this trick is only applied to fixed arrays in new space. | 333 // For now this trick is only applied to fixed arrays in new space. |
| 334 // In large object space the object's start must coincide with chunk | 334 // In large object space the object's start must coincide with chunk |
| 335 // and thus the trick is just not applicable. | 335 // and thus the trick is just not applicable. |
| 336 // In old space we do not use this trick to avoid dealing with | 336 // In old space we do not use this trick to avoid dealing with |
| 337 // remembered sets. | 337 // remembered sets. |
| 338 ASSERT(Heap::new_space()->Contains(elms)); | 338 ASSERT(Heap::new_space()->Contains(elms)); |
| 339 ASSERT(to_trim > 0); | |
| 339 | 340 |
| 340 STATIC_ASSERT(FixedArray::kMapOffset == 0); | 341 STATIC_ASSERT(FixedArray::kMapOffset == 0); |
| 341 STATIC_ASSERT(FixedArray::kLengthOffset == kPointerSize); | 342 STATIC_ASSERT(FixedArray::kLengthOffset == kPointerSize); |
| 342 STATIC_ASSERT(FixedArray::kHeaderSize == 2 * kPointerSize); | 343 STATIC_ASSERT(FixedArray::kHeaderSize == 2 * kPointerSize); |
| 343 | 344 |
| 344 Object** former_start = HeapObject::RawField(elms, 0); | 345 Object** former_start = HeapObject::RawField(elms, 0); |
| 345 | 346 |
| 346 const int len = elms->length(); | 347 const int len = elms->length(); |
| 347 | 348 |
| 348 // Technically in new space this write might be omitted (except for | 349 // Technically in new space this write might be omitted (except for |
| (...skipping 362 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 711 if (n_arguments > 1) { | 712 if (n_arguments > 1) { |
| 712 Object* arg2 = args[2]; | 713 Object* arg2 = args[2]; |
| 713 if (arg2->IsSmi()) { | 714 if (arg2->IsSmi()) { |
| 714 delete_count = Smi::cast(arg2)->value(); | 715 delete_count = Smi::cast(arg2)->value(); |
| 715 } else { | 716 } else { |
| 716 return CallJsBuiltin("ArraySplice", args); | 717 return CallJsBuiltin("ArraySplice", args); |
| 717 } | 718 } |
| 718 } | 719 } |
| 719 int actual_delete_count = Min(Max(delete_count, 0), len - actual_start); | 720 int actual_delete_count = Min(Max(delete_count, 0), len - actual_start); |
| 720 | 721 |
| 721 JSArray* result_array = NULL; | |
| 722 if (actual_delete_count == 0) { | |
| 723 Object* result = AllocateEmptyJSArray(); | |
| 724 if (result->IsFailure()) return result; | |
| 725 result_array = JSArray::cast(result); | |
| 726 } else { | |
| 727 // Allocate result array. | |
| 728 Object* result = AllocateJSArray(); | |
| 729 if (result->IsFailure()) return result; | |
| 730 result_array = JSArray::cast(result); | |
| 731 | |
| 732 result = Heap::AllocateUninitializedFixedArray(actual_delete_count); | |
| 733 if (result->IsFailure()) return result; | |
| 734 FixedArray* result_elms = FixedArray::cast(result); | |
| 735 | |
| 736 AssertNoAllocation no_gc; | |
| 737 // Fill newly created array. | |
| 738 CopyElements(&no_gc, | |
| 739 result_elms, 0, | |
| 740 elms, actual_start, | |
| 741 actual_delete_count); | |
| 742 | |
| 743 // Set elements. | |
| 744 result_array->set_elements(result_elms); | |
| 745 | |
| 746 // Set the length. | |
| 747 result_array->set_length(Smi::FromInt(actual_delete_count)); | |
| 748 } | |
| 749 | |
| 750 int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0; | 722 int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0; |
| 751 | 723 |
| 752 int new_length = len - actual_delete_count + item_count; | 724 int new_length = len - actual_delete_count + item_count; |
| 753 | 725 |
| 754 if (item_count < actual_delete_count) { | 726 JSArray* result_array = NULL; |
| 755 // Shrink the array. | 727 if (Heap::new_space()->Contains(elms) && |
| 756 const bool trim_array = Heap::new_space()->Contains(elms) && | 728 (2 * actual_delete_count >= len + item_count)) { |
|
antonm
2010/04/05 12:21:13
looks like you're trying to keep the number of wri… [comment truncated in export]
iegorov
2010/04/05 13:01:49
My counts differ:
- when making inplace (terminolo… [comment truncated in export]
antonm
2010/04/05 14:34:30
Sorry, I should have been explicit but I didn't ac… [comment truncated in export]
| |
| 757 ((actual_start + item_count) < | 729 // Allocate array for receiver's array. |
| 758 (len - actual_delete_count - actual_start)); | 730 Object* result = AllocateJSArray(); |
| 759 if (trim_array) { | 731 if (result->IsFailure()) return result; |
| 760 const int delta = actual_delete_count - item_count; | 732 result_array = JSArray::cast(result); |
| 761 | 733 |
| 762 if (actual_start > 0) { | 734 result = Heap::AllocateUninitializedFixedArray(new_length); |
| 763 Object** start = elms->data_start(); | 735 if (result->IsFailure()) return result; |
| 764 memmove(start + delta, start, actual_start * kPointerSize); | 736 FixedArray* receiver_elms = FixedArray::cast(result); |
| 765 } | |
| 766 | 737 |
| 767 elms = LeftTrimFixedArray(elms, delta); | 738 int tail_length = len - actual_start - actual_delete_count; |
| 768 array->set_elements(elms, SKIP_WRITE_BARRIER); | 739 ASSERT(tail_length >= 0); |
| 740 | |
| 741 AssertNoAllocation no_gc; | |
| 742 // Fill with receiver elements. | |
| 743 if (actual_start > 0) { | |
| 744 CopyElements(&no_gc, | |
| 745 receiver_elms, 0, | |
| 746 elms, 0, | |
| 747 actual_start); | |
| 748 } | |
| 749 if (tail_length > 0) { | |
| 750 CopyElements(&no_gc, | |
| 751 receiver_elms, actual_start + item_count, | |
| 752 elms, actual_start + actual_delete_count, | |
| 753 tail_length); | |
| 754 } | |
| 755 array->set_elements(receiver_elms); | |
| 756 array->set_length(Smi::FromInt(new_length)); | |
| 757 | |
| 758 if (actual_start > 0) { | |
| 759 elms = LeftTrimFixedArray(elms, actual_start); | |
|
antonm
2010/04/05 12:21:13
good you noticed that — was going to bring your at… [comment truncated in export]
iegorov
2010/04/05 13:01:49
Yeah, testing rocks :)
On 2010/04/05 12:21:13, ant… [comment truncated in export]
| |
| 760 } | |
| 761 result_array->set_elements(elms, SKIP_WRITE_BARRIER); | |
| 762 result_array->set_length(Smi::FromInt(actual_delete_count)); | |
| 763 FillWithHoles(elms, actual_delete_count, len - actual_start); | |
| 764 | |
| 765 elms = receiver_elms; | |
| 766 } else { | |
| 767 if (actual_delete_count == 0) { | |
| 768 Object* result = AllocateEmptyJSArray(); | |
| 769 if (result->IsFailure()) return result; | |
| 770 result_array = JSArray::cast(result); | |
| 769 } else { | 771 } else { |
| 770 AssertNoAllocation no_gc; | 772 // Allocate result array. |
| 771 MoveElements(&no_gc, | 773 Object* result = AllocateJSArray(); |
| 772 elms, actual_start + item_count, | 774 if (result->IsFailure()) return result; |
| 773 elms, actual_start + actual_delete_count, | 775 result_array = JSArray::cast(result); |
| 774 (len - actual_delete_count - actual_start)); | |
| 775 FillWithHoles(elms, new_length, len); | |
| 776 } | |
| 777 } else if (item_count > actual_delete_count) { | |
| 778 // Currently fixed arrays cannot grow too big, so | |
| 779 // we should never hit this case. | |
| 780 ASSERT((item_count - actual_delete_count) <= (Smi::kMaxValue - len)); | |
| 781 | 776 |
| 782 // Check if array need to grow. | 777 result = Heap::AllocateUninitializedFixedArray(actual_delete_count); |
| 783 if (new_length > elms->length()) { | 778 if (result->IsFailure()) return result; |
| 784 // New backing storage is needed. | 779 FixedArray* result_elms = FixedArray::cast(result); |
| 785 int capacity = new_length + (new_length >> 1) + 16; | |
| 786 Object* obj = Heap::AllocateUninitializedFixedArray(capacity); | |
| 787 if (obj->IsFailure()) return obj; | |
| 788 FixedArray* new_elms = FixedArray::cast(obj); | |
| 789 | 780 |
| 790 AssertNoAllocation no_gc; | 781 AssertNoAllocation no_gc; |
| 791 // Copy the part before actual_start as is. | 782 // Fill newly created array. |
| 792 if (actual_start > 0) { | 783 CopyElements(&no_gc, |
| 793 CopyElements(&no_gc, new_elms, 0, elms, 0, actual_start); | 784 result_elms, 0, |
| 785 elms, actual_start, | |
| 786 actual_delete_count); | |
| 787 | |
| 788 // Set elements. | |
| 789 result_array->set_elements(result_elms); | |
| 790 | |
| 791 // Set the length. | |
| 792 result_array->set_length(Smi::FromInt(actual_delete_count)); | |
| 793 } | |
| 794 | |
| 795 if (item_count < actual_delete_count) { | |
| 796 // Shrink the array. | |
| 797 const bool trim_array = Heap::new_space()->Contains(elms) && | |
| 798 ((actual_start + item_count) < | |
| 799 (len - actual_delete_count - actual_start)); | |
| 800 if (trim_array) { | |
| 801 const int delta = actual_delete_count - item_count; | |
| 802 | |
| 803 if (actual_start > 0) { | |
| 804 Object** start = elms->data_start(); | |
| 805 memmove(start + delta, start, actual_start * kPointerSize); | |
| 806 } | |
| 807 | |
| 808 elms = LeftTrimFixedArray(elms, delta); | |
| 809 array->set_elements(elms, SKIP_WRITE_BARRIER); | |
| 810 } else { | |
| 811 AssertNoAllocation no_gc; | |
| 812 MoveElements(&no_gc, | |
| 813 elms, actual_start + item_count, | |
| 814 elms, actual_start + actual_delete_count, | |
| 815 (len - actual_delete_count - actual_start)); | |
| 816 FillWithHoles(elms, new_length, len); | |
| 794 } | 817 } |
| 795 const int to_copy = len - actual_delete_count - actual_start; | 818 } else if (item_count > actual_delete_count) { |
| 796 if (to_copy > 0) { | 819 // Currently fixed arrays cannot grow too big, so |
| 797 CopyElements(&no_gc, | 820 // we should never hit this case. |
| 798 new_elms, actual_start + item_count, | 821 ASSERT((item_count - actual_delete_count) <= (Smi::kMaxValue - len)); |
| 822 | |
| 823 // Check if array need to grow. | |
| 824 if (new_length > elms->length()) { | |
| 825 // New backing storage is needed. | |
| 826 int capacity = new_length + (new_length >> 1) + 16; | |
| 827 Object* obj = Heap::AllocateUninitializedFixedArray(capacity); | |
| 828 if (obj->IsFailure()) return obj; | |
| 829 FixedArray* new_elms = FixedArray::cast(obj); | |
| 830 | |
| 831 AssertNoAllocation no_gc; | |
| 832 // Copy the part before actual_start as is. | |
| 833 if (actual_start > 0) { | |
| 834 CopyElements(&no_gc, new_elms, 0, elms, 0, actual_start); | |
| 835 } | |
| 836 const int to_copy = len - actual_delete_count - actual_start; | |
| 837 if (to_copy > 0) { | |
| 838 CopyElements(&no_gc, | |
| 839 new_elms, actual_start + item_count, | |
| 840 elms, actual_start + actual_delete_count, | |
| 841 to_copy); | |
| 842 } | |
| 843 FillWithHoles(new_elms, new_length, capacity); | |
| 844 | |
| 845 elms = new_elms; | |
| 846 array->set_elements(elms); | |
| 847 } else { | |
| 848 AssertNoAllocation no_gc; | |
| 849 MoveElements(&no_gc, | |
| 850 elms, actual_start + item_count, | |
| 799 elms, actual_start + actual_delete_count, | 851 elms, actual_start + actual_delete_count, |
| 800 to_copy); | 852 (len - actual_delete_count - actual_start)); |
| 801 } | 853 } |
| 802 FillWithHoles(new_elms, new_length, capacity); | |
| 803 | |
| 804 elms = new_elms; | |
| 805 array->set_elements(elms); | |
| 806 } else { | |
| 807 AssertNoAllocation no_gc; | |
| 808 MoveElements(&no_gc, | |
| 809 elms, actual_start + item_count, | |
| 810 elms, actual_start + actual_delete_count, | |
| 811 (len - actual_delete_count - actual_start)); | |
| 812 } | 854 } |
| 813 } | 855 } |
| 814 | 856 |
| 815 AssertNoAllocation no_gc; | 857 AssertNoAllocation no_gc; |
| 816 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc); | 858 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc); |
| 817 for (int k = actual_start; k < actual_start + item_count; k++) { | 859 for (int k = actual_start; k < actual_start + item_count; k++) { |
| 818 elms->set(k, args[3 + k - actual_start], mode); | 860 elms->set(k, args[3 + k - actual_start], mode); |
| 819 } | 861 } |
| 820 | 862 |
| 821 // Set the length. | 863 // Set the length. |
| (...skipping 710 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1532 if (entry->contains(pc)) { | 1574 if (entry->contains(pc)) { |
| 1533 return names_[i]; | 1575 return names_[i]; |
| 1534 } | 1576 } |
| 1535 } | 1577 } |
| 1536 } | 1578 } |
| 1537 return NULL; | 1579 return NULL; |
| 1538 } | 1580 } |
| 1539 | 1581 |
| 1540 | 1582 |
| 1541 } } // namespace v8::internal | 1583 } } // namespace v8::internal |
| OLD | NEW |