| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 135 matching lines...) |
| 146 str.start(), str.length() * kUC16Size); | 146 str.start(), str.length() * kUC16Size); |
| 147 | 147 |
| 148 return answer; | 148 return answer; |
| 149 } | 149 } |
| 150 | 150 |
| 151 MaybeObject* Heap::CopyFixedArray(FixedArray* src) { | 151 MaybeObject* Heap::CopyFixedArray(FixedArray* src) { |
| 152 return CopyFixedArrayWithMap(src, src->map()); | 152 return CopyFixedArrayWithMap(src, src->map()); |
| 153 } | 153 } |
| 154 | 154 |
| 155 | 155 |
| 156 MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) { |
| 157 return CopyFixedDoubleArrayWithMap(src, src->map()); |
| 158 } |
| 159 |
| 160 |
| 156 MaybeObject* Heap::AllocateRaw(int size_in_bytes, | 161 MaybeObject* Heap::AllocateRaw(int size_in_bytes, |
| 157 AllocationSpace space, | 162 AllocationSpace space, |
| 158 AllocationSpace retry_space) { | 163 AllocationSpace retry_space) { |
| 159 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); | 164 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); |
| 160 ASSERT(space != NEW_SPACE || | 165 ASSERT(space != NEW_SPACE || |
| 161 retry_space == OLD_POINTER_SPACE || | 166 retry_space == OLD_POINTER_SPACE || |
| 162 retry_space == OLD_DATA_SPACE || | 167 retry_space == OLD_DATA_SPACE || |
| 163 retry_space == LO_SPACE); | 168 retry_space == LO_SPACE); |
| 164 #ifdef DEBUG | 169 #ifdef DEBUG |
| 165 if (FLAG_gc_interval >= 0 && | 170 if (FLAG_gc_interval >= 0 && |
| (...skipping 195 matching lines...) |
| 361 reinterpret_cast<Object**>(src), | 366 reinterpret_cast<Object**>(src), |
| 362 byte_size / kPointerSize); | 367 byte_size / kPointerSize); |
| 363 } | 368 } |
| 364 | 369 |
| 365 | 370 |
| 366 void Heap::MoveBlock(Address dst, Address src, int byte_size) { | 371 void Heap::MoveBlock(Address dst, Address src, int byte_size) { |
| 367 ASSERT(IsAligned(byte_size, kPointerSize)); | 372 ASSERT(IsAligned(byte_size, kPointerSize)); |
| 368 | 373 |
| 369 int size_in_words = byte_size / kPointerSize; | 374 int size_in_words = byte_size / kPointerSize; |
| 370 | 375 |
| 371 if ((dst < src) || (dst >= (src + size_in_words))) { | 376 if ((dst < src) || (dst >= (src + byte_size))) { |
| 372 ASSERT((dst >= (src + size_in_words)) || | |
| 373 ((OffsetFrom(reinterpret_cast<Address>(src)) - | |
| 374 OffsetFrom(reinterpret_cast<Address>(dst))) >= kPointerSize)); | |
| 375 | |
| 376 Object** src_slot = reinterpret_cast<Object**>(src); | 377 Object** src_slot = reinterpret_cast<Object**>(src); |
| 377 Object** dst_slot = reinterpret_cast<Object**>(dst); | 378 Object** dst_slot = reinterpret_cast<Object**>(dst); |
| 378 Object** end_slot = src_slot + size_in_words; | 379 Object** end_slot = src_slot + size_in_words; |
| 379 | 380 |
| 380 while (src_slot != end_slot) { | 381 while (src_slot != end_slot) { |
| 381 *dst_slot++ = *src_slot++; | 382 *dst_slot++ = *src_slot++; |
| 382 } | 383 } |
| 383 } else { | 384 } else { |
| 384 memmove(dst, src, byte_size); | 385 memmove(dst, src, byte_size); |
| 385 } | 386 } |
| (...skipping 290 matching lines...) |
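Note on the MoveBlock change (new line 376): the non-overlap check now compares against src + byte_size instead of src + size_in_words. Since Address arithmetic is in bytes, the old bound covered only a fraction of the block, so an overlapping destination just past src + size_in_words could be forward-copied and corrupt the source. A minimal standalone sketch of the same copy-or-memmove decision, using plain C++ types rather than V8's Address/Object** (the names here are illustrative, not from the V8 sources):

    #include <cstring>

    // Copy byte_size bytes, word by word, from src to dst.
    // A forward word copy is safe only when dst lies outside
    // [src, src + byte_size); otherwise fall back to memmove,
    // which handles overlapping regions.
    static void MoveWordBlock(char* dst, const char* src, int byte_size) {
      const int kWordSize = sizeof(void*);
      int size_in_words = byte_size / kWordSize;
      if (dst < src || dst >= src + byte_size) {  // bound in bytes, not words
        const void* const* from = reinterpret_cast<const void* const*>(src);
        void** to = reinterpret_cast<void**>(dst);
        for (int i = 0; i < size_in_words; i++) {
          to[i] = from[i];
        }
      } else {
        memmove(dst, src, byte_size);
      }
    }

When dst < src, a low-to-high copy reads each source word before any write can reach it, so the forward loop is still correct even if the regions overlap in that direction.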
| 676 | 677 |
| 677 | 678 |
| 678 Heap* _inline_get_heap_() { | 679 Heap* _inline_get_heap_() { |
| 679 return HEAP; | 680 return HEAP; |
| 680 } | 681 } |
| 681 | 682 |
| 682 | 683 |
| 683 } } // namespace v8::internal | 684 } } // namespace v8::internal |
| 684 | 685 |
| 685 #endif // V8_HEAP_INL_H_ | 686 #endif // V8_HEAP_INL_H_ |