OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 285 matching lines...)
296 static FixedArrayBase* LeftTrimFixedArray(Heap* heap, | 296 static FixedArrayBase* LeftTrimFixedArray(Heap* heap, |
297 FixedArrayBase* elms, | 297 FixedArrayBase* elms, |
298 int to_trim) { | 298 int to_trim) { |
299 Map* map = elms->map(); | 299 Map* map = elms->map(); |
300 int entry_size; | 300 int entry_size; |
301 if (elms->IsFixedArray()) { | 301 if (elms->IsFixedArray()) { |
302 entry_size = kPointerSize; | 302 entry_size = kPointerSize; |
303 } else { | 303 } else { |
304 entry_size = kDoubleSize; | 304 entry_size = kDoubleSize; |
305 } | 305 } |
306 ASSERT(elms->map() != HEAP->fixed_cow_array_map()); | 306 ASSERT(elms->map() != heap->fixed_cow_array_map()); |
307 // For now this trick is only applied to fixed arrays in new and paged space. | 307 // For now this trick is only applied to fixed arrays in new and paged space. |
308 // In large object space the object's start must coincide with chunk | 308 // In large object space the object's start must coincide with chunk |
309 // and thus the trick is just not applicable. | 309 // and thus the trick is just not applicable. |
310 ASSERT(!HEAP->lo_space()->Contains(elms)); | 310 ASSERT(!heap->lo_space()->Contains(elms)); |
311 | 311 |
312 STATIC_ASSERT(FixedArrayBase::kMapOffset == 0); | 312 STATIC_ASSERT(FixedArrayBase::kMapOffset == 0); |
313 STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize); | 313 STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize); |
314 STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize); | 314 STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize); |
315 | 315 |
316 Object** former_start = HeapObject::RawField(elms, 0); | 316 Object** former_start = HeapObject::RawField(elms, 0); |
317 | 317 |
318 const int len = elms->length(); | 318 const int len = elms->length(); |
319 | 319 |
320 if (to_trim * entry_size > FixedArrayBase::kHeaderSize && | 320 if (to_trim * entry_size > FixedArrayBase::kHeaderSize && |
(...skipping 1534 matching lines...)
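
Note on the hunk above: the only change is that the asserts now use the heap argument instead of the isolate-wide HEAP macro; the left-trimming trick itself is unchanged. As a minimal, hypothetical sketch of that trick (assuming a plain FixedArray so every entry is pointer-sized, and leaving out the zapping, incremental-marking and profiler bookkeeping the real function performs), the map and the adjusted length are rewritten to_trim entries further into the object, and the vacated prefix becomes a filler so heap iteration stays valid:

static FixedArrayBase* LeftTrimSketch(Heap* heap,
                                      FixedArrayBase* elms,
                                      int to_trim) {
  // Simplifying assumption: a plain FixedArray, so each entry is one pointer.
  const int entry_size = kPointerSize;
  Map* map = elms->map();
  const int len = elms->length();
  Object** former_start = HeapObject::RawField(elms, 0);

  // Turn the vacated prefix into a filler so the heap stays iterable.
  heap->CreateFillerObjectAt(elms->address(), to_trim * entry_size);

  // Re-create the header (map, then length) to_trim entries further on.
  former_start[to_trim] = map;
  former_start[to_trim + 1] = Smi::FromInt(len - to_trim);

  // The trimmed array is the same allocation, viewed from its new start.
  return FixedArrayBase::cast(HeapObject::FromAddress(
      elms->address() + to_trim * entry_size));
}

The result is the same allocation viewed to_trim entries further on, which is also why the trick cannot be applied in large object space, where an object's start must coincide with the start of its chunk.
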
1855 return Handle<Code>(code_address); \ | 1855 return Handle<Code>(code_address); \ |
1856 } | 1856 } |
1857 BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C) | 1857 BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C) |
1858 BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A) | 1858 BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A) |
1859 BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A) | 1859 BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A) |
1860 #undef DEFINE_BUILTIN_ACCESSOR_C | 1860 #undef DEFINE_BUILTIN_ACCESSOR_C |
1861 #undef DEFINE_BUILTIN_ACCESSOR_A | 1861 #undef DEFINE_BUILTIN_ACCESSOR_A |
1862 | 1862 |
1863 | 1863 |
1864 } } // namespace v8::internal | 1864 } } // namespace v8::internal |
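
For context on the accessor definitions at the end of the file: BUILTIN_LIST_C, BUILTIN_LIST_A and BUILTIN_LIST_DEBUG_A are X-macro lists applied to the DEFINE_BUILTIN_ACCESSOR_* macros, so one accessor is generated per builtin and the lists remain the single source of truth. A self-contained sketch of the pattern, using hypothetical names rather than the real V8 macros:

#include <cstdio>

// Hypothetical list of builtins; V(...) is invoked once per entry.
#define EXAMPLE_BUILTIN_LIST(V) \
  V(ArrayPush, 0)               \
  V(ArrayPop, 1)

// Definition macro: generates one accessor function per list entry.
#define DEFINE_EXAMPLE_ACCESSOR(name, index) \
  static int Builtin_##name() { return index; }

EXAMPLE_BUILTIN_LIST(DEFINE_EXAMPLE_ACCESSOR)
#undef DEFINE_EXAMPLE_ACCESSOR

int main() {
  // Builtin_ArrayPush and Builtin_ArrayPop were generated by the list above.
  std::printf("%d %d\n", Builtin_ArrayPush(), Builtin_ArrayPop());
  return 0;
}

Adding an entry to EXAMPLE_BUILTIN_LIST automatically produces a matching accessor, which mirrors how a new builtin only needs to be added to the BUILTIN_LIST_* macros.
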