Chromium Code Reviews

Side by Side Diff: test/cctest/test-constantpool.cc

Issue 1162993006: Add support for Embedded Constant Pools for PPC and Arm (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 6 months ago
New file contents (the old ConstantPoolArray tests this patch deletes are reproduced after the new file):

// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Test embedded constant pool builder code.

#include "src/v8.h"

#include "src/assembler.h"
#include "test/cctest/cctest.h"

using namespace v8::internal;

const ConstantPoolEntry::Type kPtrType = ConstantPoolEntry::INTPTR;
const ConstantPoolEntry::Type kDblType = ConstantPoolEntry::DOUBLE;
const ConstantPoolEntry::Access kRegAccess = ConstantPoolEntry::REGULAR;
const ConstantPoolEntry::Access kOvflAccess = ConstantPoolEntry::OVERFLOWED;

const int kReachBits = 6;  // Use reach of 64-bytes to test overflow.
const int kReach = 1 << kReachBits;
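The builder is constructed with separate reach bits for pointer-sized and double entries (see the ConstantPoolMixedReach test below), so kReachBits = 6 caps the regular, directly addressable section of the pool at 64 bytes for both kinds. A minimal sketch of the implied capacities, assuming a 64-bit build where kPointerSize == 8 and kDoubleSize == 8; the constant names below are illustrative and not part of the patch:

// Illustrative arithmetic only; not part of the patch.
constexpr int kIllustrativeReach = 1 << 6;                // 64 bytes of reach
constexpr int kPtrSlotsInReach = kIllustrativeReach / 8;  // 8 intptr_t slots
constexpr int kDblSlotsInReach = kIllustrativeReach / 8;  // 8 double slots
static_assert(kPtrSlotsInReach == 8 && kDblSlotsInReach == 8,
              "a 64-byte reach holds eight 8-byte entries");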
TEST(ConstantPoolPointers) {
  ConstantPoolBuilder builder(kReachBits, kReachBits);
  const int kRegularCount = kReach / kPointerSize;
  ConstantPoolEntry::Access access;
  int pos = 0;
  intptr_t value = 0;
  bool sharing_ok = true;

  CHECK(builder.IsEmpty());
  while (builder.NextAccess(kPtrType) == kRegAccess) {
    access = builder.AddEntry(pos++, value++, sharing_ok);
    CHECK_EQ(access, kRegAccess);
  }
  CHECK(!builder.IsEmpty());
  CHECK_EQ(pos, kRegularCount);

  access = builder.AddEntry(pos, value, sharing_ok);
  CHECK_EQ(access, kOvflAccess);
}
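Each test follows the same pattern: keep adding entries while NextAccess() still promises REGULAR placement, then check that the first entry beyond the reach limit comes back as OVERFLOWED. A hypothetical helper, not part of the patch, showing just that loop for pointer-sized entries using only the calls exercised above:

// Hypothetical helper, not part of the patch: adds distinct pointer-sized
// entries until the regular section is exhausted and returns how many were
// accepted with REGULAR access.
static int FillRegularPointerSection(ConstantPoolBuilder* builder) {
  int count = 0;
  intptr_t value = 0;
  bool sharing_ok = true;
  while (builder->NextAccess(ConstantPoolEntry::INTPTR) ==
         ConstantPoolEntry::REGULAR) {
    builder->AddEntry(count, value++, sharing_ok);
    count++;
  }
  return count;  // Equals kReach / kPointerSize for an initially empty builder.
}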
TEST(ConstantPoolDoubles) {
  ConstantPoolBuilder builder(kReachBits, kReachBits);
  const int kRegularCount = kReach / kDoubleSize;
  ConstantPoolEntry::Access access;
  int pos = 0;
  double value = 0.0;

  CHECK(builder.IsEmpty());
  while (builder.NextAccess(kDblType) == kRegAccess) {
    access = builder.AddEntry(pos++, value);
    value += 0.5;
    CHECK_EQ(access, kRegAccess);
  }
  CHECK(!builder.IsEmpty());
  CHECK_EQ(pos, kRegularCount);

  access = builder.AddEntry(pos, value);
  CHECK_EQ(access, kOvflAccess);
}
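The double variant mirrors the pointer test; since kDoubleSize is 8 bytes on every target, the 64-byte reach again yields eight regular entries before the builder switches to overflowed placement.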
TEST(ConstantPoolMixedTypes) {
  ConstantPoolBuilder builder(kReachBits, kReachBits);
  const int kRegularCount = (((kReach / (kDoubleSize + kPointerSize)) * 2) +
                             ((kPointerSize < kDoubleSize) ? 1 : 0));
  ConstantPoolEntry::Type type = kPtrType;
  ConstantPoolEntry::Access access;
  int pos = 0;
  intptr_t ptrValue = 0;
  double dblValue = 0.0;
  bool sharing_ok = true;

  CHECK(builder.IsEmpty());
  while (builder.NextAccess(type) == kRegAccess) {
    if (type == kPtrType) {
      access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
      type = kDblType;
    } else {
      access = builder.AddEntry(pos++, dblValue);
      dblValue += 0.5;
      type = kPtrType;
    }
    CHECK_EQ(access, kRegAccess);
  }
  CHECK(!builder.IsEmpty());
  CHECK_EQ(pos, kRegularCount);

  access = builder.AddEntry(pos++, ptrValue, sharing_ok);
  CHECK_EQ(access, kOvflAccess);
  access = builder.AddEntry(pos, dblValue);
  CHECK_EQ(access, kOvflAccess);
}
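Spelling out the kRegularCount expression used above: each alternating pointer/double pair consumes kDoubleSize + kPointerSize bytes of reach, and the trailing "+ 1" only applies when pointers are smaller than doubles. The constants below are illustrative only, assuming kReach == 64 with kPointerSize of 8 (64-bit) or 4 (32-bit):

// Worked examples of the capacity formula; illustrative, not part of the patch.
constexpr int kMixed64Bit = ((64 / (8 + 8)) * 2) + ((8 < 8) ? 1 : 0);  // 4 pairs
constexpr int kMixed32Bit = ((64 / (8 + 4)) * 2) + ((4 < 8) ? 1 : 0);  // 5 pairs + 1
static_assert(kMixed64Bit == 8, "eight alternating entries on a 64-bit build");
static_assert(kMixed32Bit == 11, "eleven alternating entries on a 32-bit build");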
TEST(ConstantPoolMixedReach) {
  const int ptrReachBits = kReachBits + 2;
  const int ptrReach = 1 << ptrReachBits;
  const int dblReachBits = kReachBits;
  const int dblReach = kReach;
  const int dblRegularCount =
      Min(dblReach / kDoubleSize, ptrReach / (kDoubleSize + kPointerSize));
  const int ptrRegularCount =
      ((ptrReach - (dblRegularCount * (kDoubleSize + kPointerSize))) /
       kPointerSize) +
      dblRegularCount;
  ConstantPoolBuilder builder(ptrReachBits, dblReachBits);
  ConstantPoolEntry::Access access;
  int pos = 0;
  intptr_t ptrValue = 0;
  double dblValue = 0.0;
  bool sharing_ok = true;
  int ptrCount = 0;
  int dblCount = 0;

  CHECK(builder.IsEmpty());
  while (builder.NextAccess(kDblType) == kRegAccess) {
    access = builder.AddEntry(pos++, dblValue);
    dblValue += 0.5;
    dblCount++;
    CHECK_EQ(access, kRegAccess);

    access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
    ptrCount++;
    CHECK_EQ(access, kRegAccess);
  }
  CHECK(!builder.IsEmpty());
  CHECK_EQ(dblCount, dblRegularCount);

  while (ptrCount < ptrRegularCount) {
    access = builder.AddEntry(pos++, dblValue);
    dblValue += 0.5;
    CHECK_EQ(access, kOvflAccess);

    access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
    ptrCount++;
    CHECK_EQ(access, kRegAccess);
  }
  CHECK_EQ(builder.NextAccess(kPtrType), kOvflAccess);

  access = builder.AddEntry(pos++, ptrValue, sharing_ok);
  CHECK_EQ(access, kOvflAccess);
  access = builder.AddEntry(pos, dblValue);
  CHECK_EQ(access, kOvflAccess);
}
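With the wider pointer reach above (ptrReach == 256 bytes versus dblReach == 64 bytes) and assuming a 64-bit build, the expected counts work out to dblRegularCount == Min(64 / 8, 256 / 16) == 8 and ptrRegularCount == (256 - 8 * 16) / 8 + 8 == 24, which is why the second loop expects doubles to start overflowing while pointer-sized entries are still placed regularly.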
TEST(ConstantPoolSharing) {
  ConstantPoolBuilder builder(kReachBits, kReachBits);
  const int kRegularCount = (((kReach / (kDoubleSize + kPointerSize)) * 2) +
                             ((kPointerSize < kDoubleSize) ? 1 : 0));
  ConstantPoolEntry::Access access;

  CHECK(builder.IsEmpty());

  ConstantPoolEntry::Type type = kPtrType;
  int pos = 0;
  intptr_t ptrValue = 0;
  double dblValue = 0.0;
  bool sharing_ok = true;
  while (builder.NextAccess(type) == kRegAccess) {
    if (type == kPtrType) {
      access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
      type = kDblType;
    } else {
      access = builder.AddEntry(pos++, dblValue);
      dblValue += 0.5;
      type = kPtrType;
    }
    CHECK_EQ(access, kRegAccess);
  }
  CHECK(!builder.IsEmpty());
  CHECK_EQ(pos, kRegularCount);

  type = kPtrType;
  ptrValue = 0;
  dblValue = 0.0;
  while (pos < kRegularCount * 2) {
    if (type == kPtrType) {
      access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
      type = kDblType;
    } else {
      access = builder.AddEntry(pos++, dblValue);
      dblValue += 0.5;
      type = kPtrType;
    }
    CHECK_EQ(access, kRegAccess);
  }

  access = builder.AddEntry(pos++, ptrValue, sharing_ok);
  CHECK_EQ(access, kOvflAccess);
  access = builder.AddEntry(pos, dblValue);
  CHECK_EQ(access, kOvflAccess);
}
TEST(ConstantPoolNoSharing) {
  ConstantPoolBuilder builder(kReachBits, kReachBits);
  const int kRegularCount = (((kReach / (kDoubleSize + kPointerSize)) * 2) +
                             ((kPointerSize < kDoubleSize) ? 1 : 0));
  ConstantPoolEntry::Access access;

  CHECK(builder.IsEmpty());

  ConstantPoolEntry::Type type = kPtrType;
  int pos = 0;
  intptr_t ptrValue = 0;
  double dblValue = 0.0;
  bool sharing_ok = false;
  while (builder.NextAccess(type) == kRegAccess) {
    if (type == kPtrType) {
      access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
      type = kDblType;
    } else {
      access = builder.AddEntry(pos++, dblValue);
      dblValue += 0.5;
      type = kPtrType;
    }
    CHECK_EQ(access, kRegAccess);
  }
  CHECK(!builder.IsEmpty());
  CHECK_EQ(pos, kRegularCount);

  type = kPtrType;
  ptrValue = 0;
  dblValue = 0.0;
  sharing_ok = true;
  while (pos < kRegularCount * 2) {
    if (type == kPtrType) {
      access = builder.AddEntry(pos++, ptrValue++, sharing_ok);
      type = kDblType;
      CHECK_EQ(access, kOvflAccess);
    } else {
      access = builder.AddEntry(pos++, dblValue);
      dblValue += 0.5;
      type = kPtrType;
      CHECK_EQ(access, kRegAccess);
    }
  }

  access = builder.AddEntry(pos++, ptrValue, sharing_ok);
  CHECK_EQ(access, kOvflAccess);
  access = builder.AddEntry(pos, dblValue);
  CHECK_EQ(access, kOvflAccess);
}
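The last two tests differ only in the sharing_ok flag used for the pointer entries. In ConstantPoolSharing the second pass re-adds the same value sequences and still gets REGULAR access, because identical entries can share the slots already reserved instead of consuming more reach. In ConstantPoolNoSharing the first-pass pointer entries were added with sharing_ok == false, so the duplicates added later cannot share them and spill to OVERFLOWED, while the duplicate doubles still share and stay REGULAR. A minimal sketch of that distinction, with hypothetical offsets, not part of the patch:

// Hypothetical illustration of sharing, not part of the patch.
static void SharingSketch() {
  ConstantPoolBuilder b(kReachBits, kReachBits);
  b.AddEntry(0, static_cast<intptr_t>(42), true);   // reserves a regular slot
  b.AddEntry(4, static_cast<intptr_t>(42), true);   // may reuse that same slot
  b.AddEntry(8, static_cast<intptr_t>(42), false);  // sharing disallowed: needs its own slot
}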
Old file contents (deleted by this patch):

// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Test constant pool array code.

#include "src/v8.h"

#include "src/factory.h"
#include "src/objects.h"
#include "test/cctest/cctest.h"

using namespace v8::internal;

static ConstantPoolArray::Type kTypes[] = { ConstantPoolArray::INT64,
                                            ConstantPoolArray::CODE_PTR,
                                            ConstantPoolArray::HEAP_PTR,
                                            ConstantPoolArray::INT32 };
static ConstantPoolArray::LayoutSection kSmall =
    ConstantPoolArray::SMALL_SECTION;
static ConstantPoolArray::LayoutSection kExtended =
    ConstantPoolArray::EXTENDED_SECTION;

Code* DummyCode(LocalContext* context) {
  CompileRun("function foo() {};");
  i::Handle<i::JSFunction> fun = v8::Utils::OpenHandle(
      *v8::Local<v8::Function>::Cast(
          (*context)->Global()->Get(v8_str("foo"))));
  return fun->code();
}


TEST(ConstantPoolSmall) {
  LocalContext context;
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(context->GetIsolate());

  // Check construction.
  ConstantPoolArray::NumberOfEntries small(3, 1, 2, 1);
  Handle<ConstantPoolArray> array = factory->NewConstantPoolArray(small);

  int expected_counts[] = { 3, 1, 2, 1 };
  int expected_first_idx[] = { 0, 3, 4, 6 };
  int expected_last_idx[] = { 2, 3, 5, 6 };
  for (int i = 0; i < 4; i++) {
    CHECK_EQ(expected_counts[i], array->number_of_entries(kTypes[i], kSmall));
    CHECK_EQ(expected_first_idx[i], array->first_index(kTypes[i], kSmall));
    CHECK_EQ(expected_last_idx[i], array->last_index(kTypes[i], kSmall));
  }
  CHECK(!array->is_extended_layout());

  // Check getters and setters.
  int64_t big_number = V8_2PART_UINT64_C(0x12345678, 9ABCDEF0);
  Handle<Object> object = factory->NewHeapNumber(4.0, IMMUTABLE, TENURED);
  Code* code = DummyCode(&context);
  array->set(0, big_number);
  array->set(1, 0.5);
  array->set(2, 3e-24);
  array->set(3, code->entry());
  array->set(4, code);
  array->set(5, *object);
  array->set(6, 50);
  CHECK_EQ(big_number, array->get_int64_entry(0));
  CHECK_EQ(0.5, array->get_int64_entry_as_double(1));
  CHECK_EQ(3e-24, array->get_int64_entry_as_double(2));
  CHECK_EQ(code->entry(), array->get_code_ptr_entry(3));
  CHECK_EQ(code, array->get_heap_ptr_entry(4));
  CHECK_EQ(*object, array->get_heap_ptr_entry(5));
  CHECK_EQ(50, array->get_int32_entry(6));
}


TEST(ConstantPoolExtended) {
  LocalContext context;
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(context->GetIsolate());

  // Check construction.
  ConstantPoolArray::NumberOfEntries small(1, 2, 3, 4);
  ConstantPoolArray::NumberOfEntries extended(5, 6, 7, 8);
  Handle<ConstantPoolArray> array =
      factory->NewExtendedConstantPoolArray(small, extended);

  // Check small section.
  int small_counts[] = { 1, 2, 3, 4 };
  int small_first_idx[] = { 0, 1, 3, 6 };
  int small_last_idx[] = { 0, 2, 5, 9 };
  for (int i = 0; i < 4; i++) {
    CHECK_EQ(small_counts[i], array->number_of_entries(kTypes[i], kSmall));
    CHECK_EQ(small_first_idx[i], array->first_index(kTypes[i], kSmall));
    CHECK_EQ(small_last_idx[i], array->last_index(kTypes[i], kSmall));
  }

  // Check extended layout.
  CHECK(array->is_extended_layout());
  int extended_counts[] = { 5, 6, 7, 8 };
  int extended_first_idx[] = { 10, 15, 21, 28 };
  int extended_last_idx[] = { 14, 20, 27, 35 };
  for (int i = 0; i < 4; i++) {
    CHECK_EQ(extended_counts[i],
             array->number_of_entries(kTypes[i], kExtended));
    CHECK_EQ(extended_first_idx[i], array->first_index(kTypes[i], kExtended));
    CHECK_EQ(extended_last_idx[i], array->last_index(kTypes[i], kExtended));
  }

  // Check small and large section's don't overlap.
  int64_t small_section_int64 = V8_2PART_UINT64_C(0x56781234, DEF09ABC);
  Code* small_section_code_ptr = DummyCode(&context);
  Handle<Object> small_section_heap_ptr =
      factory->NewHeapNumber(4.0, IMMUTABLE, TENURED);
  int32_t small_section_int32 = 0xab12cd45;

  int64_t extended_section_int64 = V8_2PART_UINT64_C(0x12345678, 9ABCDEF0);
  Code* extended_section_code_ptr = DummyCode(&context);
  Handle<Object> extended_section_heap_ptr =
      factory->NewHeapNumber(5.0, IMMUTABLE, TENURED);
  int32_t extended_section_int32 = 0xef67ab89;

  for (int i = array->first_index(ConstantPoolArray::INT64, kSmall);
       i <= array->last_index(ConstantPoolArray::INT32, kSmall); i++) {
    if (i <= array->last_index(ConstantPoolArray::INT64, kSmall)) {
      array->set(i, small_section_int64);
    } else if (i <= array->last_index(ConstantPoolArray::CODE_PTR, kSmall)) {
      array->set(i, small_section_code_ptr->entry());
    } else if (i <= array->last_index(ConstantPoolArray::HEAP_PTR, kSmall)) {
      array->set(i, *small_section_heap_ptr);
    } else {
      CHECK(i <= array->last_index(ConstantPoolArray::INT32, kSmall));
      array->set(i, small_section_int32);
    }
  }
  for (int i = array->first_index(ConstantPoolArray::INT64, kExtended);
       i <= array->last_index(ConstantPoolArray::INT32, kExtended); i++) {
    if (i <= array->last_index(ConstantPoolArray::INT64, kExtended)) {
      array->set(i, extended_section_int64);
    } else if (i <= array->last_index(ConstantPoolArray::CODE_PTR, kExtended)) {
      array->set(i, extended_section_code_ptr->entry());
    } else if (i <= array->last_index(ConstantPoolArray::HEAP_PTR, kExtended)) {
      array->set(i, *extended_section_heap_ptr);
    } else {
      CHECK(i <= array->last_index(ConstantPoolArray::INT32, kExtended));
      array->set(i, extended_section_int32);
    }
  }

  for (int i = array->first_index(ConstantPoolArray::INT64, kSmall);
       i <= array->last_index(ConstantPoolArray::INT32, kSmall); i++) {
    if (i <= array->last_index(ConstantPoolArray::INT64, kSmall)) {
      CHECK_EQ(small_section_int64, array->get_int64_entry(i));
    } else if (i <= array->last_index(ConstantPoolArray::CODE_PTR, kSmall)) {
      CHECK_EQ(small_section_code_ptr->entry(), array->get_code_ptr_entry(i));
    } else if (i <= array->last_index(ConstantPoolArray::HEAP_PTR, kSmall)) {
      CHECK_EQ(*small_section_heap_ptr, array->get_heap_ptr_entry(i));
    } else {
      CHECK(i <= array->last_index(ConstantPoolArray::INT32, kSmall));
      CHECK_EQ(small_section_int32, array->get_int32_entry(i));
    }
  }
  for (int i = array->first_index(ConstantPoolArray::INT64, kExtended);
       i <= array->last_index(ConstantPoolArray::INT32, kExtended); i++) {
    if (i <= array->last_index(ConstantPoolArray::INT64, kExtended)) {
      CHECK_EQ(extended_section_int64, array->get_int64_entry(i));
    } else if (i <= array->last_index(ConstantPoolArray::CODE_PTR, kExtended)) {
      CHECK_EQ(extended_section_code_ptr->entry(),
               array->get_code_ptr_entry(i));
    } else if (i <= array->last_index(ConstantPoolArray::HEAP_PTR, kExtended)) {
      CHECK_EQ(*extended_section_heap_ptr, array->get_heap_ptr_entry(i));
    } else {
      CHECK(i <= array->last_index(ConstantPoolArray::INT32, kExtended));
      CHECK_EQ(extended_section_int32, array->get_int32_entry(i));
    }
  }
}


static void CheckIterator(Handle<ConstantPoolArray> array,
                          ConstantPoolArray::Type type,
                          int expected_indexes[],
                          int count) {
  int i = 0;
  ConstantPoolArray::Iterator iter(*array, type);
  while (!iter.is_finished()) {
    CHECK_EQ(expected_indexes[i++], iter.next_index());
  }
  CHECK_EQ(count, i);
}


TEST(ConstantPoolIteratorSmall) {
  LocalContext context;
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(context->GetIsolate());

  ConstantPoolArray::NumberOfEntries small(1, 5, 2, 0);
  Handle<ConstantPoolArray> array = factory->NewConstantPoolArray(small);

  int expected_int64_indexs[] = { 0 };
  CheckIterator(array, ConstantPoolArray::INT64, expected_int64_indexs, 1);
  int expected_code_indexs[] = { 1, 2, 3, 4, 5 };
  CheckIterator(array, ConstantPoolArray::CODE_PTR, expected_code_indexs, 5);
  int expected_heap_indexs[] = { 6, 7 };
  CheckIterator(array, ConstantPoolArray::HEAP_PTR, expected_heap_indexs, 2);
  int expected_int32_indexs[1];
  CheckIterator(array, ConstantPoolArray::INT32, expected_int32_indexs, 0);
}


TEST(ConstantPoolIteratorExtended) {
  LocalContext context;
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(context->GetIsolate());

  ConstantPoolArray::NumberOfEntries small(1, 0, 0, 4);
  ConstantPoolArray::NumberOfEntries extended(5, 0, 3, 0);
  Handle<ConstantPoolArray> array =
      factory->NewExtendedConstantPoolArray(small, extended);

  int expected_int64_indexs[] = { 0, 5, 6, 7, 8, 9 };
  CheckIterator(array, ConstantPoolArray::INT64, expected_int64_indexs, 6);
  int expected_code_indexs[1];
  CheckIterator(array, ConstantPoolArray::CODE_PTR, expected_code_indexs, 0);
  int expected_heap_indexs[] = { 10, 11, 12 };
  CheckIterator(array, ConstantPoolArray::HEAP_PTR, expected_heap_indexs, 3);
  int expected_int32_indexs[] = { 1, 2, 3, 4 };
  CheckIterator(array, ConstantPoolArray::INT32, expected_int32_indexs, 4);
}


TEST(ConstantPoolPreciseGC) {
  LocalContext context;
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(context->GetIsolate());

  ConstantPoolArray::NumberOfEntries small(1, 0, 0, 1);
  Handle<ConstantPoolArray> array = factory->NewConstantPoolArray(small);

  // Check that the store buffer knows which entries are pointers and which are
  // not. To do this, make non-pointer entries which look like new space
  // pointers but are actually invalid and ensure the GC doesn't try to move
  // them.
  Handle<HeapObject> object = factory->NewHeapNumber(4.0);
  Object* raw_ptr = *object;
  // If interpreted as a pointer, this should be right inside the heap number
  // which will cause a crash when trying to lookup the 'map' pointer.
  intptr_t invalid_ptr = reinterpret_cast<intptr_t>(raw_ptr) + kInt32Size;
  int32_t invalid_ptr_int32 = static_cast<int32_t>(invalid_ptr);
  int64_t invalid_ptr_int64 = static_cast<int64_t>(invalid_ptr);
  array->set(0, invalid_ptr_int64);
  array->set(1, invalid_ptr_int32);

  // Ensure we perform a scan on scavenge for the constant pool's page.
  MemoryChunk::FromAddress(array->address())->set_scan_on_scavenge(true);
  heap->CollectGarbage(NEW_SPACE);

  // Check the object was moved by GC.
  CHECK_NE(*object, raw_ptr);

  // Check the non-pointer entries weren't changed.
  CHECK_EQ(invalid_ptr_int64, array->get_int64_entry(0));
  CHECK_EQ(invalid_ptr_int32, array->get_int32_entry(1));
}


TEST(ConstantPoolCompacting) {
  if (i::FLAG_never_compact) return;
  i::FLAG_always_compact = true;
  LocalContext context;
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(context->GetIsolate());

  ConstantPoolArray::NumberOfEntries small(0, 0, 1, 0);
  ConstantPoolArray::NumberOfEntries extended(0, 0, 1, 0);
  Handle<ConstantPoolArray> array =
      factory->NewExtendedConstantPoolArray(small, extended);

  // Start a second old-space page so that the heap pointer added to the
  // constant pool array ends up on the an evacuation candidate page.
  Page* first_page = heap->old_space()->anchor()->next_page();
  {
    HandleScope scope(isolate);
    int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
    Handle<HeapObject> temp =
        factory->NewFixedDoubleArray(dummy_array_size / kDoubleSize, TENURED);
    CHECK(heap->InOldSpace(temp->address()));
    Handle<HeapObject> heap_ptr =
        factory->NewHeapNumber(5.0, IMMUTABLE, TENURED);
    CHECK(heap->InOldSpace(heap_ptr->address()));
    CHECK(!first_page->Contains(heap_ptr->address()));
    array->set(0, *heap_ptr);
    array->set(1, *heap_ptr);
  }

  // Check heap pointers are correctly updated on GC.
  Object* old_ptr = array->get_heap_ptr_entry(0);
  Handle<Object> object(old_ptr, isolate);
  CHECK_EQ(old_ptr, *object);
  CHECK_EQ(old_ptr, array->get_heap_ptr_entry(1));

  // Force compacting garbage collection.
  CHECK(FLAG_always_compact);
  heap->CollectAllGarbage();

  CHECK_NE(old_ptr, *object);
  CHECK_EQ(*object, array->get_heap_ptr_entry(0));
  CHECK_EQ(*object, array->get_heap_ptr_entry(1));
}