Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(46)

Side by Side Diff: test/cctest/test-heap.cc

Issue 1512553002: [cctest] Move most heap related tests to test/cctest/heap and clean wrt IWYU (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fixed compile time error due to missing header file Created 5 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « test/cctest/test-dictionary.cc ('k') | test/cctest/test-incremental-marking.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 // TODO(jochen): Remove this after the setting is turned on globally.
29 #define V8_IMMINENT_DEPRECATION_WARNINGS
30
31 #include <stdlib.h>
32 #include <utility>
33
34 #include "src/compilation-cache.h"
35 #include "src/context-measure.h"
36 #include "src/deoptimizer.h"
37 #include "src/execution.h"
38 #include "src/factory.h"
39 #include "src/global-handles.h"
40 #include "src/heap/gc-tracer.h"
41 #include "src/heap/memory-reducer.h"
42 #include "src/ic/ic.h"
43 #include "src/macro-assembler.h"
44 #include "src/snapshot/snapshot.h"
45 #include "test/cctest/cctest.h"
46 #include "test/cctest/heap-tester.h"
47 #include "test/cctest/test-feedback-vector.h"
48
49
50 namespace v8 {
51 namespace internal {
52
53 static void CheckMap(Map* map, int type, int instance_size) {
54 CHECK(map->IsHeapObject());
55 #ifdef DEBUG
56 CHECK(CcTest::heap()->Contains(map));
57 #endif
58 CHECK_EQ(CcTest::heap()->meta_map(), map->map());
59 CHECK_EQ(type, map->instance_type());
60 CHECK_EQ(instance_size, map->instance_size());
61 }
62
63
// Verifies the layout (instance type and size) of the heap's canonical
// maps: meta map, heap number, every SIMD128 value map, and the
// variable-sized fixed array and string maps.
64 TEST(HeapMaps) {
65   CcTest::InitializeVM();
66   Heap* heap = CcTest::heap();
67   CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
68   CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
// Expands to one CheckMap call per SIMD128 type known to the heap.
69 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
70   CheckMap(heap->type##_map(), SIMD128_VALUE_TYPE, Type::kSize);
71   SIMD128_TYPES(SIMD128_TYPE)
72 #undef SIMD128_TYPE
// Fixed arrays and strings have per-instance sizes, marked by the sentinel.
73   CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
74   CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
75 }
76
77
78 static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
79 CHECK(obj->IsOddball());
80 Handle<Object> handle(obj, isolate);
81 Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
82 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
83 }
84
85
86 static void CheckSmi(Isolate* isolate, int value, const char* string) {
87 Handle<Object> handle(Smi::FromInt(value), isolate);
88 Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
89 CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
90 }
91
92
93 static void CheckNumber(Isolate* isolate, double value, const char* string) {
94 Handle<Object> number = isolate->factory()->NewNumber(value);
95 CHECK(number->IsNumber());
96 Handle<Object> print_string =
97 Object::ToString(isolate, number).ToHandleChecked();
98 CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
99 }
100
101
// Verifies Isolate::FindCodeObject: every pointer-aligned interior address
// of a code object must resolve back to that object, and an address inside
// a *different* code object must not resolve to the first one.
102 static void CheckFindCodeObject(Isolate* isolate) {
103   // Test FindCodeObject
104 #define __ assm.
105
106   Assembler assm(isolate, NULL, 0);
107
108   __ nop();  // supported on all architectures
109
110   CodeDesc desc;
111   assm.GetCode(&desc);
112   Handle<Code> code = isolate->factory()->NewCode(
113       desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
114   CHECK(code->IsCode());
115
116   HeapObject* obj = HeapObject::cast(*code);
117   Address obj_addr = obj->address();
118
// Every pointer-aligned address within the object maps back to it.
119   for (int i = 0; i < obj->Size(); i += kPointerSize) {
120     Object* found = isolate->FindCodeObject(obj_addr + i);
121     CHECK_EQ(*code, found);
122   }
123
// An interior address of a second, identical code object must not map to
// the first object.
124   Handle<Code> copy = isolate->factory()->NewCode(
125       desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
126   HeapObject* obj_copy = HeapObject::cast(*copy);
127   Object* not_right = isolate->FindCodeObject(obj_copy->address() +
128   obj_copy->Size() / 2);
129   CHECK(not_right != *code);
130 }
131
132
// A Handle constructed over a null Object* is still a usable, non-null
// handle: Handle::is_null() reflects whether the handle has a location,
// not whether the stored pointer is null.
133 TEST(HandleNull) {
134   CcTest::InitializeVM();
135   Isolate* isolate = CcTest::i_isolate();
136   HandleScope outer_scope(isolate);
137   LocalContext context;
138   Handle<Object> n(static_cast<Object*>(nullptr), isolate);
139   CHECK(!n.is_null());
140 }
141
142
// Exercises basic heap object allocation: number allocation (Smi vs.
// HeapNumber selection across the Smi range), string allocation, global
// property lookup, and ToString of oddballs, Smis and doubles.
143 TEST(HeapObjects) {
144   CcTest::InitializeVM();
145   Isolate* isolate = CcTest::i_isolate();
146   Factory* factory = isolate->factory();
147   Heap* heap = isolate->heap();
148
149   HandleScope sc(isolate);
// A non-integral double must be boxed as a HeapNumber.
150   Handle<Object> value = factory->NewNumber(1.000123);
151   CHECK(value->IsHeapNumber());
152   CHECK(value->IsNumber());
153   CHECK_EQ(1.000123, value->Number());
154
// Small integral doubles are represented as Smis.
155   value = factory->NewNumber(1.0);
156   CHECK(value->IsSmi());
157   CHECK(value->IsNumber());
158   CHECK_EQ(1.0, value->Number());
159
160   value = factory->NewNumberFromInt(1024);
161   CHECK(value->IsSmi());
162   CHECK(value->IsNumber());
163   CHECK_EQ(1024.0, value->Number());
164
// The extremes of the Smi range still fit in a Smi.
165   value = factory->NewNumberFromInt(Smi::kMinValue);
166   CHECK(value->IsSmi());
167   CHECK(value->IsNumber());
168   CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());
169
170   value = factory->NewNumberFromInt(Smi::kMaxValue);
171   CHECK(value->IsSmi());
172   CHECK(value->IsNumber());
173   CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
174
175 #if !defined(V8_TARGET_ARCH_64_BIT)
176   // TODO(lrn): We need a NumberFromIntptr function in order to test this.
177   value = factory->NewNumberFromInt(Smi::kMinValue - 1);
178   CHECK(value->IsHeapNumber());
179   CHECK(value->IsNumber());
180   CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
181 #endif
182
// Values just outside the Smi range must be boxed as HeapNumbers.
183   value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
184   CHECK(value->IsHeapNumber());
185   CHECK(value->IsNumber());
186   CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
187   value->Number());
188
189   value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
190   CHECK(value->IsHeapNumber());
191   CHECK(value->IsNumber());
192   CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
193   value->Number());
194
195   // nan oddball checks
196   CHECK(factory->nan_value()->IsNumber());
197   CHECK(std::isnan(factory->nan_value()->Number()));
198
199   Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
200   CHECK(s->IsString());
201   CHECK_EQ(10, s->length());
202
203   Handle<String> object_string = Handle<String>::cast(factory->Object_string());
204   Handle<JSGlobalObject> global(
205   CcTest::i_isolate()->context()->global_object());
206   CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));
207
208   // Check ToString for oddballs
209   CheckOddball(isolate, heap->true_value(), "true");
210   CheckOddball(isolate, heap->false_value(), "false");
211   CheckOddball(isolate, heap->null_value(), "null");
212   CheckOddball(isolate, heap->undefined_value(), "undefined");
213
214   // Check ToString for Smis
215   CheckSmi(isolate, 0, "0");
216   CheckSmi(isolate, 42, "42");
217   CheckSmi(isolate, -42, "-42");
218
219   // Check ToString for Numbers
220   CheckNumber(isolate, 1.1, "1.1");
221
222   CheckFindCodeObject(isolate);
223 }
224
225
// Checks that |value| holds exactly |lane_values|, and that writing
// |other_value| into any single lane leaves all other lanes untouched.
// |value| is restored before returning.
template <typename T, typename LANE_TYPE, int LANES>
static void CheckSimdValue(T* value, LANE_TYPE lane_values[LANES],
                           LANE_TYPE other_value) {
  // Initial contents must match the expected lane values.
  for (int lane = 0; lane < LANES; lane++) {
    CHECK_EQ(lane_values[lane], value->get_lane(lane));
  }
  // Mutating one lane at a time must not disturb the remaining lanes.
  for (int lane = 0; lane < LANES; lane++) {
    value->set_lane(lane, other_value);
    for (int other = 0; other < LANES; other++) {
      LANE_TYPE expected =
          (other == lane) ? other_value : lane_values[other];
      CHECK_EQ(expected, value->get_lane(other));
    }
    value->set_lane(lane, lane_values[lane]);  // Restore the lane.
  }
  CHECK(value->BooleanValue());  // SIMD values are 'true'.
}
246
247
// Allocates one of every SIMD128 value type (Float32x4, Int32x4, Uint32x4,
// Bool32x4, Int16x8, Uint16x8, Bool16x8, Int8x16, Uint8x16, Bool8x16),
// verifies type predicates and lane get/set independence via
// CheckSimdValue, and — in OBJECT_PRINT builds — the printed form.
248 TEST(SimdObjects) {
249   CcTest::InitializeVM();
250   Isolate* isolate = CcTest::i_isolate();
251   Factory* factory = isolate->factory();
252
253   HandleScope sc(isolate);
254
255   // Float32x4
256   {
257     float lanes[4] = {1, 2, 3, 4};
258     float quiet_NaN = std::numeric_limits<float>::quiet_NaN();
259     float signaling_NaN = std::numeric_limits<float>::signaling_NaN();
260
261     Handle<Float32x4> value = factory->NewFloat32x4(lanes);
262     CHECK(value->IsFloat32x4());
263     CheckSimdValue<Float32x4, float, 4>(*value, lanes, 3.14f);
264
265     // Check special lane values.
// Negative zero must round-trip with its sign bit intact.
266     value->set_lane(1, -0.0);
267     CHECK_EQ(-0.0f, value->get_lane(1));
268     CHECK(std::signbit(value->get_lane(1)));  // Sign bit should be preserved.
// Both NaN flavors must still read back as NaN.
269     value->set_lane(2, quiet_NaN);
270     CHECK(std::isnan(value->get_lane(2)));
271     value->set_lane(3, signaling_NaN);
272     CHECK(std::isnan(value->get_lane(3)));
273
274 #ifdef OBJECT_PRINT
275     // Check value printing.
276     {
277       value = factory->NewFloat32x4(lanes);
278       std::ostringstream os;
279       value->Float32x4Print(os);
280       CHECK_EQ("1, 2, 3, 4", os.str());
281     }
282     {
283       float special_lanes[4] = {0, -0.0, quiet_NaN, signaling_NaN};
284       value = factory->NewFloat32x4(special_lanes);
285       std::ostringstream os;
286       value->Float32x4Print(os);
287       // Value printing doesn't preserve signed zeroes.
288       CHECK_EQ("0, 0, NaN, NaN", os.str());
289     }
290 #endif  // OBJECT_PRINT
291   }
292   // Int32x4
293   {
294     int32_t lanes[4] = {1, 2, 3, 4};
295
296     Handle<Int32x4> value = factory->NewInt32x4(lanes);
297     CHECK(value->IsInt32x4());
298     CheckSimdValue<Int32x4, int32_t, 4>(*value, lanes, 3);
299
300 #ifdef OBJECT_PRINT
301     std::ostringstream os;
302     value->Int32x4Print(os);
303     CHECK_EQ("1, 2, 3, 4", os.str());
304 #endif  // OBJECT_PRINT
305   }
306   // Uint32x4
307   {
308     uint32_t lanes[4] = {1, 2, 3, 4};
309
310     Handle<Uint32x4> value = factory->NewUint32x4(lanes);
311     CHECK(value->IsUint32x4());
312     CheckSimdValue<Uint32x4, uint32_t, 4>(*value, lanes, 3);
313
314 #ifdef OBJECT_PRINT
315     std::ostringstream os;
316     value->Uint32x4Print(os);
317     CHECK_EQ("1, 2, 3, 4", os.str());
318 #endif  // OBJECT_PRINT
319   }
320   // Bool32x4
321   {
322     bool lanes[4] = {true, false, true, false};
323
324     Handle<Bool32x4> value = factory->NewBool32x4(lanes);
325     CHECK(value->IsBool32x4());
326     CheckSimdValue<Bool32x4, bool, 4>(*value, lanes, false);
327
328 #ifdef OBJECT_PRINT
329     std::ostringstream os;
330     value->Bool32x4Print(os);
331     CHECK_EQ("true, false, true, false", os.str());
332 #endif  // OBJECT_PRINT
333   }
334   // Int16x8
335   {
336     int16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};
337
338     Handle<Int16x8> value = factory->NewInt16x8(lanes);
339     CHECK(value->IsInt16x8());
340     CheckSimdValue<Int16x8, int16_t, 8>(*value, lanes, 32767);
341
342 #ifdef OBJECT_PRINT
343     std::ostringstream os;
344     value->Int16x8Print(os);
345     CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
346 #endif  // OBJECT_PRINT
347   }
348   // Uint16x8
349   {
350     uint16_t lanes[8] = {1, 2, 3, 4, 5, 6, 7, 8};
351
352     Handle<Uint16x8> value = factory->NewUint16x8(lanes);
353     CHECK(value->IsUint16x8());
354     CheckSimdValue<Uint16x8, uint16_t, 8>(*value, lanes, 32767);
355
356 #ifdef OBJECT_PRINT
357     std::ostringstream os;
358     value->Uint16x8Print(os);
359     CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8", os.str());
360 #endif  // OBJECT_PRINT
361   }
362   // Bool16x8
363   {
364     bool lanes[8] = {true, false, true, false, true, false, true, false};
365
366     Handle<Bool16x8> value = factory->NewBool16x8(lanes);
367     CHECK(value->IsBool16x8());
368     CheckSimdValue<Bool16x8, bool, 8>(*value, lanes, false);
369
370 #ifdef OBJECT_PRINT
371     std::ostringstream os;
372     value->Bool16x8Print(os);
373     CHECK_EQ("true, false, true, false, true, false, true, false", os.str());
374 #endif  // OBJECT_PRINT
375   }
376   // Int8x16
377   {
378     int8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
379
380     Handle<Int8x16> value = factory->NewInt8x16(lanes);
381     CHECK(value->IsInt8x16());
382     CheckSimdValue<Int8x16, int8_t, 16>(*value, lanes, 127);
383
384 #ifdef OBJECT_PRINT
385     std::ostringstream os;
386     value->Int8x16Print(os);
387     CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
388 #endif  // OBJECT_PRINT
389   }
390   // Uint8x16
391   {
392     uint8_t lanes[16] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
393
394     Handle<Uint8x16> value = factory->NewUint8x16(lanes);
395     CHECK(value->IsUint8x16());
396     CheckSimdValue<Uint8x16, uint8_t, 16>(*value, lanes, 127);
397
398 #ifdef OBJECT_PRINT
399     std::ostringstream os;
400     value->Uint8x16Print(os);
401     CHECK_EQ("1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16", os.str());
402 #endif  // OBJECT_PRINT
403   }
404   // Bool8x16
405   {
406     bool lanes[16] = {true, false, true, false, true, false, true, false,
407     true, false, true, false, true, false, true, false};
408
409     Handle<Bool8x16> value = factory->NewBool8x16(lanes);
410     CHECK(value->IsBool8x16());
411     CheckSimdValue<Bool8x16, bool, 16>(*value, lanes, false);
412
413 #ifdef OBJECT_PRINT
414     std::ostringstream os;
415     value->Bool8x16Print(os);
416     CHECK_EQ(
417     "true, false, true, false, true, false, true, false, true, false, "
418     "true, false, true, false, true, false",
419     os.str());
420 #endif  // OBJECT_PRINT
421   }
422 }
423
424
425 TEST(Tagging) {
426 CcTest::InitializeVM();
427 int request = 24;
428 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
429 CHECK(Smi::FromInt(42)->IsSmi());
430 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
431 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
432 }
433
434
// Verifies that objects reachable from the global object survive a
// scavenge, while handle-scoped temporaries are free to be collected once
// their scope closes.
435 TEST(GarbageCollection) {
436   CcTest::InitializeVM();
437   Isolate* isolate = CcTest::i_isolate();
438   Heap* heap = isolate->heap();
439   Factory* factory = isolate->factory();
440
441   HandleScope sc(isolate);
442   // Check GC.
443   heap->CollectGarbage(NEW_SPACE);
444
445   Handle<JSGlobalObject> global(
446   CcTest::i_isolate()->context()->global_object());
447   Handle<String> name = factory->InternalizeUtf8String("theFunction");
448   Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
449   Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
450   Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
451   Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
452   Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
453
454   {
455     HandleScope inner_scope(isolate);
456     // Allocate a function and keep it in global object's property.
457     Handle<JSFunction> function = factory->NewFunction(name);
458     JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
459     // Allocate an object. Unrooted after leaving the scope.
460     Handle<JSObject> obj = factory->NewJSObject(function);
461     JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
462     JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();
463
464     CHECK_EQ(Smi::FromInt(23),
465     *Object::GetProperty(obj, prop_name).ToHandleChecked());
466     CHECK_EQ(Smi::FromInt(24),
467     *Object::GetProperty(obj, prop_namex).ToHandleChecked());
468   }
469
470   heap->CollectGarbage(NEW_SPACE);
471
472   // Function should be alive.
473   CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
474   // Check function is retained.
475   Handle<Object> func_value =
476   Object::GetProperty(global, name).ToHandleChecked();
477   CHECK(func_value->IsJSFunction());
478   Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
479
480   {
481     HandleScope inner_scope(isolate);
482     // Allocate another object, make it reachable from global.
483     Handle<JSObject> obj = factory->NewJSObject(function);
484     JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
485     JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
486   }
487
488   // After gc, it should survive.
489   heap->CollectGarbage(NEW_SPACE);
490
491   CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
492   Handle<Object> obj =
493   Object::GetProperty(global, obj_name).ToHandleChecked();
494   CHECK(obj->IsJSObject());
495   CHECK_EQ(Smi::FromInt(23),
496   *Object::GetProperty(obj, prop_name).ToHandleChecked());
497 }
498
499
500 static void VerifyStringAllocation(Isolate* isolate, const char* string) {
501 HandleScope scope(isolate);
502 Handle<String> s = isolate->factory()->NewStringFromUtf8(
503 CStrVector(string)).ToHandleChecked();
504 CHECK_EQ(StrLength(string), s->length());
505 for (int index = 0; index < s->length(); index++) {
506 CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
507 }
508 }
509
510
511 TEST(String) {
512 CcTest::InitializeVM();
513 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
514
515 VerifyStringAllocation(isolate, "a");
516 VerifyStringAllocation(isolate, "ab");
517 VerifyStringAllocation(isolate, "abc");
518 VerifyStringAllocation(isolate, "abcd");
519 VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
520 }
521
522
523 TEST(LocalHandles) {
524 CcTest::InitializeVM();
525 Isolate* isolate = CcTest::i_isolate();
526 Factory* factory = isolate->factory();
527
528 v8::HandleScope scope(CcTest::isolate());
529 const char* name = "Kasper the spunky";
530 Handle<String> string = factory->NewStringFromAsciiChecked(name);
531 CHECK_EQ(StrLength(name), string->length());
532 }
533
534
// Strong global handles keep their targets alive across a scavenge even
// after the creating handle scope has closed; duplicate handles to the
// same object remain aliased.
535 TEST(GlobalHandles) {
536   CcTest::InitializeVM();
537   Isolate* isolate = CcTest::i_isolate();
538   Heap* heap = isolate->heap();
539   Factory* factory = isolate->factory();
540   GlobalHandles* global_handles = isolate->global_handles();
541
542   Handle<Object> h1;
543   Handle<Object> h2;
544   Handle<Object> h3;
545   Handle<Object> h4;
546
547   {
548     HandleScope scope(isolate);
549
550     Handle<Object> i = factory->NewStringFromStaticChars("fisk");
551     Handle<Object> u = factory->NewNumber(1.12344);
552
// Two global handles per object; the locals die with the scope.
553     h1 = global_handles->Create(*i);
554     h2 = global_handles->Create(*u);
555     h3 = global_handles->Create(*i);
556     h4 = global_handles->Create(*u);
557   }
558
559   // after gc, it should survive
560   heap->CollectGarbage(NEW_SPACE);
561
562   CHECK((*h1)->IsString());
563   CHECK((*h2)->IsHeapNumber());
564   CHECK((*h3)->IsString());
565   CHECK((*h4)->IsHeapNumber());
566
// Handles created from the same object must still point at the same object.
567   CHECK_EQ(*h3, *h1);
568   GlobalHandles::Destroy(h1.location());
569   GlobalHandles::Destroy(h3.location());
570
571   CHECK_EQ(*h4, *h2);
572   GlobalHandles::Destroy(h2.location());
573   GlobalHandles::Destroy(h4.location());
574 }
575
576
// Set by TestWeakGlobalHandleCallback when a weak handle carrying the
// magic parameter id 1234 is cleared by the GC.
577 static bool WeakPointerCleared = false;
578
// Weak-handle callback: records (via WeakPointerCleared) that the handle
// died when its parameter id matches, then resets the persistent handle.
579 static void TestWeakGlobalHandleCallback(
580     const v8::WeakCallbackData<v8::Value, void>& data) {
581   std::pair<v8::Persistent<v8::Value>*, int>* p =
582       reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
583           data.GetParameter());
584   if (p->second == 1234) WeakPointerCleared = true;
585   p->first->Reset();
586 }
587
588
// A scavenge (new-space GC) must NOT clear weak global handles: it treats
// weak pointers as normal roots, so both objects survive and the weak
// callback is not invoked.
589 TEST(WeakGlobalHandlesScavenge) {
590   i::FLAG_stress_compaction = false;
591   CcTest::InitializeVM();
592   Isolate* isolate = CcTest::i_isolate();
593   Heap* heap = isolate->heap();
594   Factory* factory = isolate->factory();
595   GlobalHandles* global_handles = isolate->global_handles();
596
597   WeakPointerCleared = false;
598
599   Handle<Object> h1;
600   Handle<Object> h2;
601
602   {
603     HandleScope scope(isolate);
604
605     Handle<Object> i = factory->NewStringFromStaticChars("fisk");
606     Handle<Object> u = factory->NewNumber(1.12344);
607
608     h1 = global_handles->Create(*i);
609     h2 = global_handles->Create(*u);
610   }
611
// Make h2 weak; the pair carries the id checked by the callback.
612   std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
613   GlobalHandles::MakeWeak(h2.location(),
614   reinterpret_cast<void*>(&handle_and_id),
615   &TestWeakGlobalHandleCallback);
616
617   // Scavenge treats weak pointers as normal roots.
618   heap->CollectGarbage(NEW_SPACE);
619
620   CHECK((*h1)->IsString());
621   CHECK((*h2)->IsHeapNumber());
622
623   CHECK(!WeakPointerCleared);
624   CHECK(!global_handles->IsNearDeath(h2.location()));
625   CHECK(!global_handles->IsNearDeath(h1.location()));
626
627   GlobalHandles::Destroy(h1.location());
628   GlobalHandles::Destroy(h2.location());
629 }
630
631
// A full mark-compact GC must clear a weak global handle whose target is
// otherwise unreachable, invoking the weak callback, while a strong handle
// keeps its target alive.
632 TEST(WeakGlobalHandlesMark) {
633   CcTest::InitializeVM();
634   Isolate* isolate = CcTest::i_isolate();
635   Heap* heap = isolate->heap();
636   Factory* factory = isolate->factory();
637   GlobalHandles* global_handles = isolate->global_handles();
638
639   WeakPointerCleared = false;
640
641   Handle<Object> h1;
642   Handle<Object> h2;
643
644   {
645     HandleScope scope(isolate);
646
647     Handle<Object> i = factory->NewStringFromStaticChars("fisk");
648     Handle<Object> u = factory->NewNumber(1.12344);
649
650     h1 = global_handles->Create(*i);
651     h2 = global_handles->Create(*u);
652   }
653
654   // Make sure the objects are promoted.
655   heap->CollectGarbage(OLD_SPACE);
656   heap->CollectGarbage(NEW_SPACE);
657   CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
658
// Only h2 is made weak; h1 stays a strong root.
659   std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
660   GlobalHandles::MakeWeak(h2.location(),
661   reinterpret_cast<void*>(&handle_and_id),
662   &TestWeakGlobalHandleCallback);
663   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
664   CHECK(!GlobalHandles::IsNearDeath(h2.location()));
665
666   // Incremental marking potentially marked handles before they turned weak.
667   heap->CollectAllGarbage();
668
669   CHECK((*h1)->IsString());
670
671   CHECK(WeakPointerCleared);
672   CHECK(!GlobalHandles::IsNearDeath(h1.location()));
673
674   GlobalHandles::Destroy(h1.location());
675 }
676
677
// A weak global handle survives a scavenge (which does not process weak
// references) but is cleared by the next mark-compact collection.
678 TEST(DeleteWeakGlobalHandle) {
679   i::FLAG_stress_compaction = false;
680   CcTest::InitializeVM();
681   Isolate* isolate = CcTest::i_isolate();
682   Heap* heap = isolate->heap();
683   Factory* factory = isolate->factory();
684   GlobalHandles* global_handles = isolate->global_handles();
685
686   WeakPointerCleared = false;
687
688   Handle<Object> h;
689
690   {
691     HandleScope scope(isolate);
692
693     Handle<Object> i = factory->NewStringFromStaticChars("fisk");
694     h = global_handles->Create(*i);
695   }
696
697   std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
698   GlobalHandles::MakeWeak(h.location(),
699   reinterpret_cast<void*>(&handle_and_id),
700   &TestWeakGlobalHandleCallback);
701
702   // Scavenge does not recognize weak references.
703   heap->CollectGarbage(NEW_SPACE);
704
705   CHECK(!WeakPointerCleared);
706
707   // Mark-compact treats weak reference properly.
708   heap->CollectGarbage(OLD_SPACE);
709
710   CHECK(WeakPointerCleared);
711 }
712
713
714 TEST(BytecodeArray) {
715 static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
716 static const int kRawBytesSize = sizeof(kRawBytes);
717 static const int kFrameSize = 32;
718 static const int kParameterCount = 2;
719
720 i::FLAG_manual_evacuation_candidates_selection = true;
721 CcTest::InitializeVM();
722 Isolate* isolate = CcTest::i_isolate();
723 Heap* heap = isolate->heap();
724 Factory* factory = isolate->factory();
725 HandleScope scope(isolate);
726
727 SimulateFullSpace(heap->old_space());
728 Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
729 for (int i = 0; i < 5; i++) {
730 Handle<Object> number = factory->NewHeapNumber(i);
731 constant_pool->set(i, *number);
732 }
733
734 // Allocate and initialize BytecodeArray
735 Handle<BytecodeArray> array = factory->NewBytecodeArray(
736 kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);
737
738 CHECK(array->IsBytecodeArray());
739 CHECK_EQ(array->length(), (int)sizeof(kRawBytes));
740 CHECK_EQ(array->frame_size(), kFrameSize);
741 CHECK_EQ(array->parameter_count(), kParameterCount);
742 CHECK_EQ(array->constant_pool(), *constant_pool);
743 CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
744 CHECK_GE(array->address() + array->BytecodeArraySize(),
745 array->GetFirstBytecodeAddress() + array->length());
746 for (int i = 0; i < kRawBytesSize; i++) {
747 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
748 CHECK_EQ(array->get(i), kRawBytes[i]);
749 }
750
751 FixedArray* old_constant_pool_address = *constant_pool;
752
753 // Perform a full garbage collection and force the constant pool to be on an
754 // evacuation candidate.
755 Page* evac_page = Page::FromAddress(constant_pool->address());
756 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
757 heap->CollectAllGarbage();
758
759 // BytecodeArray should survive.
760 CHECK_EQ(array->length(), kRawBytesSize);
761 CHECK_EQ(array->frame_size(), kFrameSize);
762 for (int i = 0; i < kRawBytesSize; i++) {
763 CHECK_EQ(array->get(i), kRawBytes[i]);
764 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
765 }
766
767 // Constant pool should have been migrated.
768 CHECK_EQ(array->constant_pool(), *constant_pool);
769 CHECK_NE(array->constant_pool(), old_constant_pool_address);
770 }
771
772
// NULL-terminated table of strings (JavaScript/Java keywords and reserved
// words) used as input for the internalized-string tests below.
static const char* not_so_random_string_table[] = {
    "abstract",   "boolean",      "break",      "byte",    "case",
    "catch",      "char",         "class",      "const",   "continue",
    "debugger",   "default",      "delete",     "do",      "double",
    "else",       "enum",         "export",     "extends", "false",
    "final",      "finally",      "float",      "for",     "function",
    "goto",       "if",           "implements", "import",  "in",
    "instanceof", "int",          "interface",  "long",    "native",
    "new",        "null",         "package",    "private", "protected",
    "public",     "return",       "short",      "static",  "super",
    "switch",     "synchronized", "this",       "throw",   "throws",
    "transient",  "true",         "try",        "typeof",  "var",
    "void",       "volatile",     "while",      "with",    0};
835
836
837 static void CheckInternalizedStrings(const char** strings) {
838 Isolate* isolate = CcTest::i_isolate();
839 Factory* factory = isolate->factory();
840 for (const char* string = *strings; *strings != 0; string = *strings++) {
841 HandleScope scope(isolate);
842 Handle<String> a =
843 isolate->factory()->InternalizeUtf8String(CStrVector(string));
844 // InternalizeUtf8String may return a failure if a GC is needed.
845 CHECK(a->IsInternalizedString());
846 Handle<String> b = factory->InternalizeUtf8String(string);
847 CHECK_EQ(*b, *a);
848 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
849 b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
850 CHECK_EQ(*b, *a);
851 CHECK(b->IsUtf8EqualTo(CStrVector(string)));
852 }
853 }
854
855
856 TEST(StringTable) {
857 CcTest::InitializeVM();
858
859 v8::HandleScope sc(CcTest::isolate());
860 CheckInternalizedStrings(not_so_random_string_table);
861 CheckInternalizedStrings(not_so_random_string_table);
862 }
863
864
// Allocates a JSFunction and an instance of it, and verifies that named
// properties can be stored on both the instance and the function itself.
865 TEST(FunctionAllocation) {
866   CcTest::InitializeVM();
867   Isolate* isolate = CcTest::i_isolate();
868   Factory* factory = isolate->factory();
869
870   v8::HandleScope sc(CcTest::isolate());
871   Handle<String> name = factory->InternalizeUtf8String("theFunction");
872   Handle<JSFunction> function = factory->NewFunction(name);
873
874   Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
875   Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
876
877   Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
878   Handle<JSObject> obj = factory->NewJSObject(function);
879   JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
880   CHECK_EQ(Smi::FromInt(23),
881   *Object::GetProperty(obj, prop_name).ToHandleChecked());
882   // Check that we can add properties to function objects.
883   JSReceiver::SetProperty(function, prop_name, twenty_four, SLOPPY).Check();
884   CHECK_EQ(Smi::FromInt(24),
885   *Object::GetProperty(function, prop_name).ToHandleChecked());
886 }
887
888
// Exercises add/delete/has sequences for named properties on a plain
// Object instance, including deletion in both orders and the equivalence
// of plain strings and their internalized counterparts as keys.
889 TEST(ObjectProperties) {
890   CcTest::InitializeVM();
891   Isolate* isolate = CcTest::i_isolate();
892   Factory* factory = isolate->factory();
893
894   v8::HandleScope sc(CcTest::isolate());
895   Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
896   Handle<Object> object = Object::GetProperty(
897   CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
898   Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
899   Handle<JSObject> obj = factory->NewJSObject(constructor);
900   Handle<String> first = factory->InternalizeUtf8String("first");
901   Handle<String> second = factory->InternalizeUtf8String("second");
902
903   Handle<Smi> one(Smi::FromInt(1), isolate);
904   Handle<Smi> two(Smi::FromInt(2), isolate);
905
906   // check for empty
907   CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
908
909   // add first
910   JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
911   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
912
913   // delete first
914   CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
915   CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
916
917   // add first and then second
918   JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
919   JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
920   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
921   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
922
923   // delete first and then second
924   CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
925   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
926   CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
927   CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
928   CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
929
930   // add first and then second
931   JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
932   JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();
933   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
934   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
935
936   // delete second and then first
937   CHECK(Just(true) == JSReceiver::DeleteProperty(obj, second, SLOPPY));
938   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
939   CHECK(Just(true) == JSReceiver::DeleteProperty(obj, first, SLOPPY));
940   CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
941   CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
942
943   // check string and internalized string match
944   const char* string1 = "fisk";
945   Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
946   JSReceiver::SetProperty(obj, s1, one, SLOPPY).Check();
947   Handle<String> s1_string = factory->InternalizeUtf8String(string1);
948   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));
949
950   // check internalized string and string match
951   const char* string2 = "fugl";
952   Handle<String> s2_string = factory->InternalizeUtf8String(string2);
953   JSReceiver::SetProperty(obj, s2_string, one, SLOPPY).Check();
954   Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
955   CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
956 }
957
958
// Tests that adding a property to a freshly allocated JSObject transitions
// the object to a new map, i.e. the object no longer uses the constructor
// function's initial map afterwards.
TEST(JSObjectMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunction(name);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  Handle<Map> initial_map(function->initial_map());

  // Set a property.
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(obj, prop_name).ToHandleChecked());

  // Check the map has changed.
  CHECK(*initial_map != obj->map());
}
981
982
// Exercises JSArray length handling: elements stay in fast
// (Smi-or-object) mode while the length fits in a Smi, and the array
// transitions to dictionary (slow) elements once the length is set past
// Smi::kMaxValue. Also verifies element reads after each transition.
TEST(JSArray) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  // Fetch the Array constructor off the global object.
  Handle<Object> fun_obj = Object::GetProperty(
      CcTest::i_isolate()->global_object(), name).ToHandleChecked();
  Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);

  // Allocate the object.
  Handle<Object> element;
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  JSArray::Initialize(array, 0);

  // Set array length to 0.
  JSArray::SetLength(array, 0);
  CHECK_EQ(Smi::FromInt(0), array->length());
  // Must be in fast mode.
  CHECK(array->HasFastSmiOrObjectElements());

  // array[length] = name.
  JSReceiver::SetElement(isolate, array, 0, name, SLOPPY).Check();
  CHECK_EQ(Smi::FromInt(1), array->length());
  element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);

  // Set array length with larger than smi value.
  JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);

  uint32_t int_length = 0;
  CHECK(array->length()->ToArrayIndex(&int_length));
  CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.

  // array[length] = name.
  JSReceiver::SetElement(isolate, array, int_length, name, SLOPPY).Check();
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  // Writing at index int_length grows the length by exactly one.
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
  CHECK_EQ(*element, *name);
  element = Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);
}
1031
1032
// Tests Factory::CopyJSObject: the clone is a distinct object whose named
// properties and indexed elements initially match the original, and
// mutating the clone afterwards does not affect the original.
TEST(JSObjectCopy) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
  Handle<Object> object = Object::GetProperty(
      CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // Populate two named properties and two elements on the original.
  JSReceiver::SetProperty(obj, first, one, SLOPPY).Check();
  JSReceiver::SetProperty(obj, second, two, SLOPPY).Check();

  JSReceiver::SetElement(isolate, obj, 0, first, SLOPPY).Check();
  JSReceiver::SetElement(isolate, obj, 1, second, SLOPPY).Check();

  // Make the clone.
  Handle<Object> value1, value2;
  Handle<JSObject> clone = factory->CopyJSObject(obj);
  CHECK(!clone.is_identical_to(obj));

  // Clone starts out with the same elements and properties as the original.
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  // Flip the values on the clone only.
  JSReceiver::SetProperty(clone, first, two, SLOPPY).Check();
  JSReceiver::SetProperty(clone, second, one, SLOPPY).Check();

  JSReceiver::SetElement(isolate, clone, 0, second, SLOPPY).Check();
  JSReceiver::SetElement(isolate, clone, 1, first, SLOPPY).Check();

  // The original is unchanged: its values now mirror the clone's swapped
  // slots rather than matching them index-for-index.
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(obj, second).ToHandleChecked();
  value2 = Object::GetProperty(clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(obj, first).ToHandleChecked();
  value2 = Object::GetProperty(clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
}
1096
1097
// Allocates one-byte and multi-byte (UTF-8) strings of lengths 0..99, both
// internalized and plain, and checks the reported character length matches
// the number of characters, not the number of bytes.
TEST(StringAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  // A single 3-byte UTF-8 sequence, repeated to build non-one-byte strings.
  const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
  for (int length = 0; length < 100; length++) {
    v8::HandleScope scope(CcTest::isolate());
    // 3 bytes per character for the non-one-byte buffer, plus terminator.
    char* non_one_byte = NewArray<char>(3 * length + 1);
    char* one_byte = NewArray<char>(length + 1);
    non_one_byte[3 * length] = 0;
    one_byte[length] = 0;
    for (int i = 0; i < length; i++) {
      one_byte[i] = 'a';
      non_one_byte[3 * i] = chars[0];
      non_one_byte[3 * i + 1] = chars[1];
      non_one_byte[3 * i + 2] = chars[2];
    }
    Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
        Vector<const char>(non_one_byte, 3 * length));
    // Length is in characters, not bytes.
    CHECK_EQ(length, non_one_byte_sym->length());
    Handle<String> one_byte_sym =
        factory->InternalizeOneByteString(OneByteVector(one_byte, length));
    CHECK_EQ(length, one_byte_sym->length());
    Handle<String> non_one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
            .ToHandleChecked();
    non_one_byte_str->Hash();
    CHECK_EQ(length, non_one_byte_str->length());
    Handle<String> one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
            .ToHandleChecked();
    one_byte_str->Hash();
    CHECK_EQ(length, one_byte_str->length());
    DeleteArray(non_one_byte);
    DeleteArray(one_byte);
  }
}
1136
1137
1138 static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
1139 // Count the number of objects found in the heap.
1140 int found_count = 0;
1141 HeapIterator iterator(heap);
1142 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1143 for (int i = 0; i < size; i++) {
1144 if (*objs[i] == obj) {
1145 found_count++;
1146 }
1147 }
1148 }
1149 return found_count;
1150 }
1151
1152
// Allocates objects into several different spaces (new, old, large object)
// and verifies that a heap iteration finds each of them exactly once.
TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] =
      factory->NewJSArray(10, FAST_HOLEY_ELEMENTS, Strength::WEAK, TENURED);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
  objs[next_objs_index++] =
      factory->NewStringFromStaticChars("abcdefghij", TENURED);

  // Allocate a large string (for large object space).
  int large_size = Page::kMaxRegularHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}
1188
1189
// Tests that unoptimized code for a function is flushed after enough full
// GCs, and that calling the function afterwards recompiles it. Runs on a
// dedicated isolate so the aging counters start from a clean slate.
UNINITIALIZED_TEST(TestCodeFlushing) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  isolate->Enter();
  Factory* factory = i_isolate->factory();
  {
    v8::HandleScope scope(isolate);
    v8::Context::New(isolate)->Enter();
    const char* source =
        "function foo() {"
        "  var x = 42;"
        "  var y = 42;"
        "  var z = x + y;"
        "};"
        "foo()";
    Handle<String> foo_name = factory->InternalizeUtf8String("foo");

    // This compile will add the code to the compilation cache.
    {
      v8::HandleScope scope(isolate);
      CompileRun(source);
    }

    // Check function is compiled.
    Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
                                                    foo_name).ToHandleChecked();
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    CHECK(function->shared()->is_compiled());

    // The code will survive at least two GCs.
    i_isolate->heap()->CollectAllGarbage();
    i_isolate->heap()->CollectAllGarbage();
    CHECK(function->shared()->is_compiled());

    // Simulate several GCs that use full marking.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      i_isolate->heap()->CollectAllGarbage();
    }

    // foo should no longer be in the compilation cache
    CHECK(!function->shared()->is_compiled() || function->IsOptimized());
    CHECK(!function->is_compiled() || function->IsOptimized());
    // Call foo to get it recompiled.
    CompileRun("foo()");
    CHECK(function->shared()->is_compiled());
    CHECK(function->is_compiled());
  }
  isolate->Exit();
  isolate->Dispose();
}
1248
1249
// Tests code flushing with --optimize-for-size: code that was run only once
// is pre-aged and flushed after a single additional GC, while re-executing
// the function resets its age so it survives more collections.
TEST(TestCodeFlushingPreAged) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // Compile foo, but don't run it.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code has been run so will survive at least one GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // The code was only run once, so it should be pre-aged and collected on the
  // next GC.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());

  // Execute the function again twice, and ensure it is reset to the young age.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();"
               "foo();");
  }

  // The code will survive at least two GC now that it is young again.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use full marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CcTest::heap()->CollectAllGarbage();
  }

  // foo should no longer be in the compilation cache
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
  // Call foo to get it recompiled.
  CompileRun("foo()");
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}
1313
1314
// Tests that code flushing also works when the aging GCs use incremental
// marking, and that optimizing a function while it sits on the flushing
// candidate list leaves the candidate queue in a consistent state.
TEST(TestCodeFlushingIncremental) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    CcTest::heap()->CollectAllGarbage();
  }
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());

  // This compile will compile the function again.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  for (int i = 0; i < kAgingThreshold; i++) {
    SimulateIncrementalMarking(CcTest::heap());
    // A non-undefined next_function_link means foo is now a candidate.
    if (!function->next_function_link()->IsUndefined()) break;
    CcTest::heap()->CollectAllGarbage();
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}
1382
1383
// Tests code flushing when a scavenge runs while incremental marking has
// already enqueued flushing candidates, including the case where one of the
// candidate functions dies before the scavenge.
TEST(TestCodeFlushingIncrementalScavenge) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "var foo = function() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo();"
                       "var bar = function() {"
                       "  var x = 23;"
                       "};"
                       "bar();";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");
  Handle<String> bar_name = factory->InternalizeUtf8String("bar");

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check functions are compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());
  Handle<Object> func_value2 =
      Object::GetProperty(isolate->global_object(), bar_name).ToHandleChecked();
  CHECK(func_value2->IsJSFunction());
  Handle<JSFunction> function2 = Handle<JSFunction>::cast(func_value2);
  CHECK(function2->shared()->is_compiled());

  // Clear references to functions so that one of them can die.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo = 0; bar = 0;");
  }

  // Bump the code age so that flushing is triggered while the function
  // object is still located in new-space.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then kill one of the functions. Finally
  // perform a scavenge while incremental marking is still running.
  SimulateIncrementalMarking(CcTest::heap());
  // Overwrite the handle slot directly so bar's function is unreachable.
  *function2.location() = NULL;
  CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");

  // Simulate one final GC to make sure the candidate queue is sane.
  CcTest::heap()->CollectAllGarbage();
  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
  CHECK(!function->is_compiled() || function->IsOptimized());
}
1451
1452
// Tests that aborting incremental marking (here: by enabling the debugger
// and touching breakpoints mid-marking) disables code flushing without
// corrupting the flushing candidate queue.
TEST(TestCodeFlushingIncrementalAbort) {
  // If we do not flush code this test is invalid.
  if (!FLAG_flush_code) return;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_optimize_for_size = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  const char* source = "function foo() {"
                       "  var x = 42;"
                       "  var y = 42;"
                       "  var z = x + y;"
                       "};"
                       "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Bump the code age so that flushing is triggered.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
  }

  // Simulate incremental marking so that the function is enqueued as
  // code flushing candidate.
  SimulateIncrementalMarking(heap);

  // Enable the debugger and add a breakpoint while incremental marking
  // is running so that incremental marking aborts and code flushing is
  // disabled.
  int position = 0;
  Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
  EnableDebugger(CcTest::isolate());
  isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
  isolate->debug()->ClearAllBreakPoints();
  DisableDebugger(CcTest::isolate());

  // Force optimization now that code flushing is disabled.
  { v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC to make sure the candidate queue is sane.
  heap->CollectAllGarbage();
  CHECK(function->shared()->is_compiled() || !function->IsOptimized());
  CHECK(function->is_compiled() || !function->IsOptimized());
}
1518
1519
// Tests the compilation cache's two-step caching behavior for scripts:
// the first compile only records a hash, the second inserts a real entry,
// GC of aged code evicts the entry, and hash-generation aging prevents
// premature re-caching.
TEST(CompilationCacheCachingBehavior) {
  // If we do not flush code, or have the compilation cache turned off, this
  // test is invalid.
  if (!FLAG_flush_code || !FLAG_compilation_cache) {
    return;
  }
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  CompilationCache* compilation_cache = isolate->compilation_cache();
  LanguageMode language_mode =
      construct_language_mode(FLAG_use_strict, FLAG_use_strong);

  v8::HandleScope scope(CcTest::isolate());
  const char* raw_source =
      "function foo() {"
      "  var x = 42;"
      "  var y = 42;"
      "  var z = x + y;"
      "};"
      "foo()";
  Handle<String> source = factory->InternalizeUtf8String(raw_source);
  Handle<Context> native_context = isolate->native_context();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // On first compilation, only a hash is inserted in the code cache. We can't
  // find that value.
  MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // On second compilation, the hash is replaced by a real cache entry mapping
  // the source to the shared function info containing the code.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(!info.is_null());

  heap->CollectAllGarbage();

  // The cache entry survives a GC while the cached code is still young.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(!info.is_null());

  // Age the cached code all the way before the next GC.
  while (!info.ToHandleChecked()->code()->IsOld()) {
    info.ToHandleChecked()->code()->MakeOlder(NO_MARKING_PARITY);
  }

  heap->CollectAllGarbage();
  // Ensure code aging cleared the entry from the cache.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // On first compilation, only a hash is inserted in the code cache. We can't
  // find that value.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());

  // Advance the hash generations so the previously inserted hash goes stale.
  for (int i = 0; i < CompilationCacheTable::kHashGenerations; i++) {
    compilation_cache->MarkCompactPrologue();
  }

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // If we aged the cache before caching the script, ensure that we didn't cache
  // on next compilation.
  info = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0,
      v8::ScriptOriginOptions(false, true, false), native_context,
      language_mode);
  CHECK(info.is_null());
}
1623
1624
1625 static void OptimizeEmptyFunction(const char* name) {
1626 HandleScope scope(CcTest::i_isolate());
1627 EmbeddedVector<char, 256> source;
1628 SNPrintF(source,
1629 "function %s() { return 0; }"
1630 "%s(); %s();"
1631 "%%OptimizeFunctionOnNextCall(%s);"
1632 "%s();",
1633 name, name, name, name, name);
1634 CompileRun(source.start());
1635 }
1636
1637
1638 // Count the number of native contexts in the weak list of native contexts.
1639 int CountNativeContexts() {
1640 int count = 0;
1641 Object* object = CcTest::heap()->native_contexts_list();
1642 while (!object->IsUndefined()) {
1643 count++;
1644 object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1645 }
1646 return count;
1647 }
1648
1649
1650 // Count the number of user functions in the weak list of optimized
1651 // functions attached to a native context.
1652 static int CountOptimizedUserFunctions(v8::Local<v8::Context> context) {
1653 int count = 0;
1654 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1655 Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1656 while (object->IsJSFunction() &&
1657 !JSFunction::cast(object)->shared()->IsBuiltin()) {
1658 count++;
1659 object = JSFunction::cast(object)->next_function_link();
1660 }
1661 return count;
1662 }
1663
1664
// Tests the weak lists of native contexts and of optimized functions:
// scavenges treat the links as strong (counts stay constant), while full
// mark-compact GCs drop entries whose only references died.
TEST(TestInternalWeakLists) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;
  FLAG_retain_maps_for_n_gc = 0;

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  v8::Local<v8::Context> ctx[kNumTestContexts];
  // Without Crankshaft there are no optimized-function lists to test.
  if (!isolate->use_crankshaft()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of global contexts which gets linked together.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());

    // Collect garbage that might have been created by one of the
    // installed extensions.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage();

    CHECK_EQ(i + 1, CountNativeContexts());

    ctx[i]->Enter();

    // Create a handle scope so no function objects get stuck in the outer
    // handle scope.
    HandleScope scope(isolate);
    CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f1");
    CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f2");
    CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f3");
    CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f4");
    CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
    OptimizeEmptyFunction("f5");
    CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));

    // Remove function f1, and
    CompileRun("f1=null");

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
    }

    // Mark compact handles the weak references.
    isolate->compilation_cache()->Clear();
    heap->CollectAllGarbage();
    CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));

    // Get rid of f3 and f5 in the same way.
    CompileRun("f3=null");
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
    }
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    CompileRun("f5=null");
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(NEW_SPACE);
      CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
    }
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));

    ctx[i]->Exit();
  }

  // Force compilation cache cleanup.
  CcTest::heap()->NotifyContextDisposed(true);
  CcTest::heap()->CollectAllGarbage();

  // Dispose the native contexts one by one.
  for (int i = 0; i < kNumTestContexts; i++) {
    // TODO(dcarney): is there a better way to do this?
    i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
    *unsafe = CcTest::heap()->undefined_value();
    ctx[i].Clear();

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::heap()->CollectGarbage(i::NEW_SPACE);
      CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
    }

    // Mark compact handles the weak references.
    CcTest::heap()->CollectAllGarbage();
    CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
  }

  CHECK_EQ(0, CountNativeContexts());
}
1770
1771
1772 // Count the number of native contexts in the weak list of native contexts
1773 // causing a GC after the specified number of elements.
1774 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1775 Heap* heap = isolate->heap();
1776 int count = 0;
1777 Handle<Object> object(heap->native_contexts_list(), isolate);
1778 while (!object->IsUndefined()) {
1779 count++;
1780 if (count == n) heap->CollectAllGarbage();
1781 object =
1782 Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
1783 isolate);
1784 }
1785 return count;
1786 }
1787
1788
1789 // Count the number of user functions in the weak list of optimized
1790 // functions attached to a native context causing a GC after the
1791 // specified number of elements.
1792 static int CountOptimizedUserFunctionsWithGC(v8::Local<v8::Context> context,
1793 int n) {
1794 int count = 0;
1795 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1796 Isolate* isolate = icontext->GetIsolate();
1797 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1798 isolate);
1799 while (object->IsJSFunction() &&
1800 !Handle<JSFunction>::cast(object)->shared()->IsBuiltin()) {
1801 count++;
1802 if (count == n) isolate->heap()->CollectAllGarbage();
1803 object = Handle<Object>(
1804 Object::cast(JSFunction::cast(*object)->next_function_link()),
1805 isolate);
1806 }
1807 return count;
1808 }
1809
1810
// Exercise the weak list of native contexts and the per-context weak list of
// optimized functions while GCs are forced in the middle of traversing them
// (via CountNativeContextsWithGC / CountOptimizedUserFunctionsWithGC). The
// counts must match the plain, GC-free traversals.
TEST(TestInternalWeakListsTraverseWithGC) {
  // always_opt would put extra functions on the optimized-functions list;
  // natives syntax is needed for %OptimizeFunctionOnNextCall inside
  // OptimizeEmptyFunction.
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  v8::Local<v8::Context> ctx[kNumTestContexts];
  // The test relies on Crankshaft producing optimized code.
  if (!isolate->use_crankshaft()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create an number of contexts and check the length of the weak list both
  // with and without GCs while iterating the list.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());
    CHECK_EQ(i + 1, CountNativeContexts());
    CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
  }

  ctx[0]->Enter();

  // Compile a number of functions the length of the weak list of optimized
  // functions both with and without GCs while iterating the list.
  CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
  OptimizeEmptyFunction("f1");
  CHECK_EQ(1, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(1, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f2");
  CHECK_EQ(2, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(2, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f3");
  CHECK_EQ(3, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(3, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
  OptimizeEmptyFunction("f4");
  CHECK_EQ(4, CountOptimizedUserFunctions(ctx[0]));
  // Vary the element at which the GC is triggered to cover different
  // positions in the list.
  CHECK_EQ(4, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
  OptimizeEmptyFunction("f5");
  CHECK_EQ(5, CountOptimizedUserFunctions(ctx[0]));
  CHECK_EQ(5, CountOptimizedUserFunctionsWithGC(ctx[0], 4));

  ctx[0]->Exit();
}
1856
1857
// Verify that compiling a regexp too large to optimize does not blow up the
// heap, and that an optimized (smaller) regexp actually produces more code
// than the unoptimized one, by comparing heap sizes before/after compilation.
TEST(TestSizeOfRegExpCode) {
  if (!FLAG_regexp_optimization) return;

  v8::V8::Initialize();

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  LocalContext context;

  // Adjust source below and this check to match
  // RegExpImpl::kRegExpTooLargeToOptimize.
  CHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);

  // Compile a regexp that is much larger if we are using regexp optimizations.
  // half_size_reg_exp ends up just below the too-large-to-optimize limit,
  // reg_exp_source just above it.
  CompileRun(
      "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
      "var half_size_reg_exp;"
      "while (reg_exp_source.length < 20 * 1024) {"
      "  half_size_reg_exp = reg_exp_source;"
      "  reg_exp_source = reg_exp_source + reg_exp_source;"
      "}"
      // Flatten string.
      "reg_exp_source.match(/f/);");

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());

  // Compile and run the too-large regexp; it must not be optimized.
  CompileRun("'foo'.match(reg_exp_source);");
  CcTest::heap()->CollectAllGarbage();
  int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());

  // Compile and run the half-size regexp, which is eligible for optimization.
  CompileRun("'foo'.match(half_size_reg_exp);");
  CcTest::heap()->CollectAllGarbage();
  int size_with_optimized_regexp =
      static_cast<int>(CcTest::heap()->SizeOfObjects());

  int size_of_regexp_code = size_with_regexp - initial_size;

  // On some platforms the debug-code flag causes huge amounts of regexp code
  // to be emitted, breaking this test.
  if (!FLAG_debug_code) {
    CHECK_LE(size_of_regexp_code, 1 * MB);
  }

  // Small regexp is half the size, but compiles to more than twice the code
  // due to the optimization steps.
  CHECK_GE(size_with_optimized_regexp,
           size_with_regexp + size_of_regexp_code * 2);
}
1918
1919
// Check that Heap::SizeOfObjects tracks allocations exactly and returns to
// its initial value after a full GC, even while concurrent sweeping of the
// freed pages may still be in progress.
HEAP_TEST(TestSizeOfObjects) {
  v8::V8::Initialize();

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  CcTest::heap()->CollectAllGarbage();
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());

  {
    // Allocate objects on several different old-space pages so that
    // concurrent sweeper threads will be busy sweeping the old space on
    // subsequent GC runs.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
    for (int i = 1; i <= 100; i++) {
      // Each allocation must be reflected immediately in SizeOfObjects.
      CcTest::heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked();
      CHECK_EQ(initial_size + i * filler_size,
               static_cast<int>(CcTest::heap()->SizeOfObjects()));
    }
  }

  // The heap size should go back to initial size after a full GC, even
  // though sweeping didn't finish yet.
  CcTest::heap()->CollectAllGarbage();

  // Normally sweeping would not be complete here, but no guarantees.

  CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));

  // Waiting for sweeper threads should not change heap size.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK_EQ(initial_size, static_cast<int>(CcTest::heap()->SizeOfObjects()));
}
1963
1964
1965 TEST(TestAlignmentCalculations) {
1966 // Maximum fill amounts are consistent.
1967 int maximum_double_misalignment = kDoubleSize - kPointerSize;
1968 int maximum_simd128_misalignment = kSimd128Size - kPointerSize;
1969 int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
1970 CHECK_EQ(0, max_word_fill);
1971 int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
1972 CHECK_EQ(maximum_double_misalignment, max_double_fill);
1973 int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
1974 CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
1975 int max_simd128_unaligned_fill =
1976 Heap::GetMaximumFillToAlign(kSimd128Unaligned);
1977 CHECK_EQ(maximum_simd128_misalignment, max_simd128_unaligned_fill);
1978
1979 Address base = static_cast<Address>(NULL);
1980 int fill = 0;
1981
1982 // Word alignment never requires fill.
1983 fill = Heap::GetFillToAlign(base, kWordAligned);
1984 CHECK_EQ(0, fill);
1985 fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
1986 CHECK_EQ(0, fill);
1987
1988 // No fill is required when address is double aligned.
1989 fill = Heap::GetFillToAlign(base, kDoubleAligned);
1990 CHECK_EQ(0, fill);
1991 // Fill is required if address is not double aligned.
1992 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
1993 CHECK_EQ(maximum_double_misalignment, fill);
1994 // kDoubleUnaligned has the opposite fill amounts.
1995 fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
1996 CHECK_EQ(maximum_double_misalignment, fill);
1997 fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
1998 CHECK_EQ(0, fill);
1999
2000 // 128 bit SIMD types have 2 or 4 possible alignments, depending on platform.
2001 fill = Heap::GetFillToAlign(base, kSimd128Unaligned);
2002 CHECK_EQ((3 * kPointerSize) & kSimd128AlignmentMask, fill);
2003 fill = Heap::GetFillToAlign(base + kPointerSize, kSimd128Unaligned);
2004 CHECK_EQ((2 * kPointerSize) & kSimd128AlignmentMask, fill);
2005 fill = Heap::GetFillToAlign(base + 2 * kPointerSize, kSimd128Unaligned);
2006 CHECK_EQ(kPointerSize, fill);
2007 fill = Heap::GetFillToAlign(base + 3 * kPointerSize, kSimd128Unaligned);
2008 CHECK_EQ(0, fill);
2009 }
2010
2011
2012 static HeapObject* NewSpaceAllocateAligned(int size,
2013 AllocationAlignment alignment) {
2014 Heap* heap = CcTest::heap();
2015 AllocationResult allocation =
2016 heap->new_space()->AllocateRawAligned(size, alignment);
2017 HeapObject* obj = NULL;
2018 allocation.To(&obj);
2019 heap->CreateFillerObjectAt(obj->address(), size);
2020 return obj;
2021 }
2022
2023
2024 // Get new space allocation into the desired alignment.
2025 static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
2026 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
2027 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2028 if (fill) {
2029 NewSpaceAllocateAligned(fill + offset, kWordAligned);
2030 }
2031 return *top_addr;
2032 }
2033
2034
// Check aligned new-space allocation from the linear allocation area: for
// each alignment mode, allocate at an already-aligned and at a misaligned
// top and verify both the resulting object address and the filler object
// (if any) inserted before the object.
TEST(TestAlignedAllocation) {
  // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
  const intptr_t double_misalignment = kDoubleSize - kPointerSize;
  Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
  Address start;
  HeapObject* obj;
  HeapObject* filler;
  // Double-alignment cases only matter on 32-bit platforms, where the word
  // size is smaller than the double alignment.
  if (double_misalignment) {
    // Allocate a pointer sized object that must be double aligned at an
    // aligned address.
    start = AlignNewSpace(kDoubleAligned, 0);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
    // There is no filler.
    CHECK_EQ(kPointerSize, *top_addr - start);

    // Allocate a second pointer sized object that must be double aligned at an
    // unaligned address.
    start = AlignNewSpace(kDoubleAligned, kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);

    // Similarly for kDoubleUnaligned.
    start = AlignNewSpace(kDoubleUnaligned, 0);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
    CHECK_EQ(kPointerSize, *top_addr - start);
    start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
  }

  // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
  // on platform.
  start = AlignNewSpace(kSimd128Unaligned, 0);
  obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
  CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
  // There is no filler.
  CHECK_EQ(kPointerSize, *top_addr - start);
  start = AlignNewSpace(kSimd128Unaligned, kPointerSize);
  obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
  CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
  // There is a filler object before the object.
  filler = HeapObject::FromAddress(start);
  CHECK(obj != filler && filler->IsFiller() &&
        filler->Size() == kSimd128Size - kPointerSize);
  CHECK_EQ(kPointerSize + kSimd128Size - kPointerSize, *top_addr - start);

  if (double_misalignment) {
    // Test the 2 other alignments possible on 32 bit platforms.
    start = AlignNewSpace(kSimd128Unaligned, 2 * kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
    CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == 2 * kPointerSize);
    CHECK_EQ(kPointerSize + 2 * kPointerSize, *top_addr - start);
    start = AlignNewSpace(kSimd128Unaligned, 3 * kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
    CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + kPointerSize, *top_addr - start);
  }
}
2113
2114
2115 static HeapObject* OldSpaceAllocateAligned(int size,
2116 AllocationAlignment alignment) {
2117 Heap* heap = CcTest::heap();
2118 AllocationResult allocation =
2119 heap->old_space()->AllocateRawAligned(size, alignment);
2120 HeapObject* obj = NULL;
2121 allocation.To(&obj);
2122 heap->CreateFillerObjectAt(obj->address(), size);
2123 return obj;
2124 }
2125
2126
2127 // Get old space allocation into the desired alignment.
2128 static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
2129 Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
2130 int fill = Heap::GetFillToAlign(*top_addr, alignment);
2131 int allocation = fill + offset;
2132 if (allocation) {
2133 OldSpaceAllocateAligned(allocation, kWordAligned);
2134 }
2135 Address top = *top_addr;
2136 // Now force the remaining allocation onto the free list.
2137 CcTest::heap()->old_space()->EmptyAllocationInfo();
2138 return top;
2139 }
2140
2141
2142 // Test the case where allocation must be done from the free list, so filler
2143 // may precede or follow the object.
2144 TEST(TestAlignedOverAllocation) {
2145 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
2146 const intptr_t double_misalignment = kDoubleSize - kPointerSize;
2147 Address start;
2148 HeapObject* obj;
2149 HeapObject* filler1;
2150 HeapObject* filler2;
2151 if (double_misalignment) {
2152 start = AlignOldSpace(kDoubleAligned, 0);
2153 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2154 // The object is aligned, and a filler object is created after.
2155 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2156 filler1 = HeapObject::FromAddress(start + kPointerSize);
2157 CHECK(obj != filler1 && filler1->IsFiller() &&
2158 filler1->Size() == kPointerSize);
2159 // Try the opposite alignment case.
2160 start = AlignOldSpace(kDoubleAligned, kPointerSize);
2161 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
2162 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
2163 filler1 = HeapObject::FromAddress(start);
2164 CHECK(obj != filler1);
2165 CHECK(filler1->IsFiller());
2166 CHECK(filler1->Size() == kPointerSize);
2167 CHECK(obj != filler1 && filler1->IsFiller() &&
2168 filler1->Size() == kPointerSize);
2169
2170 // Similarly for kDoubleUnaligned.
2171 start = AlignOldSpace(kDoubleUnaligned, 0);
2172 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2173 // The object is aligned, and a filler object is created after.
2174 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2175 filler1 = HeapObject::FromAddress(start + kPointerSize);
2176 CHECK(obj != filler1 && filler1->IsFiller() &&
2177 filler1->Size() == kPointerSize);
2178 // Try the opposite alignment case.
2179 start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
2180 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
2181 CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
2182 filler1 = HeapObject::FromAddress(start);
2183 CHECK(obj != filler1 && filler1->IsFiller() &&
2184 filler1->Size() == kPointerSize);
2185 }
2186
2187 // Now test SIMD alignment. There are 2 or 4 possible alignments, depending
2188 // on platform.
2189 start = AlignOldSpace(kSimd128Unaligned, 0);
2190 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2191 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2192 // There is a filler object after the object.
2193 filler1 = HeapObject::FromAddress(start + kPointerSize);
2194 CHECK(obj != filler1 && filler1->IsFiller() &&
2195 filler1->Size() == kSimd128Size - kPointerSize);
2196 start = AlignOldSpace(kSimd128Unaligned, kPointerSize);
2197 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2198 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2199 // There is a filler object before the object.
2200 filler1 = HeapObject::FromAddress(start);
2201 CHECK(obj != filler1 && filler1->IsFiller() &&
2202 filler1->Size() == kSimd128Size - kPointerSize);
2203
2204 if (double_misalignment) {
2205 // Test the 2 other alignments possible on 32 bit platforms.
2206 start = AlignOldSpace(kSimd128Unaligned, 2 * kPointerSize);
2207 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2208 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2209 // There are filler objects before and after the object.
2210 filler1 = HeapObject::FromAddress(start);
2211 CHECK(obj != filler1 && filler1->IsFiller() &&
2212 filler1->Size() == 2 * kPointerSize);
2213 filler2 = HeapObject::FromAddress(start + 3 * kPointerSize);
2214 CHECK(obj != filler2 && filler2->IsFiller() &&
2215 filler2->Size() == kPointerSize);
2216 start = AlignOldSpace(kSimd128Unaligned, 3 * kPointerSize);
2217 obj = OldSpaceAllocateAligned(kPointerSize, kSimd128Unaligned);
2218 CHECK(IsAddressAligned(obj->address(), kSimd128Alignment, kPointerSize));
2219 // There are filler objects before and after the object.
2220 filler1 = HeapObject::FromAddress(start);
2221 CHECK(obj != filler1 && filler1->IsFiller() &&
2222 filler1->Size() == kPointerSize);
2223 filler2 = HeapObject::FromAddress(start + 2 * kPointerSize);
2224 CHECK(obj != filler2 && filler2->IsFiller() &&
2225 filler2->Size() == 2 * kPointerSize);
2226 }
2227 }
2228
2229
2230 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
2231 CcTest::InitializeVM();
2232 HeapIterator iterator(CcTest::heap());
2233 intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
2234 intptr_t size_of_objects_2 = 0;
2235 for (HeapObject* obj = iterator.next();
2236 obj != NULL;
2237 obj = iterator.next()) {
2238 if (!obj->IsFreeSpace()) {
2239 size_of_objects_2 += obj->Size();
2240 }
2241 }
2242 // Delta must be within 5% of the larger result.
2243 // TODO(gc): Tighten this up by distinguishing between byte
2244 // arrays that are real and those that merely mark free space
2245 // on the heap.
2246 if (size_of_objects_1 > size_of_objects_2) {
2247 intptr_t delta = size_of_objects_1 - size_of_objects_2;
2248 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2249 "Iterator: %" V8_PTR_PREFIX "d, "
2250 "delta: %" V8_PTR_PREFIX "d\n",
2251 size_of_objects_1, size_of_objects_2, delta);
2252 CHECK_GT(size_of_objects_1 / 20, delta);
2253 } else {
2254 intptr_t delta = size_of_objects_2 - size_of_objects_1;
2255 PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
2256 "Iterator: %" V8_PTR_PREFIX "d, "
2257 "delta: %" V8_PTR_PREFIX "d\n",
2258 size_of_objects_1, size_of_objects_2, delta);
2259 CHECK_GT(size_of_objects_2 / 20, delta);
2260 }
2261 }
2262
2263
2264 static void FillUpNewSpace(NewSpace* new_space) {
2265 // Fill up new space to the point that it is completely full. Make sure
2266 // that the scavenger does not undo the filling.
2267 Heap* heap = new_space->heap();
2268 Isolate* isolate = heap->isolate();
2269 Factory* factory = isolate->factory();
2270 HandleScope scope(isolate);
2271 AlwaysAllocateScope always_allocate(isolate);
2272 intptr_t available = new_space->Capacity() - new_space->Size();
2273 intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
2274 for (intptr_t i = 0; i < number_of_fillers; i++) {
2275 CHECK(heap->InNewSpace(*factory->NewFixedArray(32, NOT_TENURED)));
2276 }
2277 }
2278
2279
// Verify new-space semispace resizing: Grow() doubles the capacity, Shrink()
// halves it only when the space is empty enough, and shrinking never goes
// below the committed/used size.
TEST(GrowAndShrinkNewSpace) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  NewSpace* new_space = heap->new_space();

  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    // The max size cannot exceed the reserved size, since semispaces must be
    // always within the reserved space. We can't test new space growing and
    // shrinking if the reserved size is the same as the minimum (initial) size.
    return;
  }

  // Explicitly growing should double the space capacity.
  intptr_t old_capacity, new_capacity;
  old_capacity = new_space->TotalCapacity();
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK(2 * old_capacity == new_capacity);

  // Filling the space should not change its capacity.
  old_capacity = new_space->TotalCapacity();
  FillUpNewSpace(new_space);
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);

  // Explicitly shrinking should not affect space capacity.
  // (The space is full, so there is nothing to give back.)
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);

  // Let the scavenger empty the new space.
  heap->CollectGarbage(NEW_SPACE);
  CHECK_LE(new_space->Size(), old_capacity);

  // Explicitly shrinking should halve the space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == 2 * new_capacity);

  // Consecutive shrinking should not affect space capacity.
  // (Capacity has already reached its lower bound.)
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_space->Shrink();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);
}
2329
2330
// Verify that CollectAllAvailableGarbage shrinks a grown (and then filled)
// new space back to its original capacity.
TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  if (heap->ReservedSemiSpaceSize() == heap->InitialSemiSpaceSize() ||
      heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    // The max size cannot exceed the reserved size, since semispaces must be
    // always within the reserved space. We can't test new space growing and
    // shrinking if the reserved size is the same as the minimum (initial) size.
    return;
  }

  v8::HandleScope scope(CcTest::isolate());
  NewSpace* new_space = heap->new_space();
  intptr_t old_capacity, new_capacity;
  // Grow (doubles capacity) and fill the space, ...
  old_capacity = new_space->TotalCapacity();
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK(2 * old_capacity == new_capacity);
  FillUpNewSpace(new_space);
  // ... then a last-resort GC should shrink it back to the old capacity.
  heap->CollectAllAvailableGarbage();
  new_capacity = new_space->TotalCapacity();
  CHECK(old_capacity == new_capacity);
}
2354
2355
2356 static int NumberOfGlobalObjects() {
2357 int count = 0;
2358 HeapIterator iterator(CcTest::heap());
2359 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
2360 if (obj->IsJSGlobalObject()) count++;
2361 }
2362 return count;
2363 }
2364
2365
// Test that we don't embed maps from foreign contexts into
// optimized code: optimized code in ctx2 reads o.x where o comes from ctx1;
// disposing ctx1 must allow its global object (and native context) to die.
TEST(LeakNativeContextViaMap) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Both contexts' global objects are alive.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    // Create an object in ctx1 and optimize a monomorphic property load on
    // it in ctx2, so ctx2's optimized code sees ctx1's map.
    CompileRun("var v = {x: 42}");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o.x; }"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Sever the only JS reference from ctx2 into ctx1 before disposing ctx1.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // ctx1's global object must be collectable despite the optimized code.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2413
2414
// Test that we don't embed functions from foreign contexts into
// optimized code: ctx2 optimizes a call through a function created in ctx1;
// disposing ctx1 must allow its global object (and native context) to die.
TEST(LeakNativeContextViaFunction) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Both contexts' global objects are alive.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    // Create a function in ctx1 and optimize a call to it in ctx2.
    CompileRun("var v = function() { return 42; }");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f(x) { return x(); }"
        "for (var i = 0; i < 10; ++i) f(o);"
        "%OptimizeFunctionOnNextCall(f);"
        "f(o);");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Sever the only JS reference from ctx2 into ctx1 before disposing ctx1.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // ctx1's global object must be collectable despite the optimized code.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2462
2463
// Like LeakNativeContextViaMap, but the foreign map reaches the optimized
// code through a keyed load (o[0]) on an array created in ctx1.
TEST(LeakNativeContextViaMapKeyed) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Both contexts' global objects are alive.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    // Create an array in ctx1 and optimize a keyed load on it in ctx2.
    CompileRun("var v = [42, 43]");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o[0]; }"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Sever the only JS reference from ctx2 into ctx1 before disposing ctx1.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // ctx1's global object must be collectable despite the optimized code.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2509
2510
// Like LeakNativeContextViaMap, but the foreign map reaches the optimized
// code through the prototype chain (__proto__ assigned to a ctx1 object).
TEST(LeakNativeContextViaMapProto) {
  i::FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Both contexts' global objects are alive.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    // Create an object in ctx1 and use it as a prototype in ctx2's
    // optimized code.
    CompileRun("var v = { y: 42}");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f() {"
        "  var p = {x: 42};"
        "  p.__proto__ = o;"
        "  return p.x;"
        "}"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Sever the only JS reference from ctx2 into ctx1 before disposing ctx1.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // ctx1's global object must be collectable despite the optimized code.
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::heap()->CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2560
2561
// Exercise the write barrier of the InstanceOf stub during incremental
// marking: optimized f() is marked black first, then g() feeds a freshly
// created constructor/object through the instanceof check while marking is
// still in progress, so the stub's recorded references must be caught by the
// barrier. (With VERIFY_HEAP, the final GC verifies the heap.)
TEST(InstanceOfStubWriteBarrier) {
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  // Requires Crankshaft-optimized code; forced marking-deque overflows would
  // change the marking behavior this test depends on.
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  if (i::FLAG_force_marking_deque_overflows) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function foo () { }"
        "function mkbar () { return new (new Function(\"\")) (); }"
        "function f (x) { return (x instanceof foo); }"
        "function g () { f(mkbar()); }"
        "f(new foo()); f(new foo());"
        "%OptimizeFunctionOnNextCall(f);"
        "f(new foo()); g();");
  }

  // Restart incremental marking from a clean state.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking();

  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CHECK(f->IsOptimized());

  // Step the marker until f's code object has been marked black.
  while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
         !marking->IsStopped()) {
    // Discard any pending GC requests otherwise we will get GC when we enter
    // code below.
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }

  CHECK(marking->IsMarking());

  {
    // Run g() — this pushes a brand new object through the already-black
    // optimized code of f() while marking is in progress.
    v8::HandleScope scope(CcTest::isolate());
    v8::Local<v8::Object> global = CcTest::global();
    v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
        global->Get(ctx, v8_str("g")).ToLocalChecked());
    g->Call(ctx, global, 0, nullptr).ToLocalChecked();
  }

  // Finish marking quickly and collect; heap verification (if enabled)
  // would catch a missed write barrier here.
  CcTest::heap()->incremental_marking()->set_should_hurry(true);
  CcTest::heap()->CollectGarbage(OLD_SPACE);
}
2616
2617
// Checks that GC clears only dead prototype transitions from a map's
// prototype transition array, compacts the survivors, and records slots
// correctly when the transition array points at an evacuation candidate.
TEST(PrototypeTransitionClearing) {
  if (FLAG_never_compact) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  CompileRun("var base = {};");
  i::Handle<JSReceiver> baseObject =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
          CcTest::global()->Get(ctx, v8_str("base")).ToLocalChecked()));

  int initialTransitions =
      TransitionArray::NumberOfPrototypeTransitionsForTest(baseObject->map());

  // Create 10 prototype transitions; keep the last 7 object/prototype pairs
  // alive via |live| so that only the first 3 are garbage.
  CompileRun(
      "var live = [];"
      "for (var i = 0; i < 10; i++) {"
      " var object = {};"
      " var prototype = {};"
      " object.__proto__ = prototype;"
      " if (i >= 3) live.push(object, prototype);"
      "}");

  // Verify that only dead prototype transitions are cleared.
  CHECK_EQ(
      initialTransitions + 10,
      TransitionArray::NumberOfPrototypeTransitionsForTest(baseObject->map()));
  CcTest::heap()->CollectAllGarbage();
  const int transitions = 10 - 3;
  CHECK_EQ(
      initialTransitions + transitions,
      TransitionArray::NumberOfPrototypeTransitionsForTest(baseObject->map()));

  // Verify that prototype transitions array was compacted: every surviving
  // slot holds a weak cell whose value is still a live map.
  FixedArray* trans =
      TransitionArray::GetPrototypeTransitions(baseObject->map());
  for (int i = initialTransitions; i < initialTransitions + transitions; i++) {
    int j = TransitionArray::kProtoTransitionHeaderSize + i;
    CHECK(trans->get(j)->IsWeakCell());
    CHECK(WeakCell::cast(trans->get(j))->value()->IsMap());
  }

  // Make sure next prototype is placed on an old-space evacuation candidate.
  Handle<JSObject> prototype;
  PagedSpace* space = CcTest::heap()->old_space();
  {
    // Fill old space first so the new array lands on the last (candidate)
    // page.
    AlwaysAllocateScope always_allocate(isolate);
    SimulateFullSpace(space);
    prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS,
                                    Strength::WEAK, TENURED);
  }

  // Add a prototype on an evacuation candidate and verify that transition
  // clearing correctly records slots in prototype transition array.
  i::FLAG_always_compact = true;
  Handle<Map> map(baseObject->map());
  CHECK(!space->LastPage()->Contains(
      TransitionArray::GetPrototypeTransitions(*map)->address()));
  CHECK(space->LastPage()->Contains(prototype->address()));
}
2680
2681
// Checks that when the global IC age changes (via ContextDisposedNotification)
// a full GC on the incremental-marking path resets the SharedFunctionInfo's
// ic_age, opt_count and the code object's profiler ticks.
TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function f () {"
        " var s = 0;"
        " for (var i = 0; i < 100; i++) s += i;"
        " return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
  CHECK(f->IsOptimized());

  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking();
  // The following calls will increment CcTest::heap()->global_ic_age().
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();
  // The counters must have been reset to match the new global IC age.
  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
}
2722
2723
// Same as the test above, but exercises the non-incremental mark-sweep path:
// the SharedFunctionInfo counters must also be reset when the IC age changes
// and a plain full GC runs.
TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  i::FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft()) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function f () {"
        " var s = 0;"
        " for (var i = 0; i < 100; i++) s += i;"
        " return s;"
        "}"
        "f(); f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
  }
  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CHECK(f->IsOptimized());

  // Make sure the GC below is a non-incremental mark-sweep.
  CcTest::heap()->incremental_marking()->Stop();

  // The following two calls will increment CcTest::heap()->global_ic_age().
  CcTest::isolate()->ContextDisposedNotification();
  CcTest::heap()->CollectAllGarbage();

  CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
  CHECK_EQ(0, f->shared()->opt_count());
  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
}
2764
2765
// Checks that GC flags requested for a collection are applied for that
// collection only and reset to kNoGCFlags afterwards, and that new-space
// scavenges do not clobber the flags of an ongoing incremental marking cycle.
HEAP_TEST(GCFlags) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();

  heap->set_current_gc_flags(Heap::kNoGCFlags);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  // Set the flags to check whether they are appropriately reset after the GC.
  heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
  heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  // Wait for any concurrent sweeping so the incremental marking below starts
  // from a quiescent heap.
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }

  IncrementalMarking* marking = heap->incremental_marking();
  marking->Stop();
  heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  heap->CollectGarbage(NEW_SPACE);
  // NewSpace scavenges should not overwrite the flags.
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
}
2795
2796
2797 TEST(IdleNotificationFinishMarking) {
2798 i::FLAG_allow_natives_syntax = true;
2799 CcTest::InitializeVM();
2800 SimulateFullSpace(CcTest::heap()->old_space());
2801 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2802 marking->Stop();
2803 CcTest::heap()->StartIncrementalMarking();
2804
2805 CHECK_EQ(CcTest::heap()->gc_count(), 0);
2806
2807 // TODO(hpayer): We cannot write proper unit test right now for heap.
2808 // The ideal test would call kMaxIdleMarkingDelayCounter to test the
2809 // marking delay counter.
2810
2811 // Perform a huge incremental marking step but don't complete marking.
2812 intptr_t bytes_processed = 0;
2813 do {
2814 bytes_processed =
2815 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2816 IncrementalMarking::FORCE_MARKING,
2817 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2818 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2819 } while (bytes_processed);
2820
2821 // The next invocations of incremental marking are not going to complete
2822 // marking
2823 // since the completion threshold is not reached
2824 for (size_t i = 0; i < IncrementalMarking::kMaxIdleMarkingDelayCounter - 2;
2825 i++) {
2826 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2827 IncrementalMarking::FORCE_MARKING,
2828 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
2829 CHECK(!marking->IsIdleMarkingDelayCounterLimitReached());
2830 }
2831
2832 marking->SetWeakClosureWasOverApproximatedForTesting(true);
2833
2834 // The next idle notification has to finish incremental marking.
2835 const double kLongIdleTime = 1000.0;
2836 CcTest::isolate()->IdleNotificationDeadline(
2837 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2838 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
2839 kLongIdleTime);
2840 CHECK_EQ(CcTest::heap()->gc_count(), 1);
2841 }
2842
2843
// Test that HAllocateObject will always return an object in new-space, even
// when new space is (almost) full and allocation is forced to succeed via
// AlwaysAllocateScope.
TEST(OptimizedAllocationAlwaysInNewSpace) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Fill new space first so the optimized allocation below has to cope with
  // a (nearly) full space.
  SimulateFullSpace(CcTest::heap()->new_space());
  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
  v8::Local<v8::Value> res = CompileRun(
      "function c(x) {"
      " this.x = x;"
      " for (var i = 0; i < 32; i++) {"
      " this['x' + i] = x;"
      " }"
      "}"
      "function f(x) { return new c(x); };"
      "f(1); f(2); f(3);"
      "%OptimizeFunctionOnNextCall(f);"
      "f(4);");

  CHECK_EQ(4, res.As<v8::Object>()
                  ->GetRealNamedProperty(ctx, v8_str("x"))
                  .ToLocalChecked()
                  ->Int32Value(ctx)
                  .FromJust());

  i::Handle<JSReceiver> o =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));

  // The object created by the optimized constructor must be in new space.
  CHECK(CcTest::heap()->InNewSpace(*o));
}
2877
2878
// Checks that folded allocations in optimized code honor pretenuring
// decisions: the nested array literals and their backing stores must all
// land in old space once the allocation site has seen enough allocations.
TEST(OptimizedPretenuringAllocationFolding) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // %d is AllocationSite::kPretenureMinimumCreated: enough allocations for
  // the site to tenure.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array();"
      "function f() {"
      " for (var i = 0; i < number_elements; i++) {"
      " elements[i] = [[{}], [1.1]];"
      " }"
      " return elements[number_elements-1]"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
  v8::Local<v8::Value> double_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));

  // Outer array, both inner arrays and their elements must be tenured.
  i::Handle<JSReceiver> o =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}
2928
2929
// Checks that array literals containing object literals, allocated from
// optimized code, are pretenured into old space along with their elements
// backing store.
TEST(OptimizedPretenuringObjectArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      " for (var i = 0; i < number_elements; i++) {"
      " elements[i] = [{}, {}, {}];"
      " }"
      " return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(o->elements()));
  CHECK(CcTest::heap()->InOldSpace(*o));
}
2968
2969
// Checks pretenuring of objects with mixed in-object properties (a nested
// object and a double). Depending on the unboxed-doubles configuration, the
// double property is either stored inline (unboxed) or as a heap number that
// must itself be in old space.
TEST(OptimizedPretenuringMixedInObjectProperties) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }


  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      " for (var i = 0; i < number_elements; i++) {"
      " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
      " }"
      " return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(*o));
  // idx1 = property "a" (object), idx2 = property "b" (double).
  FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
  FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
  CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
  if (!o->IsUnboxedDoubleField(idx2)) {
    CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
  } else {
    CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
  }

  // Check the inner object {c: 2.2, d: {}} the same way.
  JSObject* inner_object =
      reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
  CHECK(CcTest::heap()->InOldSpace(inner_object));
  if (!inner_object->IsUnboxedDoubleField(idx1)) {
    CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
  } else {
    CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
  }
  CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
}
3026
3027
3028 TEST(OptimizedPretenuringDoubleArrayProperties) {
3029 i::FLAG_allow_natives_syntax = true;
3030 i::FLAG_expose_gc = true;
3031 CcTest::InitializeVM();
3032 if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
3033 if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
3034 v8::HandleScope scope(CcTest::isolate());
3035
3036 // Grow new space unitl maximum capacity reached.
3037 while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
3038 CcTest::heap()->new_space()->Grow();
3039 }
3040
3041 i::ScopedVector<char> source(1024);
3042 i::SNPrintF(
3043 source,
3044 "var number_elements = %d;"
3045 "var elements = new Array(number_elements);"
3046 "function f() {"
3047 " for (var i = 0; i < number_elements; i++) {"
3048 " elements[i] = {a: 1.1, b: 2.2};"
3049 " }"
3050 " return elements[i - 1];"
3051 "};"
3052 "f(); gc();"
3053 "f(); f();"
3054 "%%OptimizeFunctionOnNextCall(f);"
3055 "f();",
3056 AllocationSite::kPretenureMinimumCreated);
3057
3058 v8::Local<v8::Value> res = CompileRun(source.start());
3059
3060 i::Handle<JSObject> o = Handle<JSObject>::cast(
3061 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
3062
3063 CHECK(CcTest::heap()->InOldSpace(*o));
3064 CHECK(CcTest::heap()->InOldSpace(o->properties()));
3065 }
3066
3067
// Checks that double array literals allocated in optimized code are
// pretenured into old space along with their elements backing store.
TEST(OptimizedPretenuringdoubleArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      " for (var i = 0; i < number_elements; i++) {"
      " elements[i] = [1.1, 2.2, 3.3];"
      " }"
      " return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(o->elements()));
  CHECK(CcTest::heap()->InOldSpace(*o));
}
3106
3107
// Checks pretenuring of nested array literals mixing object and double
// arrays: the outer array, both inner arrays and their backing stores must
// all be tenured.
TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // NOTE(review): unlike the sibling tests this hard-codes 100 instead of
  // formatting AllocationSite::kPretenureMinimumCreated into the snippet —
  // presumably equivalent; confirm before changing.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = 100;"
      "var elements = new Array(number_elements);"
      "function f() {"
      " for (var i = 0; i < number_elements; i++) {"
      " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
      " }"
      " return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();");

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
  v8::Local<v8::Value> double_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));

  Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}
3156
3157
// Checks pretenuring of nested object-array literals: the outer array and
// both inner object arrays (plus backing stores) must be tenured.
TEST(OptimizedPretenuringNestedObjectLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      " for (var i = 0; i < number_elements; i++) {"
      " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
      " }"
      " return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array_1 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  Handle<JSObject> int_array_handle_1 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_1)));
  v8::Local<v8::Value> int_array_2 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  Handle<JSObject> int_array_handle_2 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_2)));

  Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
}
3207
3208
// Checks pretenuring of nested double-array literals: the outer array and
// both inner double arrays (plus backing stores) must be tenured.
TEST(OptimizedPretenuringNestedDoubleLiterals) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(
      source,
      "var number_elements = %d;"
      "var elements = new Array(number_elements);"
      "function f() {"
      " for (var i = 0; i < number_elements; i++) {"
      " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
      " }"
      " return elements[number_elements - 1];"
      "};"
      "f(); gc();"
      "f(); f();"
      "%%OptimizeFunctionOnNextCall(f);"
      "f();",
      AllocationSite::kPretenureMinimumCreated);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> double_array_1 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle_1 = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_1)));
  v8::Local<v8::Value> double_array_2 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle_2 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_2)));

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
}
3258
3259
// Test regular array literals allocation: without a pretenuring decision the
// optimized allocation must stay in new space.
TEST(OptimizedAllocationArrayLiterals) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
  if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      " var numbers = new Array(1, 2, 3);"
      " numbers[0] = 3.14;"
      " return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");
  // numbers[0] was overwritten with 3.14; Int32Value truncates it to 3.
  CHECK_EQ(static_cast<int>(3.14), v8::Object::Cast(*res)
                                       ->Get(ctx, v8_str("0"))
                                       .ToLocalChecked()
                                       ->Int32Value(ctx)
                                       .FromJust());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InNewSpace(o->elements()));
}
3288
3289
3290 static int CountMapTransitions(Map* map) {
3291 return TransitionArray::NumberOfTransitions(map->raw_transitions());
3292 }
3293
3294
// Test that map transitions are cleared and maps are collected with
// incremental marking as well.
TEST(Regress1465) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_trace_incremental_marking = true;
  // Do not keep dead maps alive across GCs.
  i::FLAG_retain_maps_for_n_gc = 0;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  static const int transitions_count = 256;

  CompileRun("function F() {}");
  {
    // Build 256 transitions on F's initial map, one per distinct property
    // name; AlwaysAllocateScope keeps intermediate GCs from interfering.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    for (int i = 0; i < transitions_count; i++) {
      EmbeddedVector<char, 64> buffer;
      SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
      CompileRun(buffer.start());
    }
    CompileRun("var root = new F;");
  }

  i::Handle<JSReceiver> root =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
          CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(transitions_count, transitions_before);

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // Count number of live transitions after marking. Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(1, transitions_after);
}
3336
3337
3338 #ifdef DEBUG
3339 static void AddTransitions(int transitions_count) {
3340 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3341 for (int i = 0; i < transitions_count; i++) {
3342 EmbeddedVector<char, 64> buffer;
3343 SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3344 CompileRun(buffer.start());
3345 }
3346 }
3347
3348
3349 static i::Handle<JSObject> GetByName(const char* name) {
3350 return i::Handle<JSObject>::cast(
3351 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3352 CcTest::global()
3353 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
3354 .ToLocalChecked())));
3355 }
3356
3357
3358 static void AddPropertyTo(
3359 int gc_count, Handle<JSObject> object, const char* property_name) {
3360 Isolate* isolate = CcTest::i_isolate();
3361 Factory* factory = isolate->factory();
3362 Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
3363 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
3364 i::FLAG_gc_interval = gc_count;
3365 i::FLAG_gc_global = true;
3366 i::FLAG_retain_maps_for_n_gc = 0;
3367 CcTest::heap()->set_allocation_timeout(gc_count);
3368 JSReceiver::SetProperty(object, prop_name, twenty_three, SLOPPY).Check();
3369 }
3370
3371
// Checks that a GC forced in the middle of AddPropertyTo clears all dead
// transitions from F's initial map, leaving only the transition created by
// the new property.
TEST(TransitionArrayShrinksDuringAllocToZero) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() { }");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  // Get rid of o (and the old root) so no instance keeps a transition
  // target's map alive.
  CompileRun("o = new F;"
             "root = new F");
  root = GetByName("root");
  AddPropertyTo(2, root, "funny");
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // After the forced GC exactly one transition remains on the back-pointer
  // map — presumably the one created by adding "funny"; the CHECK pins the
  // count.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
3400
3401
// Same setup as above, but 'root' still references an instance of a
// transition target, so after the forced GC two transitions remain (the
// surviving one plus the new "funny" transition).
TEST(TransitionArrayShrinksDuringAllocToOne) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  root = GetByName("root");
  AddPropertyTo(2, root, "funny");
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Two transitions must remain on the back-pointer map (CHECK pins the
  // count; see the comment above the test).
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(2, transitions_after);
}
3427
3428
// Variant where the property being added ("prop9") already exists as a
// transition: after a forced old-space GC only one transition remains on
// the back-pointer map.
TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  root = GetByName("root");
  // gc_count of 0 forces the GC immediately on the next allocation.
  AddPropertyTo(0, root, "prop9");
  CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);

  // Exactly one transition must remain on the back-pointer map.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
3454
3455
// Checks growth of a simple (single-entry) transition to a full transition
// array while a GC is forced during the property addition; one transition
// must remain on the back-pointer map afterwards.
TEST(TransitionArraySimpleToFull) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 1;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  CompileRun("o = new F;"
             "root = new F");
  root = GetByName("root");
  // With a single transition the map must still use the simple encoding.
  CHECK(TransitionArray::IsSimpleTransition(root->map()->raw_transitions()));
  AddPropertyTo(2, root, "happy");

  // Exactly one transition must remain on the back-pointer map after the
  // GC forced inside AddPropertyTo.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
3483 #endif // DEBUG
3484
3485
// Regression test for issue 2143: a StoreIC compiled while incremental
// marking is in progress performs a map transition; the collector must keep
// the transitioned object's map consistent (no stale/unmarked map).
TEST(Regress2143a) {
  i::FLAG_incremental_marking = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  // Start (but do not finish) an incremental marking cycle.
  SimulateIncrementalMarking(CcTest::heap());

  // Compile a StoreIC that performs the prepared map transition. This
  // will restart incremental marking and should make sure the root is
  // marked grey again.
  CompileRun("function f(o) {"
             "  o.foo = 0;"
             "}"
             "f(new Object);"
             "f(root);");

  // This bug only triggers with aggressive IC clearing.
  CcTest::heap()->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  CcTest::heap()->CollectAllGarbage();

  Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
      CcTest::global()
          ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
          .ToLocalChecked()));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}
3523
3524
// Companion to Regress2143a: same scenario, but the map transition is
// performed by optimized code (LStoreNamedField) that is subsequently
// deoptimized, instead of by a StoreIC.
TEST(Regress2143b) {
  i::FLAG_incremental_marking = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  // Prepare a map transition from the root object together with a yet
  // untransitioned root object.
  CompileRun("var root = new Object;"
             "root.foo = 0;"
             "root = new Object;");

  // Start (but do not finish) an incremental marking cycle.
  SimulateIncrementalMarking(CcTest::heap());

  // Compile an optimized LStoreNamedField that performs the prepared
  // map transition. This will restart incremental marking and should
  // make sure the root is marked grey again.
  CompileRun("function f(o) {"
             "  o.foo = 0;"
             "}"
             "f(new Object);"
             "f(new Object);"
             "%OptimizeFunctionOnNextCall(f);"
             "f(root);"
             "%DeoptimizeFunction(f);");

  // This bug only triggers with aggressive IC clearing.
  CcTest::heap()->AgeInlineCaches();

  // Explicitly request GC to perform final marking step and sweeping.
  CcTest::heap()->CollectAllGarbage();

  Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
      CcTest::global()
          ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
          .ToLocalChecked()));

  // The root object should be in a sane state.
  CHECK(root->IsJSObject());
  CHECK(root->map()->IsMap());
}
3566
3567
// Fills old space with many pages that each keep only a tiny live object,
// then checks that successive full GCs release the over-reserved pages back
// to the OS, ending with a single old-space page after a last-resort GC.
TEST(ReleaseOverReservedPages) {
  if (FLAG_never_compact) return;
  i::FLAG_trace_gc = true;
  // The optimizer can allocate stuff, messing up the test.
  i::FLAG_crankshaft = false;
  i::FLAG_always_opt = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  static const int number_of_test_pages = 20;

  // Prepare many pages with low live-bytes count.
  PagedSpace* old_space = heap->old_space();
  CHECK_EQ(1, old_space->CountTotalPages());
  for (int i = 0; i < number_of_test_pages; i++) {
    AlwaysAllocateScope always_allocate(isolate);
    SimulateFullSpace(old_space);
    factory->NewFixedArray(1, TENURED);
  }
  CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());

  // Triggering one GC will cause a lot of garbage to be discovered but
  // even spread across all allocated pages.
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered for preparation");
  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());

  // Triggering subsequent GCs should cause at least half of the pages
  // to be released to the OS after at most two cycles.
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered by test 1");
  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
  heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
                          "triggered by test 2");
  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);

  // Triggering a last-resort GC should cause all pages to be released to the
  // OS so that other processes can seize the memory. If we get a failure here
  // where there are 2 pages left instead of 1, then we should increase the
  // size of the first page a little in SizeOfFirstPage in spaces.cc. The
  // first page should be small in order to reduce memory used when the VM
  // boots, but if the 20 small arrays don't fit on the first page then that's
  // an indication that it is too small.
  heap->CollectAllAvailableGarbage("triggered really hard");
  CHECK_EQ(1, old_space->CountTotalPages());
}
3616
3617 static int forced_gc_counter = 0;
3618
3619 void MockUseCounterCallback(v8::Isolate* isolate,
3620 v8::Isolate::UseCounterFeature feature) {
3621 isolate->GetCurrentContext();
3622 if (feature == v8::Isolate::kForcedGC) {
3623 forced_gc_counter++;
3624 }
3625 }
3626
3627
// Checks that an explicit gc() call from script (--expose-gc) is reported
// through the kForcedGC use counter, observed via MockUseCounterCallback.
TEST(CountForcedGC) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  isolate->SetUseCounterCallback(MockUseCounterCallback);

  forced_gc_counter = 0;
  const char* source = "gc();";
  CompileRun(source);
  // At least one forced-GC event must have been counted.
  CHECK_GT(forced_gc_counter, 0);
}
3641
3642
3643 #ifdef OBJECT_PRINT
// Smoke test (OBJECT_PRINT builds only): printing a SharedFunctionInfo to a
// stream must not crash.
TEST(PrintSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  const char* source = "f = function() { return 987654321; }\n"
                       "g = function() { return 123456789; }\n";
  CompileRun(source);
  i::Handle<JSFunction> g = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));

  OFStream os(stdout);
  g->shared()->Print(os);
  os << std::endl;
}
3659 #endif // OBJECT_PRINT
3660
3661
// Checks that the weak cells recorded in the two call-IC feedback slots of
// |f| survive a full GC that finishes a simulated incremental marking cycle,
// i.e. that marking does not drop live monomorphic call feedback.
TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> fun1, fun2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  {
    CompileRun("function fun() {};");
    fun1 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
  }

  {
    CompileRun("function fun() {};");
    fun2 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
  }

  // Prepare function f that contains type feedback for the two closures.
  CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("fun2"), fun2).FromJust());
  CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");

  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  Handle<TypeFeedbackVector> feedback_vector(f->shared()->feedback_vector());
  FeedbackVectorHelper feedback_helper(feedback_vector);

  // Both call sites must have recorded their target as a weak cell.
  int expected_slots = 2;
  CHECK_EQ(expected_slots, feedback_helper.slot_count());
  int slot1 = 0;
  int slot2 = 1;
  CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell());
  CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell());

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // The weak cells are still pointing at the (live) closures.
  CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1)))
             ->cleared());
  CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2)))
             ->cleared());
}
3705
3706
3707 static Code* FindFirstIC(Code* code, Code::Kind kind) {
3708 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
3709 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
3710 for (RelocIterator it(code, mask); !it.done(); it.next()) {
3711 RelocInfo* info = it.rinfo();
3712 Code* target = Code::GetCodeFromTargetAddress(info->target_address());
3713 if (target->is_inline_cache_stub() && target->kind() == kind) {
3714 return target;
3715 }
3716 }
3717 return NULL;
3718 }
3719
3720
3721 static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
3722 InlineCacheState desired_state) {
3723 Handle<TypeFeedbackVector> vector =
3724 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3725 FeedbackVectorHelper helper(vector);
3726 FeedbackVectorSlot slot = helper.slot(slot_index);
3727 if (vector->GetKind(slot) == FeedbackVectorSlotKind::LOAD_IC) {
3728 LoadICNexus nexus(vector, slot);
3729 CHECK(nexus.StateFromFeedback() == desired_state);
3730 } else {
3731 CHECK_EQ(FeedbackVectorSlotKind::KEYED_LOAD_IC, vector->GetKind(slot));
3732 KeyedLoadICNexus nexus(vector, slot);
3733 CHECK(nexus.StateFromFeedback() == desired_state);
3734 }
3735 }
3736
3737
3738 static void CheckVectorICCleared(Handle<JSFunction> f, int slot_index) {
3739 Handle<TypeFeedbackVector> vector =
3740 Handle<TypeFeedbackVector>(f->shared()->feedback_vector());
3741 FeedbackVectorSlot slot(slot_index);
3742 LoadICNexus nexus(vector, slot);
3743 CHECK(IC::IsCleared(&nexus));
3744 }
3745
3746
// Checks that the call IC inside the Function.apply builtin first learns
// (goes MONOMORPHIC) and is then reset to PREMONOMORPHIC -- ready to learn
// again -- after a context dispose notification followed by a full GC.
TEST(ICInBuiltInIsClearedAppropriately) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  Handle<JSFunction> apply;
  {
    LocalContext env;
    v8::Local<v8::Value> res = CompileRun("Function.apply");
    i::Handle<JSReceiver> maybe_apply =
        v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
    apply = i::Handle<JSFunction>::cast(maybe_apply);
    i::Handle<TypeFeedbackVector> vector(apply->shared()->feedback_vector());
    FeedbackVectorHelper feedback_helper(vector);
    CHECK_EQ(1, feedback_helper.slot_count());
    // Fresh builtin: the single feedback slot has not learned anything yet.
    CheckVectorIC(apply, 0, UNINITIALIZED);
    CompileRun(
        "function b(a1, a2, a3) { return a1 + a2 + a3; }"
        "function fun(bar) { bar.apply({}, [1, 2, 3]); };"
        "fun(b); fun(b)");
    CheckVectorIC(apply, 0, MONOMORPHIC);
  }

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // The IC in apply has been cleared, ready to learn again.
  CheckVectorIC(apply, 0, PREMONOMORPHIC);
}
3778
3779
// Checks that the weak cell recorded for a same-native-context monomorphic
// constructor call survives incremental marking plus a full GC.
TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun(
      "function fun() { this.x = 1; };"
      "function f(o) { return new o(); } f(fun); f(fun);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // The constructor feedback is still held as a weak cell.
  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
}
3802
3803
// Checks that constructor feedback referring to a *different* (disposed)
// native context is cleared back to the uninitialized sentinel by a GC that
// follows a context dispose notification.
TEST(IncrementalMarkingClearsMonomorphicConstructor) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> fun1;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; };");
    fun1 = env->Global()->Get(env.local(), v8_str("fun")).ToLocalChecked();
  }

  // Prepare function f that contains a monomorphic constructor for object
  // originating from a different native context.
  CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
  CompileRun(
      "function fun() { this.x = 1; };"
      "function f(o) { return new o(); } f(fun1); f(fun1);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));


  Handle<TypeFeedbackVector> vector(f->shared()->feedback_vector());
  CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  // The feedback slot has been reset to the uninitialized sentinel.
  CHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(isolate),
           vector->Get(FeedbackVectorSlot(0)));
}
3840
3841
// Checks that a monomorphic load IC whose feedback comes from the same
// native context stays MONOMORPHIC across incremental marking plus full GC.
TEST(IncrementalMarkingPreservesMonomorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
             "function f(o) { return o.x; } f(obj); f(obj);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, MONOMORPHIC);

  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorIC(f, 0, MONOMORPHIC);
}
3862
3863
// Checks that a monomorphic load IC whose feedback refers to a *disposed*
// native context is cleared by a GC following the dispose notification.
TEST(IncrementalMarkingClearsMonomorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a monomorphic IC for object
  // originating from a different native context.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, MONOMORPHIC);

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorICCleared(f, 0);
}
3894
3895
// Checks that a polymorphic load IC fed from two live native contexts stays
// POLYMORPHIC across incremental marking plus a full GC.
TEST(IncrementalMarkingPreservesPolymorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // NOTE(review): unlike the 'Clears' variant, no ContextDisposedNotification
  // is fired here (the old comment was a copy-paste leftover), so the
  // polymorphic feedback is expected to survive the GC.
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorIC(f, 0, POLYMORPHIC);
}
3932
3933
// Checks that a polymorphic load IC referring to disposed native contexts is
// cleared by a GC following the dispose notification.
TEST(IncrementalMarkingClearsPolymorphicIC) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  SimulateIncrementalMarking(CcTest::heap());
  CcTest::heap()->CollectAllGarbage();

  CheckVectorICCleared(f, 0);
}
3971
3972
3973 class SourceResource : public v8::String::ExternalOneByteStringResource {
3974 public:
3975 explicit SourceResource(const char* data)
3976 : data_(data), length_(strlen(data)) { }
3977
3978 virtual void Dispose() {
3979 i::DeleteArray(data_);
3980 data_ = NULL;
3981 }
3982
3983 const char* data() const { return data_; }
3984
3985 size_t length() const { return length_; }
3986
3987 bool IsDisposed() { return data_ == NULL; }
3988
3989 private:
3990 const char* data_;
3991 size_t length_;
3992 };
3993
3994
// Runs |source| (script text that stores an Error into 'error') from an
// external string on |isolate|, then runs |accessor| (script touching
// error.stack) and checks that the retained source data is released by the
// following GC. |isolate| must have a context entered.
void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
                               const char* accessor) {
  // Test that the data retained by the Error.stack accessor is released
  // after the first time the accessor is fired. We use external string
  // to check whether the data is being released since the external string
  // resource's callback is fired when the external string is GC'ed.
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  v8::HandleScope scope(isolate);
  SourceResource* resource = new SourceResource(i::StrDup(source));
  {
    v8::HandleScope scope(isolate);
    v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
    v8::Local<v8::String> source_string =
        v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
    i_isolate->heap()->CollectAllAvailableGarbage();
    v8::Script::Compile(ctx, source_string)
        .ToLocalChecked()
        ->Run(ctx)
        .ToLocalChecked();
    // The stack trace keeps the source alive; nothing released yet.
    CHECK(!resource->IsDisposed());
  }
  // i_isolate->heap()->CollectAllAvailableGarbage();
  CHECK(!resource->IsDisposed());

  // Touching error.stack materializes the trace and drops the retained data.
  CompileRun(accessor);
  i_isolate->heap()->CollectAllAvailableGarbage();

  // External source has been released.
  CHECK(resource->IsDisposed());
  delete resource;
}
4026
4027
// Exercises ReleaseStackTraceDataTest with four error shapes (normal error,
// stack overflow, and both used as a prototype) against the stack getter,
// plus the first two against the stack setter.
UNINITIALIZED_TEST(ReleaseStackTraceData) {
  if (i::FLAG_always_opt) {
    // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
    // See: https://codereview.chromium.org/181833004/
    return;
  }
  FLAG_use_ic = false;  // ICs retain objects.
  FLAG_concurrent_recompilation = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    static const char* source1 = "var error = null; "
    /* Normal Error */           "try { "
                                 "  throw new Error(); "
                                 "} catch (e) { "
                                 "  error = e; "
                                 "} ";
    static const char* source2 = "var error = null; "
    /* Stack overflow */         "try { "
                                 "  (function f() { f(); })(); "
                                 "} catch (e) { "
                                 "  error = e; "
                                 "} ";
    static const char* source3 = "var error = null; "
    /* Normal Error */           "try { "
    /* as prototype */           "  throw new Error(); "
                                 "} catch (e) { "
                                 "  error = {}; "
                                 "  error.__proto__ = e; "
                                 "} ";
    static const char* source4 = "var error = null; "
    /* Stack overflow */         "try { "
    /* as prototype */           "  (function f() { f(); })(); "
                                 "} catch (e) { "
                                 "  error = {}; "
                                 "  error.__proto__ = e; "
                                 "} ";
    static const char* getter = "error.stack";
    static const char* setter = "error.stack = 0";

    ReleaseStackTraceDataTest(isolate, source1, setter);
    ReleaseStackTraceDataTest(isolate, source2, setter);
    // We do not test source3 and source4 with setter, since the setter is
    // supposed to (untypically) write to the receiver, not the holder. This is
    // to emulate the behavior of a data property.

    ReleaseStackTraceDataTest(isolate, source1, getter);
    ReleaseStackTraceDataTest(isolate, source2, getter);
    ReleaseStackTraceDataTest(isolate, source3, getter);
    ReleaseStackTraceDataTest(isolate, source4, getter);
  }
  isolate->Dispose();
}
4085
4086
// Regression test for issue 159140: code flushing must cope with optimized
// code that stays reachable through a handle while its closures become
// flushing candidates; afterwards calling the flushed closure must not
// confuse the deoptimizer.
TEST(Regress159140) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare several closures that are all eligible for code flushing
  // because all reachable ones are not optimized. Make sure that the
  // optimized code object is directly reachable through a handle so
  // that it is marked black during incremental marking.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function h(x) {}"
               "function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "var g = mkClosure();"
               "f(1); f(2);"
               "g(1); g(2);"
               "h(1); h(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);"
               "%OptimizeFunctionOnNextCall(h); h(3);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    CHECK(f->is_compiled());
    CompileRun("f = null;");

    Handle<JSFunction> g = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
    CHECK(g->is_compiled());
    // Age g's code far enough to make it a flushing candidate.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
  }

  // Simulate incremental marking so that the functions are enqueued as
  // code flushing candidates. Then optimize one function. Finally
  // finish the GC to complete code flushing.
  SimulateIncrementalMarking(heap);
  CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
  heap->CollectAllGarbage();

  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("g('bozo');");
}
4145
4146
// Regression test for issue 165495: flushing unoptimized code that is still
// cached in the optimized code map must not break closures later created
// from that map.
TEST(Regress165495) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare an optimized closure that the optimized code map will get
  // populated. Then age the unoptimized code to trigger code flushing
  // but make sure the optimized code is unreachable.
  {
    HandleScope inner_scope(isolate);
    LocalContext env;
    CompileRun("function mkClosure() {"
               "  return function(x) { return x + 1; };"
               "}"
               "var f = mkClosure();"
               "f(1); f(2);"
               "%OptimizeFunctionOnNextCall(f); f(3);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    CHECK(f->is_compiled());
    // Age the unoptimized code so the flusher considers it dead weight.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    CompileRun("f = null;");
  }

  // Simulate incremental marking so that unoptimized code is flushed
  // even though it still is cached in the optimized code map.
  SimulateIncrementalMarking(heap);
  heap->CollectAllGarbage();

  // Make a new closure that will get code installed from the code map.
  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("var g = mkClosure(); g('bozo');");
}
4191
4192
// Regression test for issue 169209: replacing the unoptimized code of a
// code-flushing candidate (by optimizing it mid-cycle) must unlink it from
// the candidate list cleanly, including a dangling tail candidate.
TEST(Regress169209) {
  i::FLAG_stress_compaction = false;
  i::FLAG_allow_natives_syntax = true;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  // Prepare a shared function info eligible for code flushing for which
  // the unoptimized code will be replaced during optimization.
  Handle<SharedFunctionInfo> shared1;
  {
    HandleScope inner_scope(isolate);
    LocalContext env;
    CompileRun("function f() { return 'foobar'; }"
               "function g(x) { if (x) f(); }"
               "f();"
               "g(false);"
               "g(false);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    CHECK(f->is_compiled());
    // Age the code so that it becomes a flushing candidate.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Prepare a shared function info eligible for code flushing that will
  // represent the dangling tail of the candidate list.
  Handle<SharedFunctionInfo> shared2;
  {
    HandleScope inner_scope(isolate);
    LocalContext env;
    CompileRun("function flushMe() { return 0; }"
               "flushMe(1);");

    Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(env.local(), v8_str("flushMe"))
                                           .ToLocalChecked())));
    CHECK(f->is_compiled());
    // Age this code too, so both functions end up on the candidate list.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
  }

  // Simulate incremental marking and collect code flushing candidates.
  SimulateIncrementalMarking(heap);
  // Non-NULL gc_metadata indicates f is enqueued as a flushing candidate.
  CHECK(shared1->code()->gc_metadata() != NULL);

  // Optimize function and make sure the unoptimized code is replaced.
#ifdef DEBUG
  FLAG_stop_at = "f";
#endif
  CompileRun("%OptimizeFunctionOnNextCall(g);"
             "g(false);");

  // Finish garbage collection cycle.
  heap->CollectAllGarbage();
  // The candidate link must have been cleared when the code was replaced.
  CHECK(shared1->code()->gc_metadata() == NULL);
}
4266
4267
// Regression test for issue 169928: carefully lays out new space so that an
// array literal's backing store lands right at the space boundary with no
// room for its AllocationMemento; running the literal site afterwards must
// not read past the boundary (would crash with a protection violation).
TEST(Regress169928) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_crankshaft = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction) return;

  // Prepare the environment
  CompileRun("function fastliteralcase(literal, value) {"
             "  literal[0] = value;"
             "  return literal;"
             "}"
             "function get_standard_literal() {"
             "  var literal = [1, 2, 3];"
             "  return literal;"
             "}"
             "obj = fastliteralcase(get_standard_literal(), 1);"
             "obj = fastliteralcase(get_standard_literal(), 1.5);"
             "obj = fastliteralcase(get_standard_literal(), 2);");

  // prepare the heap
  v8::Local<v8::String> mote_code_string =
      v8_str("fastliteralcase(mote, 2.5);");

  v8::Local<v8::String> array_name = v8_str("mote");
  CHECK(CcTest::global()
            ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
            .FromJust());

  // First make sure we flip spaces
  CcTest::heap()->CollectGarbage(NEW_SPACE);

  // Allocate the object.
  Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
  array_data->set(0, Smi::FromInt(1));
  array_data->set(1, Smi::FromInt(2));

  // Fill new space so that exactly the array plus memento plus one word
  // remain before the boundary.
  AllocateAllButNBytes(CcTest::heap()->new_space(),
                       JSArray::kSize + AllocationMemento::kSize +
                       kPointerSize);

  Handle<JSArray> array =
      factory->NewJSArrayWithElements(array_data, FAST_SMI_ELEMENTS);

  CHECK_EQ(Smi::FromInt(2), array->length());
  CHECK(array->HasFastSmiOrObjectElements());

  // We need filler the size of AllocationMemento object, plus an extra
  // fill pointer value.
  HeapObject* obj = NULL;
  AllocationResult allocation =
      CcTest::heap()->new_space()->AllocateRawUnaligned(
          AllocationMemento::kSize + kPointerSize);
  CHECK(allocation.To(&obj));
  Address addr_obj = obj->address();
  CcTest::heap()->CreateFillerObjectAt(
      addr_obj, AllocationMemento::kSize + kPointerSize);

  // Give the array a name, making sure not to allocate strings.
  v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
  CHECK(CcTest::global()->Set(env.local(), array_name, array_obj).FromJust());

  // This should crash with a protection violation if we are running a build
  // with the bug.
  AlwaysAllocateScope aa_scope(isolate);
  v8::Script::Compile(env.local(), mote_code_string)
      .ToLocalChecked()
      ->Run(env.local())
      .ToLocalChecked();
}
4344
4345
#ifdef DEBUG
// Regression test: repeatedly extends a SharedFunctionInfo's optimized code
// map while an allocation timeout forces a GC during the map expansion.
// Heap::set_allocation_timeout() is used here, which is presumably only
// available in DEBUG builds — hence the #ifdef.
TEST(Regress513507) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_gc_global = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Prepare function whose optimized code map we can use.
  Handle<SharedFunctionInfo> shared;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 1 }"
               "f(); %OptimizeFunctionOnNextCall(f); f();");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
    // Drop the JS-level reference so only {shared} keeps the function's
    // SharedFunctionInfo reachable.
    CompileRun("f = null");
  }

  // Prepare optimized code that we can use.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function g() { return 2 }"
               "g(); %OptimizeFunctionOnNextCall(g); g();");

    Handle<JSFunction> g = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
    code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
    // Optimization can be disabled (e.g. --no-crankshaft); nothing to test
    // in that case.
    if (!code->is_optimized_code()) return;
  }

  Handle<TypeFeedbackVector> vector = handle(shared->feedback_vector());
  Handle<LiteralsArray> lit =
      LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
  Handle<Context> context(isolate->context());

  // Add the new code several times to the optimized code map and also set an
  // allocation timeout so that expanding the code map will trigger a GC.
  heap->set_allocation_timeout(5);
  FLAG_gc_interval = 1000;
  for (int i = 0; i < 10; ++i) {
    // Distinct bailout ids make each entry a new addition to the map.
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }
}
#endif  // DEBUG
4400
4401
// Regression test: evacuates a literals array that is referenced from a
// SharedFunctionInfo's optimized code map while incremental marking has
// already enqueued (old copies of) that map, then GCs to flush out stale
// pointers.
TEST(Regress514122) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // Prepare function whose optimized code map we can use.
  Handle<SharedFunctionInfo> shared;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function f() { return 1 }"
               "f(); %OptimizeFunctionOnNextCall(f); f();");

    Handle<JSFunction> f = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
    shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
    CompileRun("f = null");
  }

  // Prepare optimized code that we can use.
  Handle<Code> code;
  {
    HandleScope inner_scope(isolate);
    CompileRun("function g() { return 2 }"
               "g(); %OptimizeFunctionOnNextCall(g); g();");

    Handle<JSFunction> g = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()->Get(env.local(), v8_str("g")).ToLocalChecked())));
    code = inner_scope.CloseAndEscape(handle(g->code(), isolate));
    // Optimization may be unavailable; bail out rather than fail.
    if (!code->is_optimized_code()) return;
  }

  Handle<TypeFeedbackVector> vector = handle(shared->feedback_vector());
  Handle<LiteralsArray> lit =
      LiteralsArray::New(isolate, vector, shared->num_literals(), TENURED);
  Handle<Context> context(isolate->context());

  // Add the code several times to the optimized code map.
  for (int i = 0; i < 3; ++i) {
    HandleScope inner_scope(isolate);
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }
  shared->optimized_code_map()->Print();

  // Add the code with a literals array to be evacuated.
  Page* evac_page;
  {
    HandleScope inner_scope(isolate);
    AlwaysAllocateScope always_allocate(isolate);
    // Make sure literal is placed on an old-space evacuation candidate.
    SimulateFullSpace(heap->old_space());

    // Make sure the number of literals is > 0.
    // NOTE: this inner {lit} shadows the outer one on purpose; only this
    // TENURED array ends up on the evacuation candidate page.
    Handle<LiteralsArray> lit =
        LiteralsArray::New(isolate, vector, 23, TENURED);

    evac_page = Page::FromAddress(lit->address());
    BailoutId id = BailoutId(100);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }

  // Heap is ready, force {evac_page} to become an evacuation candidate and
  // simulate incremental marking to enqueue optimized code map.
  FLAG_manual_evacuation_candidates_selection = true;
  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  SimulateIncrementalMarking(heap);

  // No matter whether reachable or not, {boomer} is doomed.
  Handle<Object> boomer(shared->optimized_code_map(), isolate);

  // Add the code several times to the optimized code map. This will leave old
  // copies of the optimized code map unreachable but still marked.
  for (int i = 3; i < 6; ++i) {
    HandleScope inner_scope(isolate);
    BailoutId id = BailoutId(i);
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
  }

  // Trigger a GC to flush out the bug.
  heap->CollectGarbage(i::OLD_SPACE, "fire in the hole");
  // Printing touches the (possibly evacuated) object; crashes if pointers
  // were not updated correctly.
  boomer->Print();
}
4493
4494
// Regression test: flushes the unoptimized code of an inlined function while
// the optimized outer code stays reachable only via the optimized code map,
// then re-installs that code into a fresh closure.
TEST(Regress513496) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Perform one initial GC to enable code flushing.
  CcTest::heap()->CollectAllGarbage();

  // Prepare an optimized closure containing an inlined function. Then age
  // the inlined unoptimized code to trigger code flushing but make sure the
  // outer optimized code is kept in the optimized code map.
  Handle<SharedFunctionInfo> shared;
  {
    LocalContext context;
    HandleScope inner_scope(isolate);
    CompileRun(
        "function g(x) { return x + 1 }"
        "function mkClosure() {"
        "  return function(x) { return g(x); };"
        "}"
        "var f = mkClosure();"
        "f(1); f(2);"
        "%OptimizeFunctionOnNextCall(f); f(3);");

    Handle<JSFunction> g = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("g"))
                                           .ToLocalChecked())));
    CHECK(g->shared()->is_compiled());
    // Age g's code past the flushing threshold so the next full GC flushes it.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      g->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
    }

    Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("f"))
                                           .ToLocalChecked())));
    CHECK(f->is_compiled());
    shared = inner_scope.CloseAndEscape(handle(f->shared(), isolate));
    CompileRun("f = null");
  }

  // Lookup the optimized code and keep it alive.
  CodeAndLiterals result = shared->SearchOptimizedCodeMap(
      isolate->context()->native_context(), BailoutId::None());
  Handle<Code> optimized_code(result.code, isolate);

  // Finish a full GC cycle so that the unoptimized code of 'g' is flushed even
  // though the optimized code for 'f' is reachable via the optimized code map.
  heap->CollectAllGarbage();

  // Make a new closure that will get code installed from the code map.
  // Unoptimized code is missing and the deoptimizer will go ballistic.
  CompileRun("var h = mkClosure(); h('bozo');");
}
4554
4555
// Verifies that pointers stored in a large object (LO space) into an
// evacuation candidate are recorded by the incremental-marking write barrier
// and updated when the candidate is evacuated.
TEST(LargeObjectSlotRecording) {
  FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Create an object on an evacuation candidate.
  SimulateFullSpace(heap->old_space());
  Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
  Page* evac_page = Page::FromAddress(lit->address());
  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  // Raw pointer kept to detect whether the object moved.
  FixedArray* old_location = *lit;

  // Allocate a large object.
  int size = Max(1000000, Page::kMaxRegularHeapObjectSize + KB);
  CHECK(size > Page::kMaxRegularHeapObjectSize);
  Handle<FixedArray> lo = isolate->factory()->NewFixedArray(size, TENURED);
  CHECK(heap->lo_space()->Contains(*lo));

  // Start incremental marking to activate the write barrier.
  SimulateIncrementalMarking(heap, false);
  heap->incremental_marking()->AdvanceIncrementalMarking(
      10000000, 10000000, IncrementalMarking::IdleStepActions());

  // Create references from the large object to the object on the evacuation
  // candidate.
  const int kStep = size / 10;
  for (int i = 0; i < size; i += kStep) {
    lo->set(i, *lit);
    CHECK(lo->get(i) == old_location);
  }

  // Move the evacuation candidate object.
  CcTest::heap()->CollectAllGarbage();

  // Verify that the pointers in the large object got updated.
  for (int i = 0; i < size; i += kStep) {
    CHECK_EQ(lo->get(i), *lit);
    CHECK(lo->get(i) != old_location);
  }
}
4598
4599
// No-op object visitor; used to exercise handle iteration without touching
// the visited pointers.
class DummyVisitor : public ObjectVisitor {
 public:
  void VisitPointers(Object** start, Object** end) override {}
};
4604
4605
// Fills the current handle block to its limit and then iterates handles via
// a DeferredHandleScope, checking the block-boundary case is handled.
TEST(DeferredHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
  HandleScopeData* data = isolate->handle_scope_data();
  Handle<Object> init(heap->empty_string(), isolate);
  // Exhaust the current block of handles.
  while (data->next < data->limit) {
    Handle<Object> obj(heap->empty_string(), isolate);
  }
  // An entire block of handles has been filled.
  // Next handle would require a new block.
  CHECK(data->next == data->limit);

  DeferredHandleScope deferred(isolate);
  DummyVisitor visitor;
  isolate->handle_scope_implementer()->Iterate(&visitor);
  delete deferred.Detach();
}
4625
4626
// A single large incremental-marking step over a 10M-element array should be
// able to (nearly) complete marking.
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function f(n) {"
             "    var a = new Array(n);"
             "    for (var i = 0; i < n; i += 100) a[i] = i;"
             "};"
             "f(10 * 1024 * 1024);");
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking();
  }
  // This big step should be sufficient to mark the whole array.
  marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  CHECK(marking->IsComplete() ||
        marking->IsReadyToOverApproximateWeakClosure());
}
4644
4645
// Runs allocation-heavy optimized code with inline allocation enabled,
// disabled, and re-enabled, checking that toggling does not break execution.
TEST(DisableInlineAllocation) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function test() {"
             "  var x = [];"
             "  for (var i = 0; i < 10; i++) {"
             "    x[i] = [ {}, [1,2,3], [1,x,3] ];"
             "  }"
             "}"
             "function run() {"
             "  %OptimizeFunctionOnNextCall(test);"
             "  test();"
             "  %DeoptimizeFunction(test);"
             "}");

  // Warm-up with inline allocation enabled.
  CompileRun("test(); test(); run();");

  // Run test with inline allocation disabled.
  CcTest::heap()->DisableInlineAllocation();
  CompileRun("run()");

  // Run test with inline allocation re-enabled.
  CcTest::heap()->EnableInlineAllocation();
  CompileRun("run()");
}
4673
4674
4675 static int AllocationSitesCount(Heap* heap) {
4676 int count = 0;
4677 for (Object* site = heap->allocation_sites_list();
4678 !(site->IsUndefined());
4679 site = AllocationSite::cast(site)->weak_next()) {
4680 count++;
4681 }
4682 return count;
4683 }
4684
4685
// Checks that code registered in an AllocationSite's dependent_code() is held
// weakly: once the function dies, the weak cell in the dependency set is
// cleared by GC even while the site itself is kept alive.
TEST(EnsureAllocationSiteDependentCodesProcessed) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  GlobalHandles* global_handles = isolate->global_handles();

  if (!isolate->use_crankshaft()) return;

  // The allocation site at the head of the list is ours.
  Handle<AllocationSite> site;
  {
    LocalContext context;
    v8::HandleScope scope(context->GetIsolate());

    int count = AllocationSitesCount(heap);
    CompileRun("var bar = function() { return (new Array()); };"
               "var a = bar();"
               "bar();"
               "bar();");

    // One allocation site should have been created.
    int new_count = AllocationSitesCount(heap);
    CHECK_EQ(new_count, (count + 1));
    // Global handle keeps the site alive across the GCs below.
    site = Handle<AllocationSite>::cast(
        global_handles->Create(
            AllocationSite::cast(heap->allocation_sites_list())));

    CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");

    CHECK_EQ(DependentCode::kAllocationSiteTransitionChangedGroup,
             site->dependent_code()->group());
    CHECK_EQ(1, site->dependent_code()->count());
    CHECK(site->dependent_code()->object_at(0)->IsWeakCell());
    Code* function_bar = Code::cast(
        WeakCell::cast(site->dependent_code()->object_at(0))->value());
    Handle<JSFunction> bar_handle = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()
                ->Get(context.local(), v8_str("bar"))
                .ToLocalChecked())));
    CHECK_EQ(bar_handle->code(), function_bar);
  }

  // Now make sure that a gc should get rid of the function, even though we
  // still have the allocation site alive.
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }

  // The site still exists because of our global handle, but the code is no
  // longer referred to by dependent_code().
  CHECK(site->dependent_code()->object_at(0)->IsWeakCell() &&
        WeakCell::cast(site->dependent_code()->object_at(0))->cleared());
}
4742
4743
// Cells embedded in optimized code must be weak: after the referenced
// function dies, GC marks the code for deoptimization.
TEST(CellsInOptimizedCodeAreWeak) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun("bar = (function() {"
               "  function bar() {"
               "    return foo(1);"
               "  };"
               "  var foo = function(x) { with (x) { return 1 + x; } };"
               "  bar(foo);"
               "  bar(foo);"
               "  bar(foo);"
               "  %OptimizeFunctionOnNextCall(bar);"
               "  bar(foo);"
               "  return bar;})();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    // Keep only the code alive; the function itself becomes garbage.
    code = scope.CloseAndEscape(Handle<Code>(bar->code()));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
}
4785
4786
// Objects embedded in optimized code must be weak: after the embedding
// function dies, GC marks the code for deoptimization.
TEST(ObjectsInOptimizedCodeAreWeak) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun("function bar() {"
               "  return foo(1);"
               "};"
               "function foo(x) { with (x) { return 1 + x; } };"
               "bar();"
               "bar();"
               "bar();"
               "%OptimizeFunctionOnNextCall(bar);"
               "bar();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    // Keep only the code alive; the function itself becomes garbage.
    code = scope.CloseAndEscape(Handle<Code>(bar->code()));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
}
4826
4827
// Repeatedly creates and discards optimized functions under simulated
// incremental marking, then asserts the weak object-to-code table does not
// accumulate stale entries.
TEST(NoWeakHashTableLeakWithIncrementalMarking) {
  if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
  if (!i::FLAG_incremental_marking) return;
  i::FLAG_weak_embedded_objects_in_optimized_code = true;
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_compilation_cache = false;
  i::FLAG_retain_maps_for_n_gc = 0;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();

  // Do not run for no-snap builds.
  if (!i::Snapshot::HaveASnapshotToStartFrom(isolate)) return;

  v8::internal::Heap* heap = CcTest::heap();

  // Get a clean slate regarding optimized functions on the heap.
  i::Deoptimizer::DeoptimizeAll(isolate);
  heap->CollectAllGarbage();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  for (int i = 0; i < 3; i++) {
    SimulateIncrementalMarking(heap);
    {
      LocalContext context;
      HandleScope scope(heap->isolate());
      EmbeddedVector<char, 256> source;
      // Unique function names per iteration so nothing is shared between
      // rounds (compilation cache is disabled above as well).
      SNPrintF(source,
               "function bar%d() {"
               "  return foo%d(1);"
               "};"
               "function foo%d(x) { with (x) { return 1 + x; } };"
               "bar%d();"
               "bar%d();"
               "bar%d();"
               "%%OptimizeFunctionOnNextCall(bar%d);"
               "bar%d();",
               i, i, i, i, i, i, i, i);
      CompileRun(source.start());
    }
    heap->CollectAllGarbage();
  }
  int elements = 0;
  if (heap->weak_object_to_code_table()->IsHashTable()) {
    WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
    elements = t->NumberOfElements();
  }
  CHECK_EQ(0, elements);
}
4877
4878
// Compiles, warms up, and force-optimizes a trivial function named |name|,
// returning a handle to the resulting JSFunction.
static Handle<JSFunction> OptimizeDummyFunction(v8::Isolate* isolate,
                                                const char* name) {
  EmbeddedVector<char, 256> source;
  SNPrintF(source,
           "function %s() { return 0; }"
           "%s(); %s();"
           "%%OptimizeFunctionOnNextCall(%s);"
           "%s();", name, name, name, name, name);
  CompileRun(source.start());
  i::Handle<JSFunction> fun = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()
              ->Get(isolate->GetCurrentContext(), v8_str(name))
              .ToLocalChecked())));
  return fun;
}
4895
4896
4897 static int GetCodeChainLength(Code* code) {
4898 int result = 0;
4899 while (code->next_code_link()->IsCode()) {
4900 result++;
4901 code = Code::cast(code->next_code_link());
4902 }
4903 return result;
4904 }
4905
4906
// The next_code_link chain must be weak: a dead function's code drops out of
// the chain after GC, shortening it by exactly one.
TEST(NextCodeLinkIsWeak) {
  i::FLAG_always_opt = false;
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  heap->CollectAllAvailableGarbage();
  int code_chain_length_before, code_chain_length_after;
  {
    HandleScope scope(heap->isolate());
    Handle<JSFunction> mortal =
        OptimizeDummyFunction(CcTest::isolate(), "mortal");
    Handle<JSFunction> immortal =
        OptimizeDummyFunction(CcTest::isolate(), "immortal");
    CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
    code_chain_length_before = GetCodeChainLength(immortal->code());
    // Keep the immortal code and let the mortal code die.
    code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
    CompileRun("mortal = null; immortal = null;");
  }
  heap->CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  code_chain_length_after = GetCodeChainLength(*code);
  CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
}
4936
4937
// Assembles a minimal (push/drop) code object flagged as OPTIMIZED_FUNCTION,
// for tests that need a Code object without going through the compiler.
static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
  i::byte buffer[i::Assembler::kMinimalBufferSize];
  MacroAssembler masm(isolate, buffer, sizeof(buffer),
                      v8::internal::CodeObjectRequired::kYes);
  CodeDesc desc;
  masm.Push(isolate->factory()->undefined_value());
  masm.Drop(1);
  masm.GetCode(&desc);
  Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::OPTIMIZED_FUNCTION), undefined);
  CHECK(code->IsCode());
  return code;
}
4952
4953
// Like NextCodeLinkIsWeak, but splices dummy code objects directly into the
// context's OPTIMIZED_CODE_LIST: the unreferenced one must be unlinked by GC.
TEST(NextCodeLinkIsWeak2) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_crankshaft()) return;
  HandleScope outer_scope(heap->isolate());
  heap->CollectAllAvailableGarbage();
  Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
  Handle<Code> new_head;
  Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
  {
    HandleScope scope(heap->isolate());
    Handle<Code> immortal = DummyOptimizedCode(isolate);
    Handle<Code> mortal = DummyOptimizedCode(isolate);
    // Chain: immortal -> mortal -> old_head; only immortal escapes the scope.
    mortal->set_next_code_link(*old_head);
    immortal->set_next_code_link(*mortal);
    context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
    new_head = scope.CloseAndEscape(immortal);
  }
  heap->CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  CHECK_EQ(*old_head, new_head->next_code_link());
}
4979
4980
// Set by ClearWeakIC so tests can observe that the weak callback fired.
static bool weak_ic_cleared = false;

// Weak callback: records that the GC cleared the weak reference and resets
// the persistent handle so it no longer keeps the object alive.
static void ClearWeakIC(
    const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
  printf("clear weak is called\n");
  weak_ic_cleared = true;
  data.GetParameter()->Reset();
}
4989
4990
// A constructor recorded in a CallIC's feedback slot must be held via a weak
// cell that GC can clear, and the IC must re-learn a new constructor after
// clearing (staying monomorphic).
TEST(WeakFunctionInConstructor) {
  if (i::FLAG_always_opt) return;
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  CompileRun(
      "function createObj(obj) {"
      "  return new obj();"
      "}");
  i::Handle<JSFunction> createObj = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()
              ->Get(env.local(), v8_str("createObj"))
              .ToLocalChecked())));

  v8::Persistent<v8::Object> garbage;
  {
    v8::HandleScope scope(isolate);
    // 'hat' only escapes via the persistent handle below, so making that
    // handle weak leaves nothing keeping the function alive.
    const char* source =
        " (function() {"
        "   function hat() { this.x = 5; }"
        "   createObj(hat);"
        "   createObj(hat);"
        "   return hat;"
        " })();";
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  Heap* heap = CcTest::i_isolate()->heap();
  heap->CollectAllGarbage();
  CHECK(weak_ic_cleared);

  // We've determined the constructor in createObj has had its weak cell
  // cleared. Now, verify that one additional call with a new function
  // allows monomorphicity.
  Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
      createObj->shared()->feedback_vector(), CcTest::i_isolate());
  // The weak cell in the feedback slot may take a few GC rounds to clear.
  for (int i = 0; i < 20; i++) {
    Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
    CHECK(slot_value->IsWeakCell());
    if (WeakCell::cast(slot_value)->cleared()) break;
    heap->CollectAllGarbage();
  }

  Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
  CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
  CompileRun(
      "function coat() { this.x = 6; }"
      "createObj(coat);");
  slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
  CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
}
5049
5050
// Checks that the value returned by execution of the source is weak:
// the returned object is kept only via a weak persistent handle, so a full
// GC must invoke the weak callback (observed via weak_ic_cleared).
void CheckWeakness(const char* source) {
  i::FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  v8::Persistent<v8::Object> garbage;
  {
    v8::HandleScope scope(isolate);
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  Heap* heap = CcTest::i_isolate()->heap();
  heap->CollectAllGarbage();
  CHECK(weak_ic_cleared);
}
5072
5073
// Each of the following "weak IC" tests creates an IC that embeds a map with
// the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
TEST(WeakMapInMonomorphicLoadIC) {
  // A monomorphic LoadIC must not keep the receiver's prototype alive.
  CheckWeakness("function loadIC(obj) {"
                "  return obj.name;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   loadIC(obj);"
                "   loadIC(obj);"
                "   loadIC(obj);"
                "   return proto;"
                " })();");
}
5089
5090
// A polymorphic LoadIC (two receiver maps) must not keep the shared
// prototype alive.
TEST(WeakMapInPolymorphicLoadIC) {
  CheckWeakness(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   loadIC(obj);"
      "   loadIC(obj);"
      "   loadIC(obj);"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   loadIC(poly);"
      "   return proto;"
      " })();");
}
5108
5109
// A monomorphic KeyedLoadIC must not keep the receiver's prototype alive.
TEST(WeakMapInMonomorphicKeyedLoadIC) {
  CheckWeakness("function keyedLoadIC(obj, field) {"
                "  return obj[field];"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   keyedLoadIC(obj, 'name');"
                "   keyedLoadIC(obj, 'name');"
                "   keyedLoadIC(obj, 'name');"
                "   return proto;"
                " })();");
}
5123
5124
// A polymorphic KeyedLoadIC must not keep the shared prototype alive.
TEST(WeakMapInPolymorphicKeyedLoadIC) {
  CheckWeakness(
      "function keyedLoadIC(obj, field) {"
      "  return obj[field];"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   keyedLoadIC(obj, 'name');"
      "   keyedLoadIC(obj, 'name');"
      "   keyedLoadIC(obj, 'name');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   keyedLoadIC(poly, 'name');"
      "   return proto;"
      " })();");
}
5142
5143
// A monomorphic StoreIC must not keep the receiver's prototype alive.
TEST(WeakMapInMonomorphicStoreIC) {
  CheckWeakness("function storeIC(obj, value) {"
                "  obj.name = value;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   storeIC(obj, 'x');"
                "   storeIC(obj, 'x');"
                "   storeIC(obj, 'x');"
                "   return proto;"
                " })();");
}
5157
5158
// A polymorphic StoreIC must not keep the shared prototype alive.
TEST(WeakMapInPolymorphicStoreIC) {
  CheckWeakness(
      "function storeIC(obj, value) {"
      "  obj.name = value;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   storeIC(obj, 'x');"
      "   storeIC(obj, 'x');"
      "   storeIC(obj, 'x');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   storeIC(poly, 'x');"
      "   return proto;"
      " })();");
}
5176
5177
// A monomorphic KeyedStoreIC must not keep the receiver's prototype alive.
// NOTE(review): keyedStoreIC takes (obj, field, value) but is invoked with
// only two arguments, so the stored value is undefined — presumably still
// sufficient to train the IC, but confirm this was intentional.
TEST(WeakMapInMonomorphicKeyedStoreIC) {
  CheckWeakness("function keyedStoreIC(obj, field, value) {"
                "  obj[field] = value;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   keyedStoreIC(obj, 'x');"
                "   keyedStoreIC(obj, 'x');"
                "   keyedStoreIC(obj, 'x');"
                "   return proto;"
                " })();");
}
5191
5192
// A polymorphic KeyedStoreIC must not keep the shared prototype alive.
// NOTE(review): as in the monomorphic variant, keyedStoreIC is called
// without its third argument (value is undefined) — confirm intentional.
TEST(WeakMapInPolymorphicKeyedStoreIC) {
  CheckWeakness(
      "function keyedStoreIC(obj, field, value) {"
      "  obj[field] = value;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   keyedStoreIC(obj, 'x');"
      "   keyedStoreIC(obj, 'x');"
      "   keyedStoreIC(obj, 'x');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   keyedStoreIC(poly, 'x');"
      "   return proto;"
      " })();");
}
5210
5211
// A monomorphic CompareNilIC must not keep the receiver's prototype alive.
TEST(WeakMapInMonomorphicCompareNilIC) {
  CheckWeakness("function compareNilIC(obj) {"
                "  return obj == null;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   compareNilIC(obj);"
                "   compareNilIC(obj);"
                "   compareNilIC(obj);"
                "   return proto;"
                " })();");
}
5225
5226
5227 Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
5228 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
5229 Handle<Object> obj =
5230 Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
5231 return Handle<JSFunction>::cast(obj);
5232 }
5233
5234
// Asserts that the IC identified by |kind| and |slot_index| is in |state|.
// Load/KeyedLoad/Call ICs are checked through the type feedback vector;
// other kinds fall back to scanning |code| for the first inline cache stub.
void CheckIC(Code* code, Code::Kind kind, SharedFunctionInfo* shared,
             int slot_index, InlineCacheState state) {
  if (kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC ||
      kind == Code::CALL_IC) {
    TypeFeedbackVector* vector = shared->feedback_vector();
    FeedbackVectorSlot slot(slot_index);
    if (kind == Code::LOAD_IC) {
      LoadICNexus nexus(vector, slot);
      CHECK_EQ(nexus.StateFromFeedback(), state);
    } else if (kind == Code::KEYED_LOAD_IC) {
      KeyedLoadICNexus nexus(vector, slot);
      CHECK_EQ(nexus.StateFromFeedback(), state);
    } else if (kind == Code::CALL_IC) {
      CallICNexus nexus(vector, slot);
      CHECK_EQ(nexus.StateFromFeedback(), state);
    }
  } else {
    Code* ic = FindFirstIC(code, kind);
    CHECK(ic->is_inline_cache_stub());
    CHECK(ic->ic_state() == state);
  }
}
5257
5258
// A LoadIC trained to MONOMORPHIC must stay MONOMORPHIC across a full GC and
// a subsequent re-run.
TEST(MonomorphicStaysMonomorphicAfterGC) {
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      "function testIC() {"
      "  var proto = {'name' : 'weak'};"
      "  var obj = Object.create(proto);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    // Inner scope so the objects created by testIC become garbage.
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  heap->CollectAllGarbage();
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, MONOMORPHIC);
}
5290
5291
// A LoadIC trained to POLYMORPHIC must stay POLYMORPHIC across a full GC and
// a subsequent re-run.
TEST(PolymorphicStaysPolymorphicAfterGC) {
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      "function testIC() {"
      "  var proto = {'name' : 'weak'};"
      "  var obj = Object.create(proto);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  var poly = Object.create(proto);"
      "  poly.x = true;"
      "  loadIC(poly);"
      "  return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    // Inner scope so the objects created by testIC become garbage.
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  heap->CollectAllGarbage();
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC->code(), Code::LOAD_IC, loadIC->shared(), 0, POLYMORPHIC);
}
5326
5327
// Weak cells keep their value reachable through scavenges, but a full GC
// clears a cell whose value is not otherwise strongly reachable.
TEST(WeakCell) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  v8::internal::Factory* factory = isolate->factory();

  HandleScope outer_scope(isolate);
  Handle<WeakCell> weak_cell1;
  {
    // The value of weak_cell1 is reachable only through the weak cell.
    HandleScope inner_scope(isolate);
    Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
    weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
  }

  // The value of weak_cell2 stays strongly reachable via |survivor|.
  Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
  Handle<WeakCell> weak_cell2;
  {
    HandleScope inner_scope(isolate);
    weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
  }
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  // Scavenges must not clear either cell.
  heap->CollectGarbage(NEW_SPACE);
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  heap->CollectGarbage(NEW_SPACE);
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  // A full GC clears the cell whose value became unreachable.
  heap->CollectAllAvailableGarbage();
  CHECK(weak_cell1->cleared());
  CHECK_EQ(*survivor, weak_cell2->value());
}
5360
5361
// Same invariants as TEST(WeakCell), but with incremental marking steps and
// scavenges interleaved while the weak cells are being created.
TEST(WeakCellsWithIncrementalMarking) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  v8::internal::Factory* factory = isolate->factory();

  const int N = 16;
  HandleScope outer_scope(isolate);
  Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
  Handle<WeakCell> weak_cells[N];

  for (int i = 0; i < N; i++) {
    HandleScope inner_scope(isolate);
    // Only the first cell's value is kept strongly alive (via |survivor|).
    Handle<HeapObject> value =
        i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
    Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
    CHECK(weak_cell->value()->IsFixedArray());
    IncrementalMarking* marking = heap->incremental_marking();
    if (marking->IsStopped()) {
      heap->StartIncrementalMarking();
    }
    marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
    // A scavenge must not clear a freshly created cell.
    heap->CollectGarbage(NEW_SPACE);
    CHECK(weak_cell->value()->IsFixedArray());
    weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
  }
  // Full GC: every cell except the survivor's must be cleared.
  heap->CollectAllGarbage();
  CHECK_EQ(*survivor, weak_cells[0]->value());
  for (int i = 1; i < N; i++) {
    CHECK(weak_cells[i]->cleared());
  }
}
5394
5395
5396 #ifdef DEBUG
// Regression test: under stress compaction, an allocation performed from
// optimized code (boxing the double result of |add|) must survive an induced
// allocation failure (set_allocation_timeout) at the store site.
TEST(AddInstructionChangesNewSpacePromotion) {
  i::FLAG_allow_natives_syntax = true;
  i::FLAG_expose_gc = true;
  i::FLAG_stress_compaction = true;
  i::FLAG_gc_interval = 1000;
  CcTest::InitializeVM();
  // The scenario relies on allocation-site pretenuring being active.
  if (!i::FLAG_allocation_site_pretenuring) return;
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  LocalContext env;
  CompileRun(
      "function add(a, b) {"
      "  return a + b;"
      "}"
      "add(1, 2);"
      "add(\"a\", \"b\");"
      "var oldSpaceObject;"
      "gc();"
      "function crash(x) {"
      "  var object = {a: null, b: null};"
      "  var result = add(1.5, x | 0);"
      "  object.a = result;"
      "  oldSpaceObject = object;"
      "  return object;"
      "}"
      "crash(1);"
      "crash(1);"
      "%OptimizeFunctionOnNextCall(crash);"
      "crash(1);");

  v8::Local<v8::Object> global = CcTest::global();
  v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
      global->Get(env.local(), v8_str("crash")).ToLocalChecked());
  v8::Local<v8::Value> args1[] = {v8_num(1)};
  // Force the very next allocation to take the slow path and fail once.
  heap->DisableInlineAllocation();
  heap->set_allocation_timeout(1);
  g->Call(env.local(), global, 1, args1).ToLocalChecked();
  heap->CollectAllGarbage();
}
5437
5438
// Fatal-error handler for TEST(CEntryStubOOM): terminates the process with a
// zero (success) status exactly when the failure location is the expected
// OOM site, "CALL_AND_RETRY_LAST".
void OnFatalErrorExpectOOM(const char* location, const char* message) {
  // strcmp yields 0 on a match, which becomes a successful exit status.
  const int status = strcmp(location, "CALL_AND_RETRY_LAST");
  exit(status);
}
5443
5444
// Runs allocation-heavy builtin code under --gc-interval=1. Any OOM must be
// reported at CALL_AND_RETRY_LAST, which OnFatalErrorExpectOOM translates
// into a clean process exit.
TEST(CEntryStubOOM) {
  i::FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CcTest::isolate()->SetFatalErrorHandler(OnFatalErrorExpectOOM);

  v8::Local<v8::Value> result = CompileRun(
      "%SetFlags('--gc-interval=1');"
      "var a = [];"
      "a.__proto__ = [];"
      "a.unshift(1)");

  CHECK(result->IsNumber());
}
5459
5460 #endif // DEBUG
5461
5462
// No-op interrupt callback: the regression test only needs the interrupt
// request/dispatch machinery to run, not any work inside the callback.
static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
5464
5465
// Native helper exposed to JS as "interrupt"; queues an interrupt that is
// serviced at the next interrupt check.
static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
  CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
}
5469
5470
// Regression test for crbug.com/538257: aborting compaction on a nearly full
// old space must not corrupt the heap. Runs in its own isolate with very
// small heap limits so the abort path is actually taken.
UNINITIALIZED_TEST(Regress538257) {
  i::FLAG_manual_evacuation_candidates_selection = true;
  v8::Isolate::CreateParams create_params;
  // Set heap limits.
  create_params.constraints.set_max_semi_space_size(1 * Page::kPageSize / MB);
  create_params.constraints.set_max_old_space_size(6 * Page::kPageSize / MB);
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  isolate->Enter();
  {
    i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
    HandleScope handle_scope(i_isolate);
    PagedSpace* old_space = i_isolate->heap()->old_space();
    const int kMaxObjects = 10000;
    const int kFixedArrayLen = 512;
    Handle<FixedArray> objects[kMaxObjects];
    // Fill old space with forced evacuation candidates until it can no
    // longer expand.
    for (int i = 0; (i < kMaxObjects) && old_space->CanExpand(Page::kPageSize);
         i++) {
      objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
      Page::FromAddress(objects[i]->address())
          ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
    }
    SimulateFullSpace(old_space);
    i_isolate->heap()->CollectGarbage(OLD_SPACE);
    // If we get this far, we've successfully aborted compaction. Any further
    // allocations might trigger OOM.
  }
  isolate->Exit();
  isolate->Dispose();
}
5501
5502
// Regression test for crbug.com/357137: an interrupt (which fakes a stack
// overflow) while running eval'ed code with many locals must not break the
// resulting closure; f()() must still read v0 correctly.
TEST(Regress357137) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope hscope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(
      v8::String::NewFromUtf8(isolate, "interrupt", v8::NewStringType::kNormal)
          .ToLocalChecked(),
      v8::FunctionTemplate::New(isolate, RequestInterrupt));
  v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
  CHECK(!context.IsEmpty());
  v8::Context::Scope cscope(context);

  v8::Local<v8::Value> result = CompileRun(
      "var locals = '';"
      "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
      "eval('function f() {' + locals + 'return function() { return v0; }; }');"
      "interrupt();"  // This triggers a fake stack overflow in f.
      "f()()");
  CHECK_EQ(42.0, result->ToNumber(context).ToLocalChecked()->Value());
}
5524
5525
// Regression test for crbug.com/507979: shrinking an object while a
// filtering heap iterator is live creates a filler whose mark bits overlap
// the following object; iteration must still terminate cleanly.
TEST(Regress507979) {
  const int kFixedArrayLen = 10;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope handle_scope(isolate);

  // Two adjacent new-space arrays; o1 is trimmed below while o2 stays live.
  Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  CHECK(heap->InNewSpace(o1->address()));
  CHECK(heap->InNewSpace(o2->address()));

  HeapIterator it(heap, i::HeapIterator::kFilterUnreachable);

  // Replace parts of an object placed before a live object with a filler. This
  // way the filler object shares the mark bits with the following live object.
  o1->Shrink(kFixedArrayLen - 1);

  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
    // Let's not optimize the loop away.
    CHECK(obj->address() != nullptr);
  }
}
5549
5550
// After Array#shift trims the elements backing store of an old-space array,
// the elements page must either not have finished concurrent sweeping yet,
// or the elements must be marked black.
TEST(ArrayShiftSweeping) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();

  // Two gc() calls promote |array| to old space before it is shifted.
  v8::Local<v8::Value> result = CompileRun(
      "var array = new Array(400);"
      "var tmp = new Array(1000);"
      "array[0] = 10;"
      "gc();"
      "gc();"
      "array.shift();"
      "array;");

  Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result)));
  CHECK(heap->InOldSpace(o->elements()));
  CHECK(heap->InOldSpace(*o));
  Page* page = Page::FromAddress(o->elements()->address());
  CHECK(page->parallel_sweeping_state().Value() <=
            MemoryChunk::kSweepingFinalize ||
        Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
}
5576
5577
// Verifies that the promotion queue, which lives at the end of to-space, is
// evacuated rather than overwritten when allocations reach into its memory.
UNINITIALIZED_TEST(PromotionQueue) {
  i::FLAG_expose_gc = true;
  // Exactly two semi-space pages (see step (1) in the comment below).
  i::FLAG_max_semi_space_size = 2 * (Page::kPageSize / MB);
  i::FLAG_min_semi_space_size = i::FLAG_max_semi_space_size;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    Heap* heap = i_isolate->heap();
    NewSpace* new_space = heap->new_space();

    // In this test we will try to overwrite the promotion queue which is at the
    // end of to-space. To actually make that possible, we need at least two
    // semi-space pages and take advantage of fragmentation.
    // (1) Use a semi-space consisting of two pages.
    // (2) Create a few small long living objects and call the scavenger to
    // move them to the other semi-space.
    // (3) Create a huge object, i.e., remainder of first semi-space page and
    // create another huge object which should be of maximum allocatable memory
    // size of the second semi-space page.
    // (4) Call the scavenger again.
    // What will happen is: the scavenger will promote the objects created in
    // (2) and will create promotion queue entries at the end of the second
    // semi-space page during the next scavenge when it promotes the objects to
    // the old generation. The first allocation of (3) will fill up the first
    // semi-space page. The second allocation in (3) will not fit into the
    // first semi-space page, but it will overwrite the promotion queue which
    // are in the second semi-space page. If the right guards are in place, the
    // promotion queue will be evacuated in that case.


    CHECK(new_space->IsAtMaximumCapacity());
    CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());

    // Call the scavenger two times to get an empty new space
    heap->CollectGarbage(NEW_SPACE);
    heap->CollectGarbage(NEW_SPACE);

    // First create a few objects which will survive a scavenge, and will get
    // promoted to the old generation later on. These objects will create
    // promotion queue entries at the end of the second semi-space page.
    const int number_handles = 12;
    Handle<FixedArray> handles[number_handles];
    for (int i = 0; i < number_handles; i++) {
      handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
    }

    heap->CollectGarbage(NEW_SPACE);
    CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());

    // Fill-up the first semi-space page.
    FillUpOnePage(new_space);

    // Create a small object to initialize the bump pointer on the second
    // semi-space page.
    Handle<FixedArray> small =
        i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
    CHECK(heap->InNewSpace(*small));

    // Fill-up the second semi-space page.
    FillUpOnePage(new_space);

    // This scavenge will corrupt memory if the promotion queue is not
    // evacuated.
    heap->CollectGarbage(NEW_SPACE);
  }
  isolate->Dispose();
}
5650
5651
// Regression test for crbug.com/388880: migrating an object placed exactly
// at the end of a page must not crash in Heap::AdjustLiveBytes() while
// incremental marking is active.
TEST(Regress388880) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  // map2 adds one field so migrating map1 -> map2 resizes the object.
  Handle<Map> map1 = Map::Create(isolate, 1);
  Handle<Map> map2 =
      Map::CopyWithField(map1, factory->NewStringFromStaticChars("foo"),
                         HeapType::Any(isolate), NONE, Representation::Tagged(),
                         OMIT_TRANSITION).ToHandleChecked();

  int desired_offset = Page::kPageSize - map1->instance_size();

  // Allocate padding objects in old pointer space so, that object allocated
  // afterwards would end at the end of the page.
  SimulateFullSpace(heap->old_space());
  int padding_size = desired_offset - Page::kObjectStartOffset;
  CreatePadding(heap, padding_size, TENURED);

  Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
  o->set_properties(*factory->empty_fixed_array());

  // Ensure that the object allocated where we need it.
  Page* page = Page::FromAddress(o->address());
  CHECK_EQ(desired_offset, page->Offset(o->address()));

  // Now we have an object right at the end of the page.

  // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
  // that would cause crash.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking();
  CHECK(marking->IsMarking());

  // Now everything is set up for crashing in JSObject::MigrateFastToFast()
  // when it calls heap->AdjustLiveBytes(...).
  JSObject::MigrateToMap(o, map2);
}
5694
5695
// Regression test for v8:3631: replacing a weak map's backing store after
// the old store has already been marked black must not lose the new store
// when marking finishes.
TEST(Regress3631) {
  i::FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  v8::Local<v8::Value> result = CompileRun(
      "var weak_map = new WeakMap();"
      "var future_keys = [];"
      "for (var i = 0; i < 50; i++) {"
      "  var key = {'k' : i + 0.1};"
      "  weak_map.set(key, 1);"
      "  future_keys.push({'x' : i + 0.2});"
      "}"
      "weak_map");
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking();
  }
  // Incrementally mark the backing store.
  Handle<JSReceiver> obj =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
  Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
  // Step the marker until the current backing store is black (or marking
  // finished on its own).
  while (!Marking::IsBlack(
             Marking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
         !marking->IsStopped()) {
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
  }
  // Stash the backing store in a handle.
  Handle<Object> save(weak_map->table(), isolate);
  // The following line will update the backing store.
  CompileRun(
      "for (var i = 0; i < 50; i++) {"
      "  weak_map.set(future_keys[i], i);"
      "}");
  heap->incremental_marking()->set_should_hurry(true);
  heap->CollectGarbage(OLD_SPACE);
}
5734
5735
// Regression test for crbug.com/442710: Array#shift on an array reachable
// from the global object, followed by an old-space GC, must not crash.
TEST(Regress442710) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  Handle<JSArray> array = factory->NewJSArray(2);

  Handle<String> name = factory->InternalizeUtf8String("testArray");
  JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
  CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
  heap->CollectGarbage(OLD_SPACE);
}
5752
5753
// NOTE(review): the factor of two presumably reflects two slots (number and
// string) per cache entry — confirm against Heap::number_string_cache.
HEAP_TEST(NumberStringCacheSize) {
  // Test that the number-string cache has not been resized in the snapshot.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  // Without a snapshot there is nothing to verify.
  if (!isolate->snapshot_available()) return;
  Heap* heap = isolate->heap();
  CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
           heap->number_string_cache()->length());
}
5763
5764
// Regression test for v8:3877: a constructor's former prototype must stay
// alive while an instance's map still references it, and become collectable
// once no map does.
TEST(Regress3877) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);
  CompileRun("function cls() { this.x = 10; }");
  Handle<WeakCell> weak_prototype;
  {
    // Observe the prototype only through a weak cell.
    HandleScope inner_scope(isolate);
    v8::Local<v8::Value> result = CompileRun("cls.prototype");
    Handle<JSReceiver> proto =
        v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
    weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
  }
  CHECK(!weak_prototype->cleared());
  CompileRun(
      "var a = { };"
      "a.x = new cls();"
      "cls.prototype = null;");
  // Multiple GCs: a single collection may not be enough due to map retention
  // (see CheckMapRetainingFor / --retain-maps-for-n-gc).
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }
  // The map of a.x keeps prototype alive
  CHECK(!weak_prototype->cleared());
  // Change the map of a.x and make the previous map garbage collectable.
  CompileRun("a.x.__proto__ = {};");
  for (int i = 0; i < 4; i++) {
    heap->CollectAllGarbage();
  }
  CHECK(weak_prototype->cleared());
}
5797
5798
// Creates a map with a freshly allocated prototype, registers it in the
// heap's retained-map list, and returns the map's weak cell so the caller
// can observe when the map dies.
Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
  HandleScope inner_scope(isolate);
  Handle<Map> map = Map::Create(isolate, 1);
  v8::Local<v8::Value> result =
      CompileRun("(function () { return {x : 10}; })();");
  Handle<JSReceiver> proto =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
  Map::SetPrototype(map, proto);
  heap->AddRetainedMap(map);
  return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
}
5810
5811
// Checks that a retained map survives exactly |n| old-space GCs (with
// --retain-maps-for-n-gc=n) and is collected by the following one.
void CheckMapRetainingFor(int n) {
  FLAG_retain_maps_for_n_gc = n;
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
  CHECK(!weak_cell->cleared());
  // The first n collections must keep the map alive...
  for (int i = 0; i < n; i++) {
    heap->CollectGarbage(OLD_SPACE);
  }
  CHECK(!weak_cell->cleared());
  // ...and collection n + 1 must clear it.
  heap->CollectGarbage(OLD_SPACE);
  CHECK(weak_cell->cleared());
}
5825
5826
// Exercises map retention with the default and several explicit values of
// --retain-maps-for-n-gc.
TEST(MapRetaining) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
  CheckMapRetainingFor(0);
  CheckMapRetainingFor(1);
  CheckMapRetainingFor(7);
}
5835
5836
// Regression test: growing the retained-maps ArrayList while a (global) GC
// can be triggered by the allocation must not crash.
TEST(RegressArrayListGC) {
  FLAG_retain_maps_for_n_gc = 1;
  FLAG_incremental_marking = 0;
  FLAG_gc_global = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  AddRetainedMap(isolate, heap);
  Handle<Map> map = Map::Create(isolate, 1);
  heap->CollectGarbage(OLD_SPACE);
  // Force GC in old space on next addition of retained map.
  Map::WeakCellForMap(map);
  SimulateFullSpace(CcTest::heap()->new_space());
  for (int i = 0; i < 10; i++) {
    heap->AddRetainedMap(map);
  }
  heap->CollectGarbage(OLD_SPACE);
}
5856
5857
5858 #ifdef DEBUG
// Smoke test for the debug-only retaining-path tracer: tracing the path to a
// string constant must not crash.
TEST(PathTracer) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  v8::Local<v8::Value> result = CompileRun("'abc'");
  Handle<Object> o = v8::Utils::OpenHandle(*result);
  CcTest::i_isolate()->heap()->TracePathToObject(*o);
}
5867 #endif // DEBUG
5868
5869
5870 TEST(WritableVsImmortalRoots) {
5871 for (int i = 0; i < Heap::kStrongRootListLength; ++i) {
5872 Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
5873 bool writable = Heap::RootCanBeWrittenAfterInitialization(root_index);
5874 bool immortal = Heap::RootIsImmortalImmovable(root_index);
5875 // A root value can be writable, immortal, or neither, but not both.
5876 CHECK(!immortal || !writable);
5877 }
5878 }
5879
5880
// Right-trims a fixed typed array of |initial_length| elements of |type| by
// |elements_to_trim| elements and verifies that any free-space filler was
// placed after the array without smashing the array header.
static void TestRightTrimFixedTypedArray(i::ExternalArrayType type,
                                         int initial_length,
                                         int elements_to_trim) {
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  Handle<FixedTypedArrayBase> array =
      factory->NewFixedTypedArray(initial_length, type, true);
  int old_size = array->size();
  heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array,
                                                         elements_to_trim);

  // Check that free space filler is at the right place and did not smash the
  // array header.
  CHECK(array->IsFixedArrayBase());
  CHECK_EQ(initial_length - elements_to_trim, array->length());
  int new_size = array->size();
  if (new_size != old_size) {
    // Free space filler should be created in this case.
    Address next_obj_address = array->address() + array->size();
    CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
  }
  heap->CollectAllAvailableGarbage();
}
5907
5908
5909 TEST(Regress472513) {
5910 CcTest::InitializeVM();
5911 v8::HandleScope scope(CcTest::isolate());
5912
5913 // The combination of type/initial_length/elements_to_trim triggered
5914 // typed array header smashing with free space filler (crbug/472513).
5915
5916 // 64-bit cases.
5917 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32, 6);
5918 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 32 - 7, 6);
5919 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16, 6);
5920 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 16 - 3, 6);
5921 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8, 6);
5922 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 8 - 1, 6);
5923
5924 // 32-bit cases.
5925 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16, 3);
5926 TestRightTrimFixedTypedArray(i::kExternalUint8Array, 16 - 3, 3);
5927 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8, 3);
5928 TestRightTrimFixedTypedArray(i::kExternalUint16Array, 8 - 1, 3);
5929 TestRightTrimFixedTypedArray(i::kExternalUint32Array, 4, 3);
5930 }
5931
5932
// Smoke test for WeakFixedArray: Add (starting from a null array handle),
// Remove, Compact, and Add again must not crash.
TEST(WeakFixedArray) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  Handle<HeapNumber> number = CcTest::i_isolate()->factory()->NewHeapNumber(1);
  // Passing an empty handle makes Add allocate a fresh array.
  Handle<WeakFixedArray> array = WeakFixedArray::Add(Handle<Object>(), number);
  array->Remove(number);
  array->Compact<WeakFixedArray::NullCallback>();
  WeakFixedArray::Add(array, number);
}
5943
5944
// After a full GC, raw code objects captured in a stack trace must have been
// preprocessed into Smi offsets so the trace does not keep code alive.
TEST(PreprocessStackTrace) {
  // Do not automatically trigger early GC.
  FLAG_gc_interval = -1;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::TryCatch try_catch(CcTest::isolate());
  CompileRun("throw new Error();");
  CHECK(try_catch.HasCaught());
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
  Handle<Name> key = isolate->factory()->stack_trace_symbol();
  Handle<Object> stack_trace =
      JSObject::GetProperty(exception, key).ToHandleChecked();
  // Before GC, element 3 of the raw trace holds a code object.
  Handle<Object> code =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(code->IsCode());

  isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing");

  // After GC, the same element must have become a (Smi) position.
  Handle<Object> pos =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(pos->IsSmi());

  // No element of the preprocessed trace may still be a code object.
  Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
  int array_length = Smi::cast(stack_trace_array->length())->value();
  for (int i = 0; i < array_length; i++) {
    Handle<Object> element =
        Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
    CHECK(!element->IsCode());
  }
}
5976
5977
// Set by UtilsHasBeenCollected once the weak callback for the natives
// "utils" object has fired.
static bool utils_has_been_collected = false;

// Weak callback: records that the object was collected and releases the
// persistent handle.
static void UtilsHasBeenCollected(
    const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
  utils_has_been_collected = true;
  data.GetParameter()->Reset();
}
5985
5986
// With --expose-natives-as=utils, deleting the global "utils" reference must
// make the natives object collectable (observed via a weak callback).
TEST(BootstrappingExports) {
  // Expose utils object and delete it to observe that it is indeed
  // being garbage-collected.
  FLAG_expose_natives_as = "utils";
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;

  // Not applicable when starting from a snapshot.
  if (Snapshot::HaveASnapshotToStartFrom(CcTest::i_isolate())) return;

  utils_has_been_collected = false;

  v8::Persistent<v8::Object> utils;

  {
    // Grab the utils object, then delete the only strong (global) reference.
    v8::HandleScope scope(isolate);
    v8::Local<v8::String> name = v8_str("utils");
    utils.Reset(isolate, CcTest::global()
                             ->Get(env.local(), name)
                             .ToLocalChecked()
                             ->ToObject(env.local())
                             .ToLocalChecked());
    CHECK(CcTest::global()->Delete(env.local(), name).FromJust());
  }

  utils.SetWeak(&utils, UtilsHasBeenCollected,
                v8::WeakCallbackType::kParameter);

  CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks");

  CHECK(utils_has_been_collected);
}
6019
6020
// Regression test for crbug.com/1878: InternalArray instances must never
// share a map with user-visible Array instances, and must keep fast object
// elements.
TEST(Regress1878) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::Function> constructor = v8::Utils::CallableToLocal(
      CcTest::i_isolate()->internal_array_function());
  LocalContext env;
  CHECK(CcTest::global()
            ->Set(env.local(), v8_str("InternalArray"), constructor)
            .FromJust());

  v8::TryCatch try_catch(isolate);

  CompileRun(
      "var a = Array();"
      "for (var i = 0; i < 1000; i++) {"
      "  var ai = new InternalArray(10000);"
      "  if (%HaveSameMap(ai, a)) throw Error();"
      "  if (!%HasFastObjectElements(ai)) throw Error();"
      "}"
      "for (var i = 0; i < 1000; i++) {"
      "  var ai = new InternalArray(10000);"
      "  if (%HaveSameMap(ai, a)) throw Error();"
      "  if (!%HasFastObjectElements(ai)) throw Error();"
      "}");

  CHECK(!try_catch.HasCaught());
}
6050
6051
// Allocates a FixedArray occupying exactly |bytes| bytes in |space| and
// verifies both its placement and its total size. |bytes| must be
// pointer-size aligned and at least one FixedArray header.
void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
  CHECK(bytes >= FixedArray::kHeaderSize);
  CHECK(bytes % kPointerSize == 0);
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);
  // Guarantee the allocation succeeds instead of triggering a GC.
  AlwaysAllocateScope always_allocate(isolate);
  int elements =
      static_cast<int>((bytes - FixedArray::kHeaderSize) / kPointerSize);
  Handle<FixedArray> array = factory->NewFixedArray(
      elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
  CHECK((space == NEW_SPACE) == isolate->heap()->InNewSpace(*array));
  CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
}
6065
6066
// The new-space allocation counter must advance by exactly the allocated
// byte count, be unaffected by scavenges, and behave correctly across
// unsigned overflow.
TEST(NewSpaceAllocationCounter) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->NewSpaceAllocationCounter();
  heap->CollectGarbage(NEW_SPACE);
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, NEW_SPACE);
  size_t counter2 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(kSize, counter2 - counter1);
  // A scavenge must not change the counter.
  heap->CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(0U, counter3 - counter2);
  // Test counter overflow.
  size_t max_counter = -1;
  heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
  size_t start = heap->NewSpaceAllocationCounter();
  for (int i = 0; i < 20; i++) {
    AllocateInSpace(isolate, kSize, NEW_SPACE);
    size_t counter = heap->NewSpaceAllocationCounter();
    // Unsigned wrap-around keeps the per-allocation delta exact.
    CHECK_EQ(kSize, counter - start);
    start = counter;
  }
}
6092
6093
// The old-generation allocation counter must advance by at least the
// allocated byte count (see v8:4148), be unaffected by scavenges, and behave
// correctly across unsigned overflow.
TEST(OldSpaceAllocationCounter) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->OldGenerationAllocationCounter();
  heap->CollectGarbage(NEW_SPACE);
  heap->CollectGarbage(NEW_SPACE);
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  size_t counter2 = heap->OldGenerationAllocationCounter();
  // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
  CHECK_LE(kSize, counter2 - counter1);
  // A scavenge must not change the counter.
  heap->CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->OldGenerationAllocationCounter();
  CHECK_EQ(0u, counter3 - counter2);
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  heap->CollectGarbage(OLD_SPACE);
  size_t counter4 = heap->OldGenerationAllocationCounter();
  CHECK_LE(kSize, counter4 - counter3);
  // Test counter overflow.
  size_t max_counter = -1;
  heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
  size_t start = heap->OldGenerationAllocationCounter();
  for (int i = 0; i < 20; i++) {
    AllocateInSpace(isolate, kSize, OLD_SPACE);
    size_t counter = heap->OldGenerationAllocationCounter();
    CHECK_LE(kSize, counter - start);
    start = counter;
  }
}
6125
6126
6127 TEST(NewSpaceAllocationThroughput) {
6128 CcTest::InitializeVM();
6129 v8::HandleScope scope(CcTest::isolate());
6130 Isolate* isolate = CcTest::i_isolate();
6131 Heap* heap = isolate->heap();
6132 GCTracer* tracer = heap->tracer();
6133 int time1 = 100;
6134 size_t counter1 = 1000;
6135 tracer->SampleAllocation(time1, counter1, 0);
6136 int time2 = 200;
6137 size_t counter2 = 2000;
6138 tracer->SampleAllocation(time2, counter2, 0);
6139 size_t throughput =
6140 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6141 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6142 int time3 = 1000;
6143 size_t counter3 = 30000;
6144 tracer->SampleAllocation(time3, counter3, 0);
6145 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond();
6146 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6147 }
6148
6149
6150 TEST(NewSpaceAllocationThroughput2) {
6151 CcTest::InitializeVM();
6152 v8::HandleScope scope(CcTest::isolate());
6153 Isolate* isolate = CcTest::i_isolate();
6154 Heap* heap = isolate->heap();
6155 GCTracer* tracer = heap->tracer();
6156 int time1 = 100;
6157 size_t counter1 = 1000;
6158 tracer->SampleAllocation(time1, counter1, 0);
6159 int time2 = 200;
6160 size_t counter2 = 2000;
6161 tracer->SampleAllocation(time2, counter2, 0);
6162 size_t throughput =
6163 tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6164 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6165 int time3 = 1000;
6166 size_t counter3 = 30000;
6167 tracer->SampleAllocation(time3, counter3, 0);
6168 throughput = tracer->NewSpaceAllocationThroughputInBytesPerMillisecond(100);
6169 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6170 }
6171
6172
// Called from JS as check(): asserts that no message object is pending, i.e.
// a previously thrown-and-handled exception did not leak its message.
static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Object* message =
      *reinterpret_cast<Object**>(isolate->pending_message_obj_address());
  CHECK(message->IsTheHole());
}
6179
6180
6181 TEST(MessageObjectLeak) {
6182 CcTest::InitializeVM();
6183 v8::Isolate* isolate = CcTest::isolate();
6184 v8::HandleScope scope(isolate);
6185 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
6186 global->Set(
6187 v8::String::NewFromUtf8(isolate, "check", v8::NewStringType::kNormal)
6188 .ToLocalChecked(),
6189 v8::FunctionTemplate::New(isolate, CheckLeak));
6190 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
6191 v8::Context::Scope cscope(context);
6192
6193 const char* test =
6194 "try {"
6195 " throw 'message 1';"
6196 "} catch (e) {"
6197 "}"
6198 "check();"
6199 "L: try {"
6200 " throw 'message 2';"
6201 "} finally {"
6202 " break L;"
6203 "}"
6204 "check();";
6205 CompileRun(test);
6206
6207 const char* flag = "--turbo-filter=*";
6208 FlagList::SetFlagsFromString(flag, StrLength(flag));
6209 FLAG_always_opt = true;
6210 FLAG_turbo_try_finally = true;
6211
6212 CompileRun(test);
6213 }
6214
6215
6216 static void CheckEqualSharedFunctionInfos(
6217 const v8::FunctionCallbackInfo<v8::Value>& args) {
6218 Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
6219 Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
6220 Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
6221 Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
6222 CHECK(fun1->shared() == fun2->shared());
6223 }
6224
6225
6226 static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
6227 Isolate* isolate = CcTest::i_isolate();
6228 Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
6229 Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
6230 fun->ReplaceCode(*isolate->builtins()->CompileLazy());
6231 fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
6232 isolate->heap()->CollectAllAvailableGarbage("remove code and gc");
6233 }
6234
6235
6236 TEST(CanonicalSharedFunctionInfo) {
6237 CcTest::InitializeVM();
6238 v8::Isolate* isolate = CcTest::isolate();
6239 v8::HandleScope scope(isolate);
6240 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
6241 global->Set(isolate, "check", v8::FunctionTemplate::New(
6242 isolate, CheckEqualSharedFunctionInfos));
6243 global->Set(isolate, "remove",
6244 v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
6245 v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
6246 v8::Context::Scope cscope(context);
6247 CompileRun(
6248 "function f() { return function g() {}; }"
6249 "var g1 = f();"
6250 "remove(f);"
6251 "var g2 = f();"
6252 "check(g1, g2);");
6253
6254 CompileRun(
6255 "function f() { return (function() { return function g() {}; })(); }"
6256 "var g1 = f();"
6257 "remove(f);"
6258 "var g2 = f();"
6259 "check(g1, g2);");
6260 }
6261
6262
6263 TEST(OldGenerationAllocationThroughput) {
6264 CcTest::InitializeVM();
6265 v8::HandleScope scope(CcTest::isolate());
6266 Isolate* isolate = CcTest::i_isolate();
6267 Heap* heap = isolate->heap();
6268 GCTracer* tracer = heap->tracer();
6269 int time1 = 100;
6270 size_t counter1 = 1000;
6271 tracer->SampleAllocation(time1, 0, counter1);
6272 int time2 = 200;
6273 size_t counter2 = 2000;
6274 tracer->SampleAllocation(time2, 0, counter2);
6275 size_t throughput =
6276 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
6277 CHECK_EQ((counter2 - counter1) / (time2 - time1), throughput);
6278 int time3 = 1000;
6279 size_t counter3 = 30000;
6280 tracer->SampleAllocation(time3, 0, counter3);
6281 throughput =
6282 tracer->OldGenerationAllocationThroughputInBytesPerMillisecond(100);
6283 CHECK_EQ((counter3 - counter1) / (time3 - time1), throughput);
6284 }
6285
6286
6287 TEST(AllocationThroughput) {
6288 CcTest::InitializeVM();
6289 v8::HandleScope scope(CcTest::isolate());
6290 Isolate* isolate = CcTest::i_isolate();
6291 Heap* heap = isolate->heap();
6292 GCTracer* tracer = heap->tracer();
6293 int time1 = 100;
6294 size_t counter1 = 1000;
6295 tracer->SampleAllocation(time1, counter1, counter1);
6296 int time2 = 200;
6297 size_t counter2 = 2000;
6298 tracer->SampleAllocation(time2, counter2, counter2);
6299 size_t throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
6300 CHECK_EQ(2 * (counter2 - counter1) / (time2 - time1), throughput);
6301 int time3 = 1000;
6302 size_t counter3 = 30000;
6303 tracer->SampleAllocation(time3, counter3, counter3);
6304 throughput = tracer->AllocationThroughputInBytesPerMillisecond(100);
6305 CHECK_EQ(2 * (counter3 - counter1) / (time3 - time1), throughput);
6306 }
6307
6308
6309 TEST(ContextMeasure) {
6310 CcTest::InitializeVM();
6311 v8::HandleScope scope(CcTest::isolate());
6312 Isolate* isolate = CcTest::i_isolate();
6313 LocalContext context;
6314
6315 int size_upper_limit = 0;
6316 int count_upper_limit = 0;
6317 HeapIterator it(CcTest::heap());
6318 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6319 size_upper_limit += obj->Size();
6320 count_upper_limit++;
6321 }
6322
6323 ContextMeasure measure(*isolate->native_context());
6324
6325 PrintF("Context size : %d bytes\n", measure.Size());
6326 PrintF("Context object count: %d\n", measure.Count());
6327
6328 CHECK_LE(1000, measure.Count());
6329 CHECK_LE(50000, measure.Size());
6330
6331 CHECK_LE(measure.Count(), count_upper_limit);
6332 CHECK_LE(measure.Size(), size_upper_limit);
6333 }
6334
6335
6336 TEST(ScriptIterator) {
6337 CcTest::InitializeVM();
6338 v8::HandleScope scope(CcTest::isolate());
6339 Isolate* isolate = CcTest::i_isolate();
6340 Heap* heap = CcTest::heap();
6341 LocalContext context;
6342
6343 heap->CollectAllGarbage();
6344
6345 int script_count = 0;
6346 {
6347 HeapIterator it(heap);
6348 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6349 if (obj->IsScript()) script_count++;
6350 }
6351 }
6352
6353 {
6354 Script::Iterator iterator(isolate);
6355 while (iterator.Next()) script_count--;
6356 }
6357
6358 CHECK_EQ(0, script_count);
6359 }
6360
6361
6362 TEST(SharedFunctionInfoIterator) {
6363 CcTest::InitializeVM();
6364 v8::HandleScope scope(CcTest::isolate());
6365 Isolate* isolate = CcTest::i_isolate();
6366 Heap* heap = CcTest::heap();
6367 LocalContext context;
6368
6369 heap->CollectAllGarbage();
6370 heap->CollectAllGarbage();
6371
6372 int sfi_count = 0;
6373 {
6374 HeapIterator it(heap);
6375 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6376 if (!obj->IsSharedFunctionInfo()) continue;
6377 sfi_count++;
6378 }
6379 }
6380
6381 {
6382 SharedFunctionInfo::Iterator iterator(isolate);
6383 while (iterator.Next()) sfi_count--;
6384 }
6385
6386 CHECK_EQ(0, sfi_count);
6387 }
6388
6389
6390 template <typename T>
6391 static UniqueId MakeUniqueId(const Persistent<T>& p) {
6392 return UniqueId(reinterpret_cast<uintptr_t>(*v8::Utils::OpenPersistent(p)));
6393 }
6394
6395
6396 TEST(Regress519319) {
6397 CcTest::InitializeVM();
6398 v8::Isolate* isolate = CcTest::isolate();
6399 v8::HandleScope scope(isolate);
6400 Heap* heap = CcTest::heap();
6401 LocalContext context;
6402
6403 v8::Persistent<Value> parent;
6404 v8::Persistent<Value> child;
6405
6406 parent.Reset(isolate, v8::Object::New(isolate));
6407 child.Reset(isolate, v8::Object::New(isolate));
6408
6409 SimulateFullSpace(heap->old_space());
6410 heap->CollectGarbage(OLD_SPACE);
6411 {
6412 UniqueId id = MakeUniqueId(parent);
6413 isolate->SetObjectGroupId(parent, id);
6414 isolate->SetReferenceFromGroup(id, child);
6415 }
6416 // The CollectGarbage call above starts sweeper threads.
6417 // The crash will happen if the following two functions
6418 // are called before sweeping finishes.
6419 heap->StartIncrementalMarking();
6420 heap->FinalizeIncrementalMarkingIfComplete("test");
6421 }
6422
6423
6424 HEAP_TEST(TestMemoryReducerSampleJsCalls) {
6425 CcTest::InitializeVM();
6426 v8::HandleScope scope(CcTest::isolate());
6427 Heap* heap = CcTest::heap();
6428 Isolate* isolate = CcTest::i_isolate();
6429 MemoryReducer* memory_reducer = heap->memory_reducer_;
6430 memory_reducer->SampleAndGetJsCallsPerMs(0);
6431 isolate->IncrementJsCallsFromApiCounter();
6432 isolate->IncrementJsCallsFromApiCounter();
6433 isolate->IncrementJsCallsFromApiCounter();
6434 double calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(1);
6435 CheckDoubleEquals(3, calls_per_ms);
6436
6437 calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(2);
6438 CheckDoubleEquals(0, calls_per_ms);
6439
6440 isolate->IncrementJsCallsFromApiCounter();
6441 isolate->IncrementJsCallsFromApiCounter();
6442 isolate->IncrementJsCallsFromApiCounter();
6443 isolate->IncrementJsCallsFromApiCounter();
6444 calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(4);
6445 CheckDoubleEquals(2, calls_per_ms);
6446 }
6447
6448
6449 } // namespace internal
6450 } // namespace v8
OLDNEW
« no previous file with comments | « test/cctest/test-dictionary.cc ('k') | test/cctest/test-incremental-marking.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698