Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(37)

Side by Side Diff: test/cctest/test-serialize.cc

Issue 6880010: Merge (7265, 7271] from bleeding_edge to experimental/gc branch.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: '' Created 9 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLD | NEW
1 // Copyright 2007-2010 the V8 project authors. All rights reserved. 1 // Copyright 2007-2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after
92 return encoder.Encode(AddressOf(id)); 92 return encoder.Encode(AddressOf(id));
93 } 93 }
94 94
95 95
96 static int make_code(TypeCode type, int id) { 96 static int make_code(TypeCode type, int id) {
97 return static_cast<uint32_t>(type) << kReferenceTypeShift | id; 97 return static_cast<uint32_t>(type) << kReferenceTypeShift | id;
98 } 98 }
99 99
100 100
101 TEST(ExternalReferenceEncoder) { 101 TEST(ExternalReferenceEncoder) {
102 StatsTable::SetCounterFunction(counter_function); 102 OS::Setup();
103 Heap::Setup(false); 103 i::Isolate::Current()->stats_table()->SetCounterFunction(counter_function);
104 HEAP->Setup(false);
104 ExternalReferenceEncoder encoder; 105 ExternalReferenceEncoder encoder;
105 CHECK_EQ(make_code(BUILTIN, Builtins::ArrayCode), 106 CHECK_EQ(make_code(BUILTIN, Builtins::ArrayCode),
106 Encode(encoder, Builtins::ArrayCode)); 107 Encode(encoder, Builtins::ArrayCode));
107 CHECK_EQ(make_code(v8::internal::RUNTIME_FUNCTION, Runtime::kAbort), 108 CHECK_EQ(make_code(v8::internal::RUNTIME_FUNCTION, Runtime::kAbort),
108 Encode(encoder, Runtime::kAbort)); 109 Encode(encoder, Runtime::kAbort));
109 CHECK_EQ(make_code(IC_UTILITY, IC::kLoadCallbackProperty), 110 CHECK_EQ(make_code(IC_UTILITY, IC::kLoadCallbackProperty),
110 Encode(encoder, IC_Utility(IC::kLoadCallbackProperty))); 111 Encode(encoder, IC_Utility(IC::kLoadCallbackProperty)));
111 ExternalReference keyed_load_function_prototype = 112 ExternalReference keyed_load_function_prototype =
112 ExternalReference(&Counters::keyed_load_function_prototype); 113 ExternalReference(COUNTERS->keyed_load_function_prototype());
113 CHECK_EQ(make_code(STATS_COUNTER, Counters::k_keyed_load_function_prototype), 114 CHECK_EQ(make_code(STATS_COUNTER, Counters::k_keyed_load_function_prototype),
114 encoder.Encode(keyed_load_function_prototype.address())); 115 encoder.Encode(keyed_load_function_prototype.address()));
115 ExternalReference the_hole_value_location = 116 ExternalReference the_hole_value_location =
116 ExternalReference::the_hole_value_location(); 117 ExternalReference::the_hole_value_location();
117 CHECK_EQ(make_code(UNCLASSIFIED, 2), 118 CHECK_EQ(make_code(UNCLASSIFIED, 2),
118 encoder.Encode(the_hole_value_location.address())); 119 encoder.Encode(the_hole_value_location.address()));
119 ExternalReference stack_limit_address = 120 ExternalReference stack_limit_address =
120 ExternalReference::address_of_stack_limit(); 121 ExternalReference::address_of_stack_limit();
121 CHECK_EQ(make_code(UNCLASSIFIED, 4), 122 CHECK_EQ(make_code(UNCLASSIFIED, 4),
122 encoder.Encode(stack_limit_address.address())); 123 encoder.Encode(stack_limit_address.address()));
123 ExternalReference real_stack_limit_address = 124 ExternalReference real_stack_limit_address =
124 ExternalReference::address_of_real_stack_limit(); 125 ExternalReference::address_of_real_stack_limit();
125 CHECK_EQ(make_code(UNCLASSIFIED, 5), 126 CHECK_EQ(make_code(UNCLASSIFIED, 5),
126 encoder.Encode(real_stack_limit_address.address())); 127 encoder.Encode(real_stack_limit_address.address()));
127 #ifdef ENABLE_DEBUGGER_SUPPORT 128 #ifdef ENABLE_DEBUGGER_SUPPORT
128 CHECK_EQ(make_code(UNCLASSIFIED, 15), 129 CHECK_EQ(make_code(UNCLASSIFIED, 15),
129 encoder.Encode(ExternalReference::debug_break().address())); 130 encoder.Encode(ExternalReference::debug_break().address()));
130 #endif // ENABLE_DEBUGGER_SUPPORT 131 #endif // ENABLE_DEBUGGER_SUPPORT
131 CHECK_EQ(make_code(UNCLASSIFIED, 10), 132 CHECK_EQ(make_code(UNCLASSIFIED, 10),
132 encoder.Encode(ExternalReference::new_space_start().address())); 133 encoder.Encode(ExternalReference::new_space_start().address()));
133 CHECK_EQ(make_code(UNCLASSIFIED, 3), 134 CHECK_EQ(make_code(UNCLASSIFIED, 3),
134 encoder.Encode(ExternalReference::roots_address().address())); 135 encoder.Encode(ExternalReference::roots_address().address()));
135 } 136 }
136 137
137 138
138 TEST(ExternalReferenceDecoder) { 139 TEST(ExternalReferenceDecoder) {
139 StatsTable::SetCounterFunction(counter_function); 140 OS::Setup();
140 Heap::Setup(false); 141 i::Isolate::Current()->stats_table()->SetCounterFunction(counter_function);
142 HEAP->Setup(false);
141 ExternalReferenceDecoder decoder; 143 ExternalReferenceDecoder decoder;
142 CHECK_EQ(AddressOf(Builtins::ArrayCode), 144 CHECK_EQ(AddressOf(Builtins::ArrayCode),
143 decoder.Decode(make_code(BUILTIN, Builtins::ArrayCode))); 145 decoder.Decode(make_code(BUILTIN, Builtins::ArrayCode)));
144 CHECK_EQ(AddressOf(Runtime::kAbort), 146 CHECK_EQ(AddressOf(Runtime::kAbort),
145 decoder.Decode(make_code(v8::internal::RUNTIME_FUNCTION, 147 decoder.Decode(make_code(v8::internal::RUNTIME_FUNCTION,
146 Runtime::kAbort))); 148 Runtime::kAbort)));
147 CHECK_EQ(AddressOf(IC_Utility(IC::kLoadCallbackProperty)), 149 CHECK_EQ(AddressOf(IC_Utility(IC::kLoadCallbackProperty)),
148 decoder.Decode(make_code(IC_UTILITY, IC::kLoadCallbackProperty))); 150 decoder.Decode(make_code(IC_UTILITY, IC::kLoadCallbackProperty)));
149 ExternalReference keyed_load_function = 151 ExternalReference keyed_load_function =
150 ExternalReference(&Counters::keyed_load_function_prototype); 152 ExternalReference(COUNTERS->keyed_load_function_prototype());
151 CHECK_EQ(keyed_load_function.address(), 153 CHECK_EQ(keyed_load_function.address(),
152 decoder.Decode( 154 decoder.Decode(
153 make_code(STATS_COUNTER, 155 make_code(STATS_COUNTER,
154 Counters::k_keyed_load_function_prototype))); 156 Counters::k_keyed_load_function_prototype)));
155 CHECK_EQ(ExternalReference::the_hole_value_location().address(), 157 CHECK_EQ(ExternalReference::the_hole_value_location().address(),
156 decoder.Decode(make_code(UNCLASSIFIED, 2))); 158 decoder.Decode(make_code(UNCLASSIFIED, 2)));
157 CHECK_EQ(ExternalReference::address_of_stack_limit().address(), 159 CHECK_EQ(ExternalReference::address_of_stack_limit().address(),
158 decoder.Decode(make_code(UNCLASSIFIED, 4))); 160 decoder.Decode(make_code(UNCLASSIFIED, 4)));
159 CHECK_EQ(ExternalReference::address_of_real_stack_limit().address(), 161 CHECK_EQ(ExternalReference::address_of_real_stack_limit().address(),
160 decoder.Decode(make_code(UNCLASSIFIED, 5))); 162 decoder.Decode(make_code(UNCLASSIFIED, 5)));
(...skipping 108 matching lines...) Expand 10 before | Expand all | Expand 10 after
269 // Tests that the heap can be deserialized. 271 // Tests that the heap can be deserialized.
270 272
271 static void Deserialize() { 273 static void Deserialize() {
272 CHECK(Snapshot::Initialize(FLAG_testing_serialization_file)); 274 CHECK(Snapshot::Initialize(FLAG_testing_serialization_file));
273 } 275 }
274 276
275 277
276 static void SanityCheck() { 278 static void SanityCheck() {
277 v8::HandleScope scope; 279 v8::HandleScope scope;
278 #ifdef DEBUG 280 #ifdef DEBUG
279 Heap::Verify(); 281 HEAP->Verify();
280 #endif 282 #endif
281 CHECK(Top::global()->IsJSObject()); 283 CHECK(Isolate::Current()->global()->IsJSObject());
282 CHECK(Top::global_context()->IsContext()); 284 CHECK(Isolate::Current()->global_context()->IsContext());
283 CHECK(Heap::symbol_table()->IsSymbolTable()); 285 CHECK(HEAP->symbol_table()->IsSymbolTable());
284 CHECK(!Factory::LookupAsciiSymbol("Empty")->IsFailure()); 286 CHECK(!FACTORY->LookupAsciiSymbol("Empty")->IsFailure());
285 } 287 }
286 288
287 289
288 DEPENDENT_TEST(Deserialize, Serialize) { 290 DEPENDENT_TEST(Deserialize, Serialize) {
289 // The serialize-deserialize tests only work if the VM is built without 291 // The serialize-deserialize tests only work if the VM is built without
290 // serialization. That doesn't matter. We don't need to be able to 292 // serialization. That doesn't matter. We don't need to be able to
291 // serialize a snapshot in a VM that is booted from a snapshot. 293 // serialize a snapshot in a VM that is booted from a snapshot.
292 if (!Snapshot::IsEnabled()) { 294 if (!Snapshot::IsEnabled()) {
293 v8::HandleScope scope; 295 v8::HandleScope scope;
294
295 Deserialize(); 296 Deserialize();
296 297
297 v8::Persistent<v8::Context> env = v8::Context::New(); 298 v8::Persistent<v8::Context> env = v8::Context::New();
298 env->Enter(); 299 env->Enter();
299 300
300 SanityCheck(); 301 SanityCheck();
301 } 302 }
302 } 303 }
303 304
304 305
305 DEPENDENT_TEST(DeserializeFromSecondSerialization, SerializeTwice) { 306 DEPENDENT_TEST(DeserializeFromSecondSerialization, SerializeTwice) {
306 if (!Snapshot::IsEnabled()) { 307 if (!Snapshot::IsEnabled()) {
307 v8::HandleScope scope; 308 v8::HandleScope scope;
308
309 Deserialize(); 309 Deserialize();
310 310
311 v8::Persistent<v8::Context> env = v8::Context::New(); 311 v8::Persistent<v8::Context> env = v8::Context::New();
312 env->Enter(); 312 env->Enter();
313 313
314 SanityCheck(); 314 SanityCheck();
315 } 315 }
316 } 316 }
317 317
318 318
319 DEPENDENT_TEST(DeserializeAndRunScript2, Serialize) { 319 DEPENDENT_TEST(DeserializeAndRunScript2, Serialize) {
320 if (!Snapshot::IsEnabled()) { 320 if (!Snapshot::IsEnabled()) {
321 v8::HandleScope scope; 321 v8::HandleScope scope;
322
323 Deserialize(); 322 Deserialize();
324 323
325 v8::Persistent<v8::Context> env = v8::Context::New(); 324 v8::Persistent<v8::Context> env = v8::Context::New();
326 env->Enter(); 325 env->Enter();
327 326
328 const char* c_source = "\"1234\".length"; 327 const char* c_source = "\"1234\".length";
329 v8::Local<v8::String> source = v8::String::New(c_source); 328 v8::Local<v8::String> source = v8::String::New(c_source);
330 v8::Local<v8::Script> script = v8::Script::Compile(source); 329 v8::Local<v8::Script> script = v8::Script::Compile(source);
331 CHECK_EQ(4, script->Run()->Int32Value()); 330 CHECK_EQ(4, script->Run()->Int32Value());
332 } 331 }
333 } 332 }
334 333
335 334
336 DEPENDENT_TEST(DeserializeFromSecondSerializationAndRunScript2, 335 DEPENDENT_TEST(DeserializeFromSecondSerializationAndRunScript2,
337 SerializeTwice) { 336 SerializeTwice) {
338 if (!Snapshot::IsEnabled()) { 337 if (!Snapshot::IsEnabled()) {
339 v8::HandleScope scope; 338 v8::HandleScope scope;
340
341 Deserialize(); 339 Deserialize();
342 340
343 v8::Persistent<v8::Context> env = v8::Context::New(); 341 v8::Persistent<v8::Context> env = v8::Context::New();
344 env->Enter(); 342 env->Enter();
345 343
346 const char* c_source = "\"1234\".length"; 344 const char* c_source = "\"1234\".length";
347 v8::Local<v8::String> source = v8::String::New(c_source); 345 v8::Local<v8::String> source = v8::String::New(c_source);
348 v8::Local<v8::Script> script = v8::Script::Compile(source); 346 v8::Local<v8::Script> script = v8::Script::Compile(source);
349 CHECK_EQ(4, script->Run()->Int32Value()); 347 CHECK_EQ(4, script->Run()->Int32Value());
350 } 348 }
351 } 349 }
352 350
353 351
354 TEST(PartialSerialization) { 352 TEST(PartialSerialization) {
355 Serializer::Enable(); 353 Serializer::Enable();
356 v8::V8::Initialize(); 354 v8::V8::Initialize();
357 355
358 v8::Persistent<v8::Context> env = v8::Context::New(); 356 v8::Persistent<v8::Context> env = v8::Context::New();
359 ASSERT(!env.IsEmpty()); 357 ASSERT(!env.IsEmpty());
360 env->Enter(); 358 env->Enter();
361 // Make sure all builtin scripts are cached. 359 // Make sure all builtin scripts are cached.
362 { HandleScope scope; 360 { HandleScope scope;
363 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) { 361 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
364 Bootstrapper::NativesSourceLookup(i); 362 Isolate::Current()->bootstrapper()->NativesSourceLookup(i);
365 } 363 }
366 } 364 }
367 Heap::CollectAllGarbage(Heap::kForceCompactionMask); 365 HEAP->CollectAllGarbage(Heap::kForceCompactionMask);
368 Heap::CollectAllGarbage(Heap::kForceCompactionMask); 366 HEAP->CollectAllGarbage(Heap::kForceCompactionMask);
369 367
370 Object* raw_foo; 368 Object* raw_foo;
371 { 369 {
372 v8::HandleScope handle_scope; 370 v8::HandleScope handle_scope;
373 v8::Local<v8::String> foo = v8::String::New("foo"); 371 v8::Local<v8::String> foo = v8::String::New("foo");
374 ASSERT(!foo.IsEmpty()); 372 ASSERT(!foo.IsEmpty());
375 raw_foo = *(v8::Utils::OpenHandle(*foo)); 373 raw_foo = *(v8::Utils::OpenHandle(*foo));
376 } 374 }
377 375
378 int file_name_length = StrLength(FLAG_testing_serialization_file) + 10; 376 int file_name_length = StrLength(FLAG_testing_serialization_file) + 10;
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after
418 CHECK_EQ(1, fscanf(fp, "pointer %d\n", &pointer_size)); 416 CHECK_EQ(1, fscanf(fp, "pointer %d\n", &pointer_size));
419 CHECK_EQ(1, fscanf(fp, "data %d\n", &data_size)); 417 CHECK_EQ(1, fscanf(fp, "data %d\n", &data_size));
420 CHECK_EQ(1, fscanf(fp, "code %d\n", &code_size)); 418 CHECK_EQ(1, fscanf(fp, "code %d\n", &code_size));
421 CHECK_EQ(1, fscanf(fp, "map %d\n", &map_size)); 419 CHECK_EQ(1, fscanf(fp, "map %d\n", &map_size));
422 CHECK_EQ(1, fscanf(fp, "cell %d\n", &cell_size)); 420 CHECK_EQ(1, fscanf(fp, "cell %d\n", &cell_size));
423 CHECK_EQ(1, fscanf(fp, "large %d\n", &large_size)); 421 CHECK_EQ(1, fscanf(fp, "large %d\n", &large_size));
424 #ifdef _MSC_VER 422 #ifdef _MSC_VER
425 #undef fscanf 423 #undef fscanf
426 #endif 424 #endif
427 fclose(fp); 425 fclose(fp);
428 Heap::ReserveSpace(new_size, 426 HEAP->ReserveSpace(new_size,
429 pointer_size, 427 pointer_size,
430 data_size, 428 data_size,
431 code_size, 429 code_size,
432 map_size, 430 map_size,
433 cell_size, 431 cell_size,
434 large_size); 432 large_size);
435 } 433 }
436 434
437 435
438 DEPENDENT_TEST(PartialDeserialization, PartialSerialization) { 436 DEPENDENT_TEST(PartialDeserialization, PartialSerialization) {
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
475 TEST(ContextSerialization) { 473 TEST(ContextSerialization) {
476 Serializer::Enable(); 474 Serializer::Enable();
477 v8::V8::Initialize(); 475 v8::V8::Initialize();
478 476
479 v8::Persistent<v8::Context> env = v8::Context::New(); 477 v8::Persistent<v8::Context> env = v8::Context::New();
480 ASSERT(!env.IsEmpty()); 478 ASSERT(!env.IsEmpty());
481 env->Enter(); 479 env->Enter();
482 // Make sure all builtin scripts are cached. 480 // Make sure all builtin scripts are cached.
483 { HandleScope scope; 481 { HandleScope scope;
484 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) { 482 for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
485 Bootstrapper::NativesSourceLookup(i); 483 Isolate::Current()->bootstrapper()->NativesSourceLookup(i);
486 } 484 }
487 } 485 }
488 // If we don't do this then we end up with a stray root pointing at the 486 // If we don't do this then we end up with a stray root pointing at the
489 // context even after we have disposed of env. 487 // context even after we have disposed of env.
490 Heap::CollectAllGarbage(Heap::kForceCompactionMask); 488 HEAP->CollectAllGarbage(Heap::kForceCompactionMask);
491 489
492 int file_name_length = StrLength(FLAG_testing_serialization_file) + 10; 490 int file_name_length = StrLength(FLAG_testing_serialization_file) + 10;
493 Vector<char> startup_name = Vector<char>::New(file_name_length + 1); 491 Vector<char> startup_name = Vector<char>::New(file_name_length + 1);
494 OS::SNPrintF(startup_name, "%s.startup", FLAG_testing_serialization_file); 492 OS::SNPrintF(startup_name, "%s.startup", FLAG_testing_serialization_file);
495 493
496 env->Exit(); 494 env->Exit();
497 495
498 Object* raw_context = *(v8::Utils::OpenHandle(*env)); 496 Object* raw_context = *(v8::Utils::OpenHandle(*env));
499 497
500 env.Dispose(); 498 env.Dispose();
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after
557 555
558 TEST(LinearAllocation) { 556 TEST(LinearAllocation) {
559 v8::V8::Initialize(); 557 v8::V8::Initialize();
560 int new_space_max = 512 * KB; 558 int new_space_max = 512 * KB;
561 int paged_space_max = Page::kMaxHeapObjectSize; 559 int paged_space_max = Page::kMaxHeapObjectSize;
562 560
563 for (int size = 1000; size < 5 * MB; size += size >> 1) { 561 for (int size = 1000; size < 5 * MB; size += size >> 1) {
564 size &= ~8; // Round. 562 size &= ~8; // Round.
565 int new_space_size = (size < new_space_max) ? size : new_space_max; 563 int new_space_size = (size < new_space_max) ? size : new_space_max;
566 int paged_space_size = (size < paged_space_max) ? size : paged_space_max; 564 int paged_space_size = (size < paged_space_max) ? size : paged_space_max;
567 Heap::ReserveSpace( 565 HEAP->ReserveSpace(
568 new_space_size, 566 new_space_size,
569 paged_space_size, // Old pointer space. 567 paged_space_size, // Old pointer space.
570 paged_space_size, // Old data space. 568 paged_space_size, // Old data space.
571 paged_space_size, // Code space. 569 paged_space_size, // Code space.
572 paged_space_size, // Map space. 570 paged_space_size, // Map space.
573 paged_space_size, // Cell space. 571 paged_space_size, // Cell space.
574 size); // Large object space. 572 size); // Large object space.
575 LinearAllocationScope linear_allocation_scope; 573 LinearAllocationScope linear_allocation_scope;
576 const int kSmallFixedArrayLength = 4; 574 const int kSmallFixedArrayLength = 4;
577 const int kSmallFixedArraySize = 575 const int kSmallFixedArraySize =
578 FixedArray::kHeaderSize + kSmallFixedArrayLength * kPointerSize; 576 FixedArray::kHeaderSize + kSmallFixedArrayLength * kPointerSize;
579 const int kSmallStringLength = 16; 577 const int kSmallStringLength = 16;
580 const int kSmallStringSize = 578 const int kSmallStringSize =
581 (SeqAsciiString::kHeaderSize + kSmallStringLength + 579 (SeqAsciiString::kHeaderSize + kSmallStringLength +
582 kObjectAlignmentMask) & ~kObjectAlignmentMask; 580 kObjectAlignmentMask) & ~kObjectAlignmentMask;
583 const int kMapSize = Map::kSize; 581 const int kMapSize = Map::kSize;
584 582
585 Object* new_last = NULL; 583 Object* new_last = NULL;
586 for (int i = 0; 584 for (int i = 0;
587 i + kSmallFixedArraySize <= new_space_size; 585 i + kSmallFixedArraySize <= new_space_size;
588 i += kSmallFixedArraySize) { 586 i += kSmallFixedArraySize) {
589 Object* obj = 587 Object* obj =
590 Heap::AllocateFixedArray(kSmallFixedArrayLength)->ToObjectChecked(); 588 HEAP->AllocateFixedArray(kSmallFixedArrayLength)->ToObjectChecked();
591 if (new_last != NULL) { 589 if (new_last != NULL) {
592 CHECK(reinterpret_cast<char*>(obj) == 590 CHECK(reinterpret_cast<char*>(obj) ==
593 reinterpret_cast<char*>(new_last) + kSmallFixedArraySize); 591 reinterpret_cast<char*>(new_last) + kSmallFixedArraySize);
594 } 592 }
595 new_last = obj; 593 new_last = obj;
596 } 594 }
597 595
598 Object* pointer_last = NULL; 596 Object* pointer_last = NULL;
599 for (int i = 0; 597 for (int i = 0;
600 i + kSmallFixedArraySize <= paged_space_size; 598 i + kSmallFixedArraySize <= paged_space_size;
601 i += kSmallFixedArraySize) { 599 i += kSmallFixedArraySize) {
602 Object* obj = Heap::AllocateFixedArray(kSmallFixedArrayLength, 600 Object* obj = HEAP->AllocateFixedArray(kSmallFixedArrayLength,
603 TENURED)->ToObjectChecked(); 601 TENURED)->ToObjectChecked();
604 int old_page_fullness = i % Page::kPageSize; 602 int old_page_fullness = i % Page::kPageSize;
605 int page_fullness = (i + kSmallFixedArraySize) % Page::kPageSize; 603 int page_fullness = (i + kSmallFixedArraySize) % Page::kPageSize;
606 if (page_fullness < old_page_fullness || 604 if (page_fullness < old_page_fullness ||
607 page_fullness > Page::kObjectAreaSize) { 605 page_fullness > Page::kObjectAreaSize) {
608 i = RoundUp(i, Page::kPageSize); 606 i = RoundUp(i, Page::kPageSize);
609 pointer_last = NULL; 607 pointer_last = NULL;
610 } 608 }
611 if (pointer_last != NULL) { 609 if (pointer_last != NULL) {
612 CHECK(reinterpret_cast<char*>(obj) == 610 CHECK(reinterpret_cast<char*>(obj) ==
613 reinterpret_cast<char*>(pointer_last) + kSmallFixedArraySize); 611 reinterpret_cast<char*>(pointer_last) + kSmallFixedArraySize);
614 } 612 }
615 pointer_last = obj; 613 pointer_last = obj;
616 } 614 }
617 615
618 Object* data_last = NULL; 616 Object* data_last = NULL;
619 for (int i = 0; 617 for (int i = 0;
620 i + kSmallStringSize <= paged_space_size; 618 i + kSmallStringSize <= paged_space_size;
621 i += kSmallStringSize) { 619 i += kSmallStringSize) {
622 Object* obj = Heap::AllocateRawAsciiString(kSmallStringLength, 620 Object* obj = HEAP->AllocateRawAsciiString(kSmallStringLength,
623 TENURED)->ToObjectChecked(); 621 TENURED)->ToObjectChecked();
624 int old_page_fullness = i % Page::kPageSize; 622 int old_page_fullness = i % Page::kPageSize;
625 int page_fullness = (i + kSmallStringSize) % Page::kPageSize; 623 int page_fullness = (i + kSmallStringSize) % Page::kPageSize;
626 if (page_fullness < old_page_fullness || 624 if (page_fullness < old_page_fullness ||
627 page_fullness > Page::kObjectAreaSize) { 625 page_fullness > Page::kObjectAreaSize) {
628 i = RoundUp(i, Page::kPageSize); 626 i = RoundUp(i, Page::kPageSize);
629 data_last = NULL; 627 data_last = NULL;
630 } 628 }
631 if (data_last != NULL) { 629 if (data_last != NULL) {
632 CHECK(reinterpret_cast<char*>(obj) == 630 CHECK(reinterpret_cast<char*>(obj) ==
633 reinterpret_cast<char*>(data_last) + kSmallStringSize); 631 reinterpret_cast<char*>(data_last) + kSmallStringSize);
634 } 632 }
635 data_last = obj; 633 data_last = obj;
636 } 634 }
637 635
638 Object* map_last = NULL; 636 Object* map_last = NULL;
639 for (int i = 0; i + kMapSize <= paged_space_size; i += kMapSize) { 637 for (int i = 0; i + kMapSize <= paged_space_size; i += kMapSize) {
640 Object* obj = Heap::AllocateMap(JS_OBJECT_TYPE, 638 Object* obj = HEAP->AllocateMap(JS_OBJECT_TYPE,
641 42 * kPointerSize)->ToObjectChecked(); 639 42 * kPointerSize)->ToObjectChecked();
642 int old_page_fullness = i % Page::kPageSize; 640 int old_page_fullness = i % Page::kPageSize;
643 int page_fullness = (i + kMapSize) % Page::kPageSize; 641 int page_fullness = (i + kMapSize) % Page::kPageSize;
644 if (page_fullness < old_page_fullness || 642 if (page_fullness < old_page_fullness ||
645 page_fullness > Page::kObjectAreaSize) { 643 page_fullness > Page::kObjectAreaSize) {
646 i = RoundUp(i, Page::kPageSize); 644 i = RoundUp(i, Page::kPageSize);
647 map_last = NULL; 645 map_last = NULL;
648 } 646 }
649 if (map_last != NULL) { 647 if (map_last != NULL) {
650 CHECK(reinterpret_cast<char*>(obj) == 648 CHECK(reinterpret_cast<char*>(obj) ==
651 reinterpret_cast<char*>(map_last) + kMapSize); 649 reinterpret_cast<char*>(map_last) + kMapSize);
652 } 650 }
653 map_last = obj; 651 map_last = obj;
654 } 652 }
655 653
656 if (size > Page::kObjectAreaSize) { 654 if (size > Page::kObjectAreaSize) {
657 // Support for reserving space in large object space is not there yet, 655 // Support for reserving space in large object space is not there yet,
658 // but using an always-allocate scope is fine for now. 656 // but using an always-allocate scope is fine for now.
659 AlwaysAllocateScope always; 657 AlwaysAllocateScope always;
660 int large_object_array_length = 658 int large_object_array_length =
661 (size - FixedArray::kHeaderSize) / kPointerSize; 659 (size - FixedArray::kHeaderSize) / kPointerSize;
662 Object* obj = Heap::AllocateFixedArray(large_object_array_length, 660 Object* obj = HEAP->AllocateFixedArray(large_object_array_length,
663 TENURED)->ToObjectChecked(); 661 TENURED)->ToObjectChecked();
664 CHECK(!obj->IsFailure()); 662 CHECK(!obj->IsFailure());
665 } 663 }
666 } 664 }
667 } 665 }
668 666
669 667
670 TEST(TestThatAlwaysSucceeds) { 668 TEST(TestThatAlwaysSucceeds) {
671 } 669 }
672 670
673 671
674 TEST(TestThatAlwaysFails) { 672 TEST(TestThatAlwaysFails) {
675 bool ArtificialFailure = false; 673 bool ArtificialFailure = false;
676 CHECK(ArtificialFailure); 674 CHECK(ArtificialFailure);
677 } 675 }
678 676
679 677
680 DEPENDENT_TEST(DependentTestThatAlwaysFails, TestThatAlwaysSucceeds) { 678 DEPENDENT_TEST(DependentTestThatAlwaysFails, TestThatAlwaysSucceeds) {
681 bool ArtificialFailure2 = false; 679 bool ArtificialFailure2 = false;
682 CHECK(ArtificialFailure2); 680 CHECK(ArtificialFailure2);
683 } 681 }
OLD | NEW

Powered by Google App Engine
This is Rietveld 408576698