| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 | 2 |
| 3 #include <stdlib.h> | 3 #include <stdlib.h> |
| 4 | 4 |
| 5 #include "v8.h" | 5 #include "v8.h" |
| 6 | 6 |
| 7 #include "compilation-cache.h" | 7 #include "compilation-cache.h" |
| 8 #include "execution.h" | 8 #include "execution.h" |
| 9 #include "factory.h" | 9 #include "factory.h" |
| 10 #include "macro-assembler.h" | 10 #include "macro-assembler.h" |
| (...skipping 202 matching lines...) |
| 213 CHECK_EQ(OLD_POINTER_SPACE, | 213 CHECK_EQ(OLD_POINTER_SPACE, |
| 214 Failure::RetryAfterGC(OLD_POINTER_SPACE)->allocation_space()); | 214 Failure::RetryAfterGC(OLD_POINTER_SPACE)->allocation_space()); |
| 215 CHECK(Failure::Exception()->IsFailure()); | 215 CHECK(Failure::Exception()->IsFailure()); |
| 216 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi()); | 216 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi()); |
| 217 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi()); | 217 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi()); |
| 218 } | 218 } |
| 219 | 219 |
| 220 | 220 |
| 221 TEST(GarbageCollection) { | 221 TEST(GarbageCollection) { |
| 222 InitializeVM(); | 222 InitializeVM(); |
| 223 Isolate* isolate = Isolate::Current(); |
| 224 Heap* heap = isolate->heap(); |
| 225 Factory* factory = isolate->factory(); |
| 223 | 226 |
| 224 v8::HandleScope sc; | 227 v8::HandleScope sc; |
| 225 // Check GC. | 228 // Check GC. |
| 226 HEAP->CollectGarbage(NEW_SPACE); | 229 heap->CollectGarbage(NEW_SPACE); |
| 227 | 230 |
| 228 Handle<String> name = FACTORY->LookupUtf8Symbol("theFunction"); | 231 Handle<String> name = factory->LookupUtf8Symbol("theFunction"); |
| 229 Handle<String> prop_name = FACTORY->LookupUtf8Symbol("theSlot"); | 232 Handle<String> prop_name = factory->LookupUtf8Symbol("theSlot"); |
| 230 Handle<String> prop_namex = FACTORY->LookupUtf8Symbol("theSlotx"); | 233 Handle<String> prop_namex = factory->LookupUtf8Symbol("theSlotx"); |
| 231 Handle<String> obj_name = FACTORY->LookupUtf8Symbol("theObject"); | 234 Handle<String> obj_name = factory->LookupUtf8Symbol("theObject"); |
| 232 | 235 |
| 233 { | 236 { |
| 234 v8::HandleScope inner_scope; | 237 HandleScope inner_scope(isolate); |
| 235 // Allocate a function and keep it in global object's property. | 238 // Allocate a function and keep it in global object's property. |
| 236 Handle<JSFunction> function = | 239 Handle<JSFunction> function = |
| 237 FACTORY->NewFunction(name, FACTORY->undefined_value()); | 240 factory->NewFunction(name, factory->undefined_value()); |
| 238 Handle<Map> initial_map = | 241 Handle<Map> initial_map = |
| 239 FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); | 242 factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); |
| 240 function->set_initial_map(*initial_map); | 243 function->set_initial_map(*initial_map); |
| 241 Isolate::Current()->context()->global_object()->SetProperty( | 244 Isolate::Current()->context()->global_object()->SetProperty( |
| 242 *name, *function, NONE, kNonStrictMode)->ToObjectChecked(); | 245 *name, *function, NONE, kNonStrictMode)->ToObjectChecked(); |
| 243 // Allocate an object. Unrooted after leaving the scope. | 246 // Allocate an object. Unrooted after leaving the scope. |
| 244 Handle<JSObject> obj = FACTORY->NewJSObject(function); | 247 Handle<JSObject> obj = factory->NewJSObject(function); |
| 245 obj->SetProperty( | 248 obj->SetProperty( |
| 246 *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked(); | 249 *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked(); |
| 247 obj->SetProperty( | 250 obj->SetProperty( |
| 248 *prop_namex, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked(); | 251 *prop_namex, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked(); |
| 249 | 252 |
| 250 CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name)); | 253 CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name)); |
| 251 CHECK_EQ(Smi::FromInt(24), obj->GetProperty(*prop_namex)); | 254 CHECK_EQ(Smi::FromInt(24), obj->GetProperty(*prop_namex)); |
| 252 } | 255 } |
| 253 | 256 |
| 254 HEAP->CollectGarbage(NEW_SPACE); | 257 heap->CollectGarbage(NEW_SPACE); |
| 255 | 258 |
| 256 // Function should be alive. | 259 // Function should be alive. |
| 257 CHECK(Isolate::Current()->context()->global_object()-> | 260 CHECK(Isolate::Current()->context()->global_object()-> |
| 258 HasLocalProperty(*name)); | 261 HasLocalProperty(*name)); |
| 259 // Check function is retained. | 262 // Check function is retained. |
| 260 Object* func_value = Isolate::Current()->context()->global_object()-> | 263 Object* func_value = Isolate::Current()->context()->global_object()-> |
| 261 GetProperty(*name)->ToObjectChecked(); | 264 GetProperty(*name)->ToObjectChecked(); |
| 262 CHECK(func_value->IsJSFunction()); | 265 CHECK(func_value->IsJSFunction()); |
| 263 Handle<JSFunction> function(JSFunction::cast(func_value)); | 266 Handle<JSFunction> function(JSFunction::cast(func_value)); |
| 264 | 267 |
| 265 { | 268 { |
| 266 HandleScope inner_scope; | 269 HandleScope inner_scope(isolate); |
| 267 // Allocate another object, make it reachable from global. | 270 // Allocate another object, make it reachable from global. |
| 268 Handle<JSObject> obj = FACTORY->NewJSObject(function); | 271 Handle<JSObject> obj = factory->NewJSObject(function); |
| 269 Isolate::Current()->context()->global_object()->SetProperty( | 272 Isolate::Current()->context()->global_object()->SetProperty( |
| 270 *obj_name, *obj, NONE, kNonStrictMode)->ToObjectChecked(); | 273 *obj_name, *obj, NONE, kNonStrictMode)->ToObjectChecked(); |
| 271 obj->SetProperty( | 274 obj->SetProperty( |
| 272 *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked(); | 275 *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked(); |
| 273 } | 276 } |
| 274 | 277 |
| 275 // After gc, it should survive. | 278 // After gc, it should survive. |
| 276 HEAP->CollectGarbage(NEW_SPACE); | 279 heap->CollectGarbage(NEW_SPACE); |
| 277 | 280 |
| 278 CHECK(Isolate::Current()->context()->global_object()-> | 281 CHECK(Isolate::Current()->context()->global_object()-> |
| 279 HasLocalProperty(*obj_name)); | 282 HasLocalProperty(*obj_name)); |
| 280 CHECK(Isolate::Current()->context()->global_object()-> | 283 CHECK(Isolate::Current()->context()->global_object()-> |
| 281 GetProperty(*obj_name)->ToObjectChecked()->IsJSObject()); | 284 GetProperty(*obj_name)->ToObjectChecked()->IsJSObject()); |
| 282 Object* obj = Isolate::Current()->context()->global_object()-> | 285 Object* obj = Isolate::Current()->context()->global_object()-> |
| 283 GetProperty(*obj_name)->ToObjectChecked(); | 286 GetProperty(*obj_name)->ToObjectChecked(); |
| 284 JSObject* js_obj = JSObject::cast(obj); | 287 JSObject* js_obj = JSObject::cast(obj); |
| 285 CHECK_EQ(Smi::FromInt(23), js_obj->GetProperty(*prop_name)); | 288 CHECK_EQ(Smi::FromInt(23), js_obj->GetProperty(*prop_name)); |
| 286 } | 289 } |
| (...skipping 25 matching lines...) |
| 312 | 315 |
| 313 v8::HandleScope scope; | 316 v8::HandleScope scope; |
| 314 const char* name = "Kasper the spunky"; | 317 const char* name = "Kasper the spunky"; |
| 315 Handle<String> string = FACTORY->NewStringFromAscii(CStrVector(name)); | 318 Handle<String> string = FACTORY->NewStringFromAscii(CStrVector(name)); |
| 316 CHECK_EQ(StrLength(name), string->length()); | 319 CHECK_EQ(StrLength(name), string->length()); |
| 317 } | 320 } |
| 318 | 321 |
| 319 | 322 |
| 320 TEST(GlobalHandles) { | 323 TEST(GlobalHandles) { |
| 321 InitializeVM(); | 324 InitializeVM(); |
| 322 GlobalHandles* global_handles = Isolate::Current()->global_handles(); | 325 Isolate* isolate = Isolate::Current(); |
| 326 Heap* heap = isolate->heap(); |
| 327 Factory* factory = isolate->factory(); |
| 328 GlobalHandles* global_handles = isolate->global_handles(); |
| 323 | 329 |
| 324 Handle<Object> h1; | 330 Handle<Object> h1; |
| 325 Handle<Object> h2; | 331 Handle<Object> h2; |
| 326 Handle<Object> h3; | 332 Handle<Object> h3; |
| 327 Handle<Object> h4; | 333 Handle<Object> h4; |
| 328 | 334 |
| 329 { | 335 { |
| 330 HandleScope scope; | 336 HandleScope scope(isolate); |
| 331 | 337 |
| 332 Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk")); | 338 Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk")); |
| 333 Handle<Object> u = FACTORY->NewNumber(1.12344); | 339 Handle<Object> u = factory->NewNumber(1.12344); |
| 334 | 340 |
| 335 h1 = global_handles->Create(*i); | 341 h1 = global_handles->Create(*i); |
| 336 h2 = global_handles->Create(*u); | 342 h2 = global_handles->Create(*u); |
| 337 h3 = global_handles->Create(*i); | 343 h3 = global_handles->Create(*i); |
| 338 h4 = global_handles->Create(*u); | 344 h4 = global_handles->Create(*u); |
| 339 } | 345 } |
| 340 | 346 |
| 341 // after gc, it should survive | 347 // after gc, it should survive |
| 342 HEAP->CollectGarbage(NEW_SPACE); | 348 heap->CollectGarbage(NEW_SPACE); |
| 343 | 349 |
| 344 CHECK((*h1)->IsString()); | 350 CHECK((*h1)->IsString()); |
| 345 CHECK((*h2)->IsHeapNumber()); | 351 CHECK((*h2)->IsHeapNumber()); |
| 346 CHECK((*h3)->IsString()); | 352 CHECK((*h3)->IsString()); |
| 347 CHECK((*h4)->IsHeapNumber()); | 353 CHECK((*h4)->IsHeapNumber()); |
| 348 | 354 |
| 349 CHECK_EQ(*h3, *h1); | 355 CHECK_EQ(*h3, *h1); |
| 350 global_handles->Destroy(h1.location()); | 356 global_handles->Destroy(h1.location()); |
| 351 global_handles->Destroy(h3.location()); | 357 global_handles->Destroy(h3.location()); |
| 352 | 358 |
| 353 CHECK_EQ(*h4, *h2); | 359 CHECK_EQ(*h4, *h2); |
| 354 global_handles->Destroy(h2.location()); | 360 global_handles->Destroy(h2.location()); |
| 355 global_handles->Destroy(h4.location()); | 361 global_handles->Destroy(h4.location()); |
| 356 } | 362 } |
| 357 | 363 |
| 358 | 364 |
| 359 static bool WeakPointerCleared = false; | 365 static bool WeakPointerCleared = false; |
| 360 | 366 |
| 361 static void TestWeakGlobalHandleCallback(v8::Isolate* isolate, | 367 static void TestWeakGlobalHandleCallback(v8::Isolate* isolate, |
| 362 v8::Persistent<v8::Value> handle, | 368 v8::Persistent<v8::Value> handle, |
| 363 void* id) { | 369 void* id) { |
| 364 if (1234 == reinterpret_cast<intptr_t>(id)) WeakPointerCleared = true; | 370 if (1234 == reinterpret_cast<intptr_t>(id)) WeakPointerCleared = true; |
| 365 handle.Dispose(isolate); | 371 handle.Dispose(isolate); |
| 366 } | 372 } |
| 367 | 373 |
| 368 | 374 |
| 369 TEST(WeakGlobalHandlesScavenge) { | 375 TEST(WeakGlobalHandlesScavenge) { |
| 370 InitializeVM(); | 376 InitializeVM(); |
| 371 GlobalHandles* global_handles = Isolate::Current()->global_handles(); | 377 Isolate* isolate = Isolate::Current(); |
| 378 Heap* heap = isolate->heap(); |
| 379 Factory* factory = isolate->factory(); |
| 380 GlobalHandles* global_handles = isolate->global_handles(); |
| 372 | 381 |
| 373 WeakPointerCleared = false; | 382 WeakPointerCleared = false; |
| 374 | 383 |
| 375 Handle<Object> h1; | 384 Handle<Object> h1; |
| 376 Handle<Object> h2; | 385 Handle<Object> h2; |
| 377 | 386 |
| 378 { | 387 { |
| 379 HandleScope scope; | 388 HandleScope scope(isolate); |
| 380 | 389 |
| 381 Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk")); | 390 Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk")); |
| 382 Handle<Object> u = FACTORY->NewNumber(1.12344); | 391 Handle<Object> u = factory->NewNumber(1.12344); |
| 383 | 392 |
| 384 h1 = global_handles->Create(*i); | 393 h1 = global_handles->Create(*i); |
| 385 h2 = global_handles->Create(*u); | 394 h2 = global_handles->Create(*u); |
| 386 } | 395 } |
| 387 | 396 |
| 388 global_handles->MakeWeak(h2.location(), | 397 global_handles->MakeWeak(h2.location(), |
| 389 reinterpret_cast<void*>(1234), | 398 reinterpret_cast<void*>(1234), |
| 390 NULL, | 399 NULL, |
| 391 &TestWeakGlobalHandleCallback); | 400 &TestWeakGlobalHandleCallback); |
| 392 | 401 |
| 393 // Scavenge treats weak pointers as normal roots. | 402 // Scavenge treats weak pointers as normal roots. |
| 394 HEAP->PerformScavenge(); | 403 heap->PerformScavenge(); |
| 395 | 404 |
| 396 CHECK((*h1)->IsString()); | 405 CHECK((*h1)->IsString()); |
| 397 CHECK((*h2)->IsHeapNumber()); | 406 CHECK((*h2)->IsHeapNumber()); |
| 398 | 407 |
| 399 CHECK(!WeakPointerCleared); | 408 CHECK(!WeakPointerCleared); |
| 400 CHECK(!global_handles->IsNearDeath(h2.location())); | 409 CHECK(!global_handles->IsNearDeath(h2.location())); |
| 401 CHECK(!global_handles->IsNearDeath(h1.location())); | 410 CHECK(!global_handles->IsNearDeath(h1.location())); |
| 402 | 411 |
| 403 global_handles->Destroy(h1.location()); | 412 global_handles->Destroy(h1.location()); |
| 404 global_handles->Destroy(h2.location()); | 413 global_handles->Destroy(h2.location()); |
| 405 } | 414 } |
| 406 | 415 |
| 407 | 416 |
| 408 TEST(WeakGlobalHandlesMark) { | 417 TEST(WeakGlobalHandlesMark) { |
| 409 InitializeVM(); | 418 InitializeVM(); |
| 410 GlobalHandles* global_handles = Isolate::Current()->global_handles(); | 419 Isolate* isolate = Isolate::Current(); |
| 420 Heap* heap = isolate->heap(); |
| 421 Factory* factory = isolate->factory(); |
| 422 GlobalHandles* global_handles = isolate->global_handles(); |
| 411 | 423 |
| 412 WeakPointerCleared = false; | 424 WeakPointerCleared = false; |
| 413 | 425 |
| 414 Handle<Object> h1; | 426 Handle<Object> h1; |
| 415 Handle<Object> h2; | 427 Handle<Object> h2; |
| 416 | 428 |
| 417 { | 429 { |
| 418 HandleScope scope; | 430 HandleScope scope(isolate); |
| 419 | 431 |
| 420 Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk")); | 432 Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk")); |
| 421 Handle<Object> u = FACTORY->NewNumber(1.12344); | 433 Handle<Object> u = factory->NewNumber(1.12344); |
| 422 | 434 |
| 423 h1 = global_handles->Create(*i); | 435 h1 = global_handles->Create(*i); |
| 424 h2 = global_handles->Create(*u); | 436 h2 = global_handles->Create(*u); |
| 425 } | 437 } |
| 426 | 438 |
| 427 // Make sure the objects are promoted. | 439 // Make sure the objects are promoted. |
| 428 HEAP->CollectGarbage(OLD_POINTER_SPACE); | 440 heap->CollectGarbage(OLD_POINTER_SPACE); |
| 429 HEAP->CollectGarbage(NEW_SPACE); | 441 heap->CollectGarbage(NEW_SPACE); |
| 430 CHECK(!HEAP->InNewSpace(*h1) && !HEAP->InNewSpace(*h2)); | 442 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2)); |
| 431 | 443 |
| 432 global_handles->MakeWeak(h2.location(), | 444 global_handles->MakeWeak(h2.location(), |
| 433 reinterpret_cast<void*>(1234), | 445 reinterpret_cast<void*>(1234), |
| 434 NULL, | 446 NULL, |
| 435 &TestWeakGlobalHandleCallback); | 447 &TestWeakGlobalHandleCallback); |
| 436 CHECK(!GlobalHandles::IsNearDeath(h1.location())); | 448 CHECK(!GlobalHandles::IsNearDeath(h1.location())); |
| 437 CHECK(!GlobalHandles::IsNearDeath(h2.location())); | 449 CHECK(!GlobalHandles::IsNearDeath(h2.location())); |
| 438 | 450 |
| 439 // Incremental marking potentially marked handles before they turned weak. | 451 // Incremental marking potentially marked handles before they turned weak. |
| 440 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); | 452 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); |
| 441 | 453 |
| 442 CHECK((*h1)->IsString()); | 454 CHECK((*h1)->IsString()); |
| 443 | 455 |
| 444 CHECK(WeakPointerCleared); | 456 CHECK(WeakPointerCleared); |
| 445 CHECK(!GlobalHandles::IsNearDeath(h1.location())); | 457 CHECK(!GlobalHandles::IsNearDeath(h1.location())); |
| 446 | 458 |
| 447 global_handles->Destroy(h1.location()); | 459 global_handles->Destroy(h1.location()); |
| 448 } | 460 } |
| 449 | 461 |
| 450 | 462 |
| 451 TEST(DeleteWeakGlobalHandle) { | 463 TEST(DeleteWeakGlobalHandle) { |
| 452 InitializeVM(); | 464 InitializeVM(); |
| 453 GlobalHandles* global_handles = Isolate::Current()->global_handles(); | 465 Isolate* isolate = Isolate::Current(); |
| 466 Heap* heap = isolate->heap(); |
| 467 Factory* factory = isolate->factory(); |
| 468 GlobalHandles* global_handles = isolate->global_handles(); |
| 454 | 469 |
| 455 WeakPointerCleared = false; | 470 WeakPointerCleared = false; |
| 456 | 471 |
| 457 Handle<Object> h; | 472 Handle<Object> h; |
| 458 | 473 |
| 459 { | 474 { |
| 460 HandleScope scope; | 475 HandleScope scope(isolate); |
| 461 | 476 |
| 462 Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk")); | 477 Handle<Object> i = factory->NewStringFromAscii(CStrVector("fisk")); |
| 463 h = global_handles->Create(*i); | 478 h = global_handles->Create(*i); |
| 464 } | 479 } |
| 465 | 480 |
| 466 global_handles->MakeWeak(h.location(), | 481 global_handles->MakeWeak(h.location(), |
| 467 reinterpret_cast<void*>(1234), | 482 reinterpret_cast<void*>(1234), |
| 468 NULL, | 483 NULL, |
| 469 &TestWeakGlobalHandleCallback); | 484 &TestWeakGlobalHandleCallback); |
| 470 | 485 |
| 471 // Scavenge does not recognize weak references. | 486 // Scavenge does not recognize weak references. |
| 472 HEAP->PerformScavenge(); | 487 heap->PerformScavenge(); |
| 473 | 488 |
| 474 CHECK(!WeakPointerCleared); | 489 CHECK(!WeakPointerCleared); |
| 475 | 490 |
| 476 // Mark-compact treats weak references properly. | 491 // Mark-compact treats weak references properly. |
| 477 HEAP->CollectGarbage(OLD_POINTER_SPACE); | 492 heap->CollectGarbage(OLD_POINTER_SPACE); |
| 478 | 493 |
| 479 CHECK(WeakPointerCleared); | 494 CHECK(WeakPointerCleared); |
| 480 } | 495 } |
| 481 | 496 |
| 482 | 497 |
| 483 static const char* not_so_random_string_table[] = { | 498 static const char* not_so_random_string_table[] = { |
| 484 "abstract", | 499 "abstract", |
| 485 "boolean", | 500 "boolean", |
| 486 "break", | 501 "break", |
| 487 "byte", | 502 "byte", |
| (...skipping 658 matching lines...) |
| 1146 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); | 1161 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); |
| 1147 CHECK(!function->is_compiled() || function->IsOptimized()); | 1162 CHECK(!function->is_compiled() || function->IsOptimized()); |
| 1148 } | 1163 } |
| 1149 | 1164 |
| 1150 | 1165 |
| 1151 TEST(TestCodeFlushingIncrementalAbort) { | 1166 TEST(TestCodeFlushingIncrementalAbort) { |
| 1152 // If we do not flush code this test is invalid. | 1167 // If we do not flush code this test is invalid. |
| 1153 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return; | 1168 if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return; |
| 1154 i::FLAG_allow_natives_syntax = true; | 1169 i::FLAG_allow_natives_syntax = true; |
| 1155 InitializeVM(); | 1170 InitializeVM(); |
| 1171 Isolate* isolate = Isolate::Current(); |
| 1172 Heap* heap = isolate->heap(); |
| 1156 v8::HandleScope scope; | 1173 v8::HandleScope scope; |
| 1157 const char* source = "function foo() {" | 1174 const char* source = "function foo() {" |
| 1158 " var x = 42;" | 1175 " var x = 42;" |
| 1159 " var y = 42;" | 1176 " var y = 42;" |
| 1160 " var z = x + y;" | 1177 " var z = x + y;" |
| 1161 "};" | 1178 "};" |
| 1162 "foo()"; | 1179 "foo()"; |
| 1163 Handle<String> foo_name = FACTORY->LookupUtf8Symbol("foo"); | 1180 Handle<String> foo_name = FACTORY->LookupUtf8Symbol("foo"); |
| 1164 | 1181 |
| 1165 // This compile will add the code to the compilation cache. | 1182 // This compile will add the code to the compilation cache. |
| 1166 { v8::HandleScope scope; | 1183 { v8::HandleScope scope; |
| 1167 CompileRun(source); | 1184 CompileRun(source); |
| 1168 } | 1185 } |
| 1169 | 1186 |
| 1170 // Check function is compiled. | 1187 // Check function is compiled. |
| 1171 Object* func_value = Isolate::Current()->context()->global_object()-> | 1188 Object* func_value = Isolate::Current()->context()->global_object()-> |
| 1172 GetProperty(*foo_name)->ToObjectChecked(); | 1189 GetProperty(*foo_name)->ToObjectChecked(); |
| 1173 CHECK(func_value->IsJSFunction()); | 1190 CHECK(func_value->IsJSFunction()); |
| 1174 Handle<JSFunction> function(JSFunction::cast(func_value)); | 1191 Handle<JSFunction> function(JSFunction::cast(func_value)); |
| 1175 CHECK(function->shared()->is_compiled()); | 1192 CHECK(function->shared()->is_compiled()); |
| 1176 | 1193 |
| 1177 // The code will survive at least two GCs. | 1194 // The code will survive at least two GCs. |
| 1178 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); | 1195 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); |
| 1179 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); | 1196 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); |
| 1180 CHECK(function->shared()->is_compiled()); | 1197 CHECK(function->shared()->is_compiled()); |
| 1181 | 1198 |
| 1182 // Bump the code age so that flushing is triggered. | 1199 // Bump the code age so that flushing is triggered. |
| 1183 const int kAgingThreshold = 6; | 1200 const int kAgingThreshold = 6; |
| 1184 for (int i = 0; i < kAgingThreshold; i++) { | 1201 for (int i = 0; i < kAgingThreshold; i++) { |
| 1185 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); | 1202 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); |
| 1186 } | 1203 } |
| 1187 | 1204 |
| 1188 // Simulate incremental marking so that the function is enqueued as | 1205 // Simulate incremental marking so that the function is enqueued as |
| 1189 // a code flushing candidate. | 1206 // a code flushing candidate. |
| 1190 SimulateIncrementalMarking(); | 1207 SimulateIncrementalMarking(); |
| 1191 | 1208 |
| 1192 // Enable the debugger and add a breakpoint while incremental marking | 1209 // Enable the debugger and add a breakpoint while incremental marking |
| 1193 // is running so that incremental marking aborts and code flushing is | 1210 // is running so that incremental marking aborts and code flushing is |
| 1194 // disabled. | 1211 // disabled. |
| 1195 int position = 0; | 1212 int position = 0; |
| 1196 Handle<Object> breakpoint_object(Smi::FromInt(0)); | 1213 Handle<Object> breakpoint_object(Smi::FromInt(0)); |
| 1197 ISOLATE->debug()->SetBreakPoint(function, breakpoint_object, &position); | 1214 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position); |
| 1198 ISOLATE->debug()->ClearAllBreakPoints(); | 1215 isolate->debug()->ClearAllBreakPoints(); |
| 1199 | 1216 |
| 1200 // Force optimization now that code flushing is disabled. | 1217 // Force optimization now that code flushing is disabled. |
| 1201 { v8::HandleScope scope; | 1218 { v8::HandleScope scope; |
| 1202 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();"); | 1219 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();"); |
| 1203 } | 1220 } |
| 1204 | 1221 |
| 1205 // Simulate one final GC to make sure the candidate queue is sane. | 1222 // Simulate one final GC to make sure the candidate queue is sane. |
| 1206 HEAP->CollectAllGarbage(Heap::kNoGCFlags); | 1223 heap->CollectAllGarbage(Heap::kNoGCFlags); |
| 1207 CHECK(function->shared()->is_compiled() || !function->IsOptimized()); | 1224 CHECK(function->shared()->is_compiled() || !function->IsOptimized()); |
| 1208 CHECK(function->is_compiled() || !function->IsOptimized()); | 1225 CHECK(function->is_compiled() || !function->IsOptimized()); |
| 1209 } | 1226 } |
| 1210 | 1227 |
| 1211 | 1228 |
| 1212 // Count the number of native contexts in the weak list of native contexts. | 1229 // Count the number of native contexts in the weak list of native contexts. |
| 1213 int CountNativeContexts() { | 1230 int CountNativeContexts() { |
| 1214 int count = 0; | 1231 int count = 0; |
| 1215 Object* object = HEAP->native_contexts_list(); | 1232 Object* object = HEAP->native_contexts_list(); |
| 1216 while (!object->IsUndefined()) { | 1233 while (!object->IsUndefined()) { |
| (...skipping 20 matching lines...) |
| 1237 | 1254 |
| 1238 TEST(TestInternalWeakLists) { | 1255 TEST(TestInternalWeakLists) { |
| 1239 v8::V8::Initialize(); | 1256 v8::V8::Initialize(); |
| 1240 | 1257 |
| 1241 // Some flags turn Scavenge collections into Mark-sweep collections | 1258 // Some flags turn Scavenge collections into Mark-sweep collections |
| 1242 // and hence are incompatible with this test case. | 1259 // and hence are incompatible with this test case. |
| 1243 if (FLAG_gc_global || FLAG_stress_compaction) return; | 1260 if (FLAG_gc_global || FLAG_stress_compaction) return; |
| 1244 | 1261 |
| 1245 static const int kNumTestContexts = 10; | 1262 static const int kNumTestContexts = 10; |
| 1246 | 1263 |
| 1264 Isolate* isolate = Isolate::Current(); |
| 1265 Heap* heap = isolate->heap(); |
| 1247 v8::HandleScope scope; | 1266 v8::HandleScope scope; |
| 1248 v8::Persistent<v8::Context> ctx[kNumTestContexts]; | 1267 v8::Persistent<v8::Context> ctx[kNumTestContexts]; |
| 1249 | 1268 |
| 1250 CHECK_EQ(0, CountNativeContexts()); | 1269 CHECK_EQ(0, CountNativeContexts()); |
| 1251 | 1270 |
| 1252 // Create a number of global contexts which get linked together. | 1271 // Create a number of global contexts which get linked together. |
| 1253 for (int i = 0; i < kNumTestContexts; i++) { | 1272 for (int i = 0; i < kNumTestContexts; i++) { |
| 1254 ctx[i] = v8::Context::New(); | 1273 ctx[i] = v8::Context::New(); |
| 1255 | 1274 |
| 1256 bool opt = (FLAG_always_opt && i::V8::UseCrankshaft()); | 1275 bool opt = (FLAG_always_opt && i::V8::UseCrankshaft()); |
| (...skipping 26 matching lines...) |
| 1283 // Remove function f1. | 1302 // Remove function f1. |
| 1284 CompileRun("f1=null"); | 1303 CompileRun("f1=null"); |
| 1285 | 1304 |
| 1286 // Scavenge treats these references as strong. | 1305 // Scavenge treats these references as strong. |
| 1287 for (int j = 0; j < 10; j++) { | 1306 for (int j = 0; j < 10; j++) { |
| 1288 HEAP->PerformScavenge(); | 1307 HEAP->PerformScavenge(); |
| 1289 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i])); | 1308 CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i])); |
| 1290 } | 1309 } |
| 1291 | 1310 |
| 1292 // Mark compact handles the weak references. | 1311 // Mark compact handles the weak references. |
| 1293 ISOLATE->compilation_cache()->Clear(); | 1312 isolate->compilation_cache()->Clear(); |
| 1294 HEAP->CollectAllGarbage(Heap::kNoGCFlags); | 1313 heap->CollectAllGarbage(Heap::kNoGCFlags); |
| 1295 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i])); | 1314 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i])); |
| 1296 | 1315 |
| 1297 // Get rid of f3 and f5 in the same way. | 1316 // Get rid of f3 and f5 in the same way. |
| 1298 CompileRun("f3=null"); | 1317 CompileRun("f3=null"); |
| 1299 for (int j = 0; j < 10; j++) { | 1318 for (int j = 0; j < 10; j++) { |
| 1300 HEAP->PerformScavenge(); | 1319 HEAP->PerformScavenge(); |
| 1301 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i])); | 1320 CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i])); |
| 1302 } | 1321 } |
| 1303 HEAP->CollectAllGarbage(Heap::kNoGCFlags); | 1322 HEAP->CollectAllGarbage(Heap::kNoGCFlags); |
| 1304 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i])); | 1323 CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i])); |
| (...skipping 1155 matching lines...) |
| 2460 "} catch (e) { " | 2479 "} catch (e) { " |
| 2461 " error = e; " | 2480 " error = e; " |
| 2462 "} "; | 2481 "} "; |
| 2463 ReleaseStackTraceDataTest(source1); | 2482 ReleaseStackTraceDataTest(source1); |
| 2464 ReleaseStackTraceDataTest(source2); | 2483 ReleaseStackTraceDataTest(source2); |
| 2465 } | 2484 } |
| 2466 | 2485 |
| 2467 | 2486 |
| 2468 TEST(Regression144230) { | 2487 TEST(Regression144230) { |
| 2469 InitializeVM(); | 2488 InitializeVM(); |
| 2489 Isolate* isolate = Isolate::Current(); |
| 2470 v8::HandleScope scope; | 2490 v8::HandleScope scope; |
| 2471 | 2491 |
| 2472 // First make sure that the uninitialized CallIC stub is on a single page | 2492 // First make sure that the uninitialized CallIC stub is on a single page |
| 2473 // that will later be selected as an evacuation candidate. | 2493 // that will later be selected as an evacuation candidate. |
| 2474 { | 2494 { |
| 2475 v8::HandleScope inner_scope; | 2495 v8::HandleScope inner_scope; |
| 2476 AlwaysAllocateScope always_allocate; | 2496 AlwaysAllocateScope always_allocate; |
| 2477 SimulateFullSpace(HEAP->code_space()); | 2497 SimulateFullSpace(HEAP->code_space()); |
| 2478 ISOLATE->stub_cache()->ComputeCallInitialize(9, RelocInfo::CODE_TARGET); | 2498 isolate->stub_cache()->ComputeCallInitialize(9, RelocInfo::CODE_TARGET); |
| 2479 } | 2499 } |
| 2480 | 2500 |
| 2481 // Second compile a CallIC and execute it once so that it gets patched to | 2501 // Second compile a CallIC and execute it once so that it gets patched to |
| 2482 // the pre-monomorphic stub. These code objects are on yet another page. | 2502 // the pre-monomorphic stub. These code objects are on yet another page. |
| 2483 { | 2503 { |
| 2484 v8::HandleScope inner_scope; | 2504 v8::HandleScope inner_scope; |
| 2485 AlwaysAllocateScope always_allocate; | 2505 AlwaysAllocateScope always_allocate; |
| 2486 SimulateFullSpace(HEAP->code_space()); | 2506 SimulateFullSpace(HEAP->code_space()); |
| 2487 CompileRun("var o = { f:function(a,b,c,d,e,f,g,h,i) {}};" | 2507 CompileRun("var o = { f:function(a,b,c,d,e,f,g,h,i) {}};" |
| 2488 "function call() { o.f(1,2,3,4,5,6,7,8,9); };" | 2508 "function call() { o.f(1,2,3,4,5,6,7,8,9); };" |
| 2489 "call();"); | 2509 "call();"); |
| 2490 } | 2510 } |
| 2491 | 2511 |
| 2492 // Third we fill up the last page of the code space so that it does not get | 2512 // Third we fill up the last page of the code space so that it does not get |
| 2493 // chosen as an evacuation candidate. | 2513 // chosen as an evacuation candidate. |
| 2494 { | 2514 { |
| 2495 v8::HandleScope inner_scope; | 2515 v8::HandleScope inner_scope; |
| 2496 AlwaysAllocateScope always_allocate; | 2516 AlwaysAllocateScope always_allocate; |
| 2497 CompileRun("for (var i = 0; i < 2000; i++) {" | 2517 CompileRun("for (var i = 0; i < 2000; i++) {" |
| 2498 " eval('function f' + i + '() { return ' + i +'; };' +" | 2518 " eval('function f' + i + '() { return ' + i +'; };' +" |
| 2499 " 'f' + i + '();');" | 2519 " 'f' + i + '();');" |
| 2500 "}"); | 2520 "}"); |
| 2501 } | 2521 } |
| 2502 HEAP->CollectAllGarbage(Heap::kNoGCFlags); | 2522 HEAP->CollectAllGarbage(Heap::kNoGCFlags); |
| 2503 | 2523 |
| 2504 // Fourth is the tricky part. Make sure the code containing the CallIC is | 2524 // Fourth is the tricky part. Make sure the code containing the CallIC is |
| 2505 // visited first without clearing the IC. The shared function info is then | 2525 // visited first without clearing the IC. The shared function info is then |
| 2506 // visited later, causing the CallIC to be cleared. | 2526 // visited later, causing the CallIC to be cleared. |
| 2507 Handle<String> name = FACTORY->LookupUtf8Symbol("call"); | 2527 Handle<String> name = FACTORY->LookupUtf8Symbol("call"); |
| 2508 Handle<GlobalObject> global(ISOLATE->context()->global_object()); | 2528 Handle<GlobalObject> global(isolate->context()->global_object()); |
| 2509 MaybeObject* maybe_call = global->GetProperty(*name); | 2529 MaybeObject* maybe_call = global->GetProperty(*name); |
| 2510 JSFunction* call = JSFunction::cast(maybe_call->ToObjectChecked()); | 2530 JSFunction* call = JSFunction::cast(maybe_call->ToObjectChecked()); |
| 2511 USE(global->SetProperty(*name, Smi::FromInt(0), NONE, kNonStrictMode)); | 2531 USE(global->SetProperty(*name, Smi::FromInt(0), NONE, kNonStrictMode)); |
| 2512 ISOLATE->compilation_cache()->Clear(); | 2532 isolate->compilation_cache()->Clear(); |
| 2513 call->shared()->set_ic_age(HEAP->global_ic_age() + 1); | 2533 call->shared()->set_ic_age(HEAP->global_ic_age() + 1); |
| 2514 Handle<Object> call_code(call->code()); | 2534 Handle<Object> call_code(call->code()); |
| 2515 Handle<Object> call_function(call); | 2535 Handle<Object> call_function(call); |
| 2516 | 2536 |
| 2517 // Now we are ready to mess up the heap. | 2537 // Now we are ready to mess up the heap. |
| 2518 HEAP->CollectAllGarbage(Heap::kReduceMemoryFootprintMask); | 2538 HEAP->CollectAllGarbage(Heap::kReduceMemoryFootprintMask); |
| 2519 | 2539 |
| 2520 // Either heap verification caught the problem already or we go kaboom once | 2540 // Either heap verification caught the problem already or we go kaboom once |
| 2521 // the CallIC is executed the next time. | 2541 // the CallIC is executed the next time. |
| 2522 USE(global->SetProperty(*name, *call_function, NONE, kNonStrictMode)); | 2542 USE(global->SetProperty(*name, *call_function, NONE, kNonStrictMode)); |
| 2523 CompileRun("call();"); | 2543 CompileRun("call();"); |
| 2524 } | 2544 } |
| 2525 | 2545 |
| 2526 | 2546 |
| 2527 TEST(Regress159140) { | 2547 TEST(Regress159140) { |
| 2528 i::FLAG_allow_natives_syntax = true; | 2548 i::FLAG_allow_natives_syntax = true; |
| 2529 i::FLAG_flush_code_incrementally = true; | 2549 i::FLAG_flush_code_incrementally = true; |
| 2530 InitializeVM(); | 2550 InitializeVM(); |
| 2551 Isolate* isolate = Isolate::Current(); |
| 2552 Heap* heap = isolate->heap(); |
| 2531 v8::HandleScope scope; | 2553 v8::HandleScope scope; |
| 2532 | 2554 |
| 2533 // Perform one initial GC to enable code flushing. | 2555 // Perform one initial GC to enable code flushing. |
| 2534 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); | 2556 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); |
| 2535 | 2557 |
| 2536 // Prepare several closures that are all eligible for code flushing | 2558 // Prepare several closures that are all eligible for code flushing |
| 2537 // because all reachable ones are not optimized. Make sure that the | 2559 // because all reachable ones are not optimized. Make sure that the |
| 2538 // optimized code object is directly reachable through a handle so | 2560 // optimized code object is directly reachable through a handle so |
| 2539 // that it is marked black during incremental marking. | 2561 // that it is marked black during incremental marking. |
| 2540 Handle<Code> code; | 2562 Handle<Code> code; |
| 2541 { | 2563 { |
| 2542 HandleScope inner_scope; | 2564 HandleScope inner_scope(isolate); |
| 2543 CompileRun("function h(x) {}" | 2565 CompileRun("function h(x) {}" |
| 2544 "function mkClosure() {" | 2566 "function mkClosure() {" |
| 2545 " return function(x) { return x + 1; };" | 2567 " return function(x) { return x + 1; };" |
| 2546 "}" | 2568 "}" |
| 2547 "var f = mkClosure();" | 2569 "var f = mkClosure();" |
| 2548 "var g = mkClosure();" | 2570 "var g = mkClosure();" |
| 2549 "f(1); f(2);" | 2571 "f(1); f(2);" |
| 2550 "g(1); g(2);" | 2572 "g(1); g(2);" |
| 2551 "h(1); h(2);" | 2573 "h(1); h(2);" |
| 2552 "%OptimizeFunctionOnNextCall(f); f(3);" | 2574 "%OptimizeFunctionOnNextCall(f); f(3);" |
| (...skipping 17 matching lines...) |
| 2570 } | 2592 } |
| 2571 | 2593 |
| 2572 code = inner_scope.CloseAndEscape(Handle<Code>(f->code())); | 2594 code = inner_scope.CloseAndEscape(Handle<Code>(f->code())); |
| 2573 } | 2595 } |
| 2574 | 2596 |
| 2575 // Simulate incremental marking so that the functions are enqueued as | 2597 // Simulate incremental marking so that the functions are enqueued as |
| 2576 // code flushing candidates. Then optimize one function. Finally | 2598 // code flushing candidates. Then optimize one function. Finally |
| 2577 // finish the GC to complete code flushing. | 2599 // finish the GC to complete code flushing. |
| 2578 SimulateIncrementalMarking(); | 2600 SimulateIncrementalMarking(); |
| 2579 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);"); | 2601 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);"); |
| 2580 HEAP->CollectAllGarbage(Heap::kNoGCFlags); | 2602 heap->CollectAllGarbage(Heap::kNoGCFlags); |
| 2581 | 2603 |
| 2582 // Unoptimized code is missing and the deoptimizer will go ballistic. | 2604 // Unoptimized code is missing and the deoptimizer will go ballistic. |
| 2583 CompileRun("g('bozo');"); | 2605 CompileRun("g('bozo');"); |
| 2584 } | 2606 } |
| 2585 | 2607 |
| 2586 | 2608 |
| 2587 TEST(Regress165495) { | 2609 TEST(Regress165495) { |
| 2588 i::FLAG_allow_natives_syntax = true; | 2610 i::FLAG_allow_natives_syntax = true; |
| 2589 i::FLAG_flush_code_incrementally = true; | 2611 i::FLAG_flush_code_incrementally = true; |
| 2590 InitializeVM(); | 2612 InitializeVM(); |
| 2613 Isolate* isolate = Isolate::Current(); |
| 2614 Heap* heap = isolate->heap(); |
| 2591 v8::HandleScope scope; | 2615 v8::HandleScope scope; |
| 2592 | 2616 |
| 2593 // Perform one initial GC to enable code flushing. | 2617 // Perform one initial GC to enable code flushing. |
| 2594 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); | 2618 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); |
| 2595 | 2619 |
| 2596 // Prepare an optimized closure so that the optimized code map will get | 2620 // Prepare an optimized closure so that the optimized code map will get |
| 2597 // populated. Then age the unoptimized code to trigger code flushing | 2621 // populated. Then age the unoptimized code to trigger code flushing |
| 2598 // but make sure the optimized code is unreachable. | 2622 // but make sure the optimized code is unreachable. |
| 2599 { | 2623 { |
| 2600 HandleScope inner_scope; | 2624 HandleScope inner_scope(isolate); |
| 2601 CompileRun("function mkClosure() {" | 2625 CompileRun("function mkClosure() {" |
| 2602 " return function(x) { return x + 1; };" | 2626 " return function(x) { return x + 1; };" |
| 2603 "}" | 2627 "}" |
| 2604 "var f = mkClosure();" | 2628 "var f = mkClosure();" |
| 2605 "f(1); f(2);" | 2629 "f(1); f(2);" |
| 2606 "%OptimizeFunctionOnNextCall(f); f(3);"); | 2630 "%OptimizeFunctionOnNextCall(f); f(3);"); |
| 2607 | 2631 |
| 2608 Handle<JSFunction> f = | 2632 Handle<JSFunction> f = |
| 2609 v8::Utils::OpenHandle( | 2633 v8::Utils::OpenHandle( |
| 2610 *v8::Handle<v8::Function>::Cast( | 2634 *v8::Handle<v8::Function>::Cast( |
| 2611 v8::Context::GetCurrent()->Global()->Get(v8_str("f")))); | 2635 v8::Context::GetCurrent()->Global()->Get(v8_str("f")))); |
| 2612 CHECK(f->is_compiled()); | 2636 CHECK(f->is_compiled()); |
| 2613 const int kAgingThreshold = 6; | 2637 const int kAgingThreshold = 6; |
| 2614 for (int i = 0; i < kAgingThreshold; i++) { | 2638 for (int i = 0; i < kAgingThreshold; i++) { |
| 2615 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); | 2639 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); |
| 2616 } | 2640 } |
| 2617 | 2641 |
| 2618 CompileRun("f = null;"); | 2642 CompileRun("f = null;"); |
| 2619 } | 2643 } |
| 2620 | 2644 |
| 2621 // Simulate incremental marking so that unoptimized code is flushed | 2645 // Simulate incremental marking so that unoptimized code is flushed |
| 2622 // even though it still is cached in the optimized code map. | 2646 // even though it still is cached in the optimized code map. |
| 2623 SimulateIncrementalMarking(); | 2647 SimulateIncrementalMarking(); |
| 2624 HEAP->CollectAllGarbage(Heap::kNoGCFlags); | 2648 heap->CollectAllGarbage(Heap::kNoGCFlags); |
| 2625 | 2649 |
| 2626 // Make a new closure that will get code installed from the code map. | 2650 // Make a new closure that will get code installed from the code map. |
| 2627 // Unoptimized code is missing and the deoptimizer will go ballistic. | 2651 // Unoptimized code is missing and the deoptimizer will go ballistic. |
| 2628 CompileRun("var g = mkClosure(); g('bozo');"); | 2652 CompileRun("var g = mkClosure(); g('bozo');"); |
| 2629 } | 2653 } |
| 2630 | 2654 |
| 2631 | 2655 |
| 2632 TEST(Regress169209) { | 2656 TEST(Regress169209) { |
| 2633 i::FLAG_stress_compaction = false; | 2657 i::FLAG_stress_compaction = false; |
| 2634 i::FLAG_allow_natives_syntax = true; | 2658 i::FLAG_allow_natives_syntax = true; |
| 2635 i::FLAG_flush_code_incrementally = true; | 2659 i::FLAG_flush_code_incrementally = true; |
| 2636 InitializeVM(); | 2660 InitializeVM(); |
| 2661 Isolate* isolate = Isolate::Current(); |
| 2662 Heap* heap = isolate->heap(); |
| 2637 v8::HandleScope scope; | 2663 v8::HandleScope scope; |
| 2638 | 2664 |
| 2639 // Perform one initial GC to enable code flushing. | 2665 // Perform one initial GC to enable code flushing. |
| 2640 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); | 2666 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); |
| 2641 | 2667 |
| 2642 // Prepare a shared function info eligible for code flushing for which | 2668 // Prepare a shared function info eligible for code flushing for which |
| 2643 // the unoptimized code will be replaced during optimization. | 2669 // the unoptimized code will be replaced during optimization. |
| 2644 Handle<SharedFunctionInfo> shared1; | 2670 Handle<SharedFunctionInfo> shared1; |
| 2645 { | 2671 { |
| 2646 HandleScope inner_scope; | 2672 HandleScope inner_scope(isolate); |
| 2647 CompileRun("function f() { return 'foobar'; }" | 2673 CompileRun("function f() { return 'foobar'; }" |
| 2648 "function g(x) { if (x) f(); }" | 2674 "function g(x) { if (x) f(); }" |
| 2649 "f();" | 2675 "f();" |
| 2650 "g(false);" | 2676 "g(false);" |
| 2651 "g(false);"); | 2677 "g(false);"); |
| 2652 | 2678 |
| 2653 Handle<JSFunction> f = | 2679 Handle<JSFunction> f = |
| 2654 v8::Utils::OpenHandle( | 2680 v8::Utils::OpenHandle( |
| 2655 *v8::Handle<v8::Function>::Cast( | 2681 *v8::Handle<v8::Function>::Cast( |
| 2656 v8::Context::GetCurrent()->Global()->Get(v8_str("f")))); | 2682 v8::Context::GetCurrent()->Global()->Get(v8_str("f")))); |
| 2657 CHECK(f->is_compiled()); | 2683 CHECK(f->is_compiled()); |
| 2658 const int kAgingThreshold = 6; | 2684 const int kAgingThreshold = 6; |
| 2659 for (int i = 0; i < kAgingThreshold; i++) { | 2685 for (int i = 0; i < kAgingThreshold; i++) { |
| 2660 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); | 2686 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); |
| 2661 } | 2687 } |
| 2662 | 2688 |
| 2663 shared1 = inner_scope.CloseAndEscape(handle(f->shared(), ISOLATE)); | 2689 shared1 = inner_scope.CloseAndEscape(handle(f->shared(), isolate)); |
| 2664 } | 2690 } |
| 2665 | 2691 |
| 2666 // Prepare a shared function info eligible for code flushing that will | 2692 // Prepare a shared function info eligible for code flushing that will |
| 2667 // represent the dangling tail of the candidate list. | 2693 // represent the dangling tail of the candidate list. |
| 2668 Handle<SharedFunctionInfo> shared2; | 2694 Handle<SharedFunctionInfo> shared2; |
| 2669 { | 2695 { |
| 2670 HandleScope inner_scope; | 2696 HandleScope inner_scope(isolate); |
| 2671 CompileRun("function flushMe() { return 0; }" | 2697 CompileRun("function flushMe() { return 0; }" |
| 2672 "flushMe(1);"); | 2698 "flushMe(1);"); |
| 2673 | 2699 |
| 2674 Handle<JSFunction> f = | 2700 Handle<JSFunction> f = |
| 2675 v8::Utils::OpenHandle( | 2701 v8::Utils::OpenHandle( |
| 2676 *v8::Handle<v8::Function>::Cast( | 2702 *v8::Handle<v8::Function>::Cast( |
| 2677 v8::Context::GetCurrent()->Global()->Get(v8_str("flushMe")))); | 2703 v8::Context::GetCurrent()->Global()->Get(v8_str("flushMe")))); |
| 2678 CHECK(f->is_compiled()); | 2704 CHECK(f->is_compiled()); |
| 2679 const int kAgingThreshold = 6; | 2705 const int kAgingThreshold = 6; |
| 2680 for (int i = 0; i < kAgingThreshold; i++) { | 2706 for (int i = 0; i < kAgingThreshold; i++) { |
| 2681 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); | 2707 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); |
| 2682 } | 2708 } |
| 2683 | 2709 |
| 2684 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), ISOLATE)); | 2710 shared2 = inner_scope.CloseAndEscape(handle(f->shared(), isolate)); |
| 2685 } | 2711 } |
| 2686 | 2712 |
| 2687 // Simulate incremental marking and collect code flushing candidates. | 2713 // Simulate incremental marking and collect code flushing candidates. |
| 2688 SimulateIncrementalMarking(); | 2714 SimulateIncrementalMarking(); |
| 2689 CHECK(shared1->code()->gc_metadata() != NULL); | 2715 CHECK(shared1->code()->gc_metadata() != NULL); |
| 2690 | 2716 |
| 2691 // Optimize function and make sure the unoptimized code is replaced. | 2717 // Optimize function and make sure the unoptimized code is replaced. |
| 2692 #ifdef DEBUG | 2718 #ifdef DEBUG |
| 2693 FLAG_stop_at = "f"; | 2719 FLAG_stop_at = "f"; |
| 2694 #endif | 2720 #endif |
| 2695 CompileRun("%OptimizeFunctionOnNextCall(g);" | 2721 CompileRun("%OptimizeFunctionOnNextCall(g);" |
| 2696 "g(false);"); | 2722 "g(false);"); |
| 2697 | 2723 |
| 2698 // Finish garbage collection cycle. | 2724 // Finish garbage collection cycle. |
| 2699 HEAP->CollectAllGarbage(Heap::kNoGCFlags); | 2725 heap->CollectAllGarbage(Heap::kNoGCFlags); |
| 2700 CHECK(shared1->code()->gc_metadata() == NULL); | 2726 CHECK(shared1->code()->gc_metadata() == NULL); |
| 2701 } | 2727 } |
| 2702 | 2728 |
| 2703 | 2729 |
| 2704 // Helper function that simulates a full new-space in the heap. | 2730 // Helper function that simulates a full new-space in the heap. |
| 2705 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space, | 2731 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space, |
| 2706 int extra_bytes) { | 2732 int extra_bytes) { |
| 2707 int space_remaining = static_cast<int>( | 2733 int space_remaining = static_cast<int>( |
| 2708 *space->allocation_limit_address() - *space->allocation_top_address()); | 2734 *space->allocation_limit_address() - *space->allocation_top_address()); |
| 2709 CHECK(space_remaining >= extra_bytes); | 2735 CHECK(space_remaining >= extra_bytes); |
| (...skipping 74 matching lines...) |
| 2784 v8::Script::Compile(mote_code_string)->Run(); | 2810 v8::Script::Compile(mote_code_string)->Run(); |
| 2785 } | 2811 } |
| 2786 | 2812 |
| 2787 | 2813 |
| 2788 TEST(Regress168801) { | 2814 TEST(Regress168801) { |
| 2789 i::FLAG_always_compact = true; | 2815 i::FLAG_always_compact = true; |
| 2790 i::FLAG_cache_optimized_code = false; | 2816 i::FLAG_cache_optimized_code = false; |
| 2791 i::FLAG_allow_natives_syntax = true; | 2817 i::FLAG_allow_natives_syntax = true; |
| 2792 i::FLAG_flush_code_incrementally = true; | 2818 i::FLAG_flush_code_incrementally = true; |
| 2793 InitializeVM(); | 2819 InitializeVM(); |
| 2820 Isolate* isolate = Isolate::Current(); |
| 2821 Heap* heap = isolate->heap(); |
| 2794 v8::HandleScope scope; | 2822 v8::HandleScope scope; |
| 2795 | 2823 |
| 2796 // Perform one initial GC to enable code flushing. | 2824 // Perform one initial GC to enable code flushing. |
| 2797 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); | 2825 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); |
| 2798 | 2826 |
| 2799 // Ensure the code ends up on an evacuation candidate. | 2827 // Ensure the code ends up on an evacuation candidate. |
| 2800 SimulateFullSpace(HEAP->code_space()); | 2828 SimulateFullSpace(heap->code_space()); |
| 2801 | 2829 |
| 2802 // Prepare an unoptimized function that is eligible for code flushing. | 2830 // Prepare an unoptimized function that is eligible for code flushing. |
| 2803 Handle<JSFunction> function; | 2831 Handle<JSFunction> function; |
| 2804 { | 2832 { |
| 2805 HandleScope inner_scope; | 2833 HandleScope inner_scope(isolate); |
| 2806 CompileRun("function mkClosure() {" | 2834 CompileRun("function mkClosure() {" |
| 2807 " return function(x) { return x + 1; };" | 2835 " return function(x) { return x + 1; };" |
| 2808 "}" | 2836 "}" |
| 2809 "var f = mkClosure();" | 2837 "var f = mkClosure();" |
| 2810 "f(1); f(2);"); | 2838 "f(1); f(2);"); |
| 2811 | 2839 |
| 2812 Handle<JSFunction> f = | 2840 Handle<JSFunction> f = |
| 2813 v8::Utils::OpenHandle( | 2841 v8::Utils::OpenHandle( |
| 2814 *v8::Handle<v8::Function>::Cast( | 2842 *v8::Handle<v8::Function>::Cast( |
| 2815 v8::Context::GetCurrent()->Global()->Get(v8_str("f")))); | 2843 v8::Context::GetCurrent()->Global()->Get(v8_str("f")))); |
| 2816 CHECK(f->is_compiled()); | 2844 CHECK(f->is_compiled()); |
| 2817 const int kAgingThreshold = 6; | 2845 const int kAgingThreshold = 6; |
| 2818 for (int i = 0; i < kAgingThreshold; i++) { | 2846 for (int i = 0; i < kAgingThreshold; i++) { |
| 2819 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); | 2847 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); |
| 2820 } | 2848 } |
| 2821 | 2849 |
| 2822 function = inner_scope.CloseAndEscape(handle(*f, ISOLATE)); | 2850 function = inner_scope.CloseAndEscape(handle(*f, isolate)); |
| 2823 } | 2851 } |
| 2824 | 2852 |
| 2825 // Simulate incremental marking so that the unoptimized function is enqueued as a | 2853 // Simulate incremental marking so that the unoptimized function is enqueued as a |
| 2826 // candidate for code flushing. The shared function info however will not be | 2854 // candidate for code flushing. The shared function info however will not be |
| 2827 // explicitly enqueued. | 2855 // explicitly enqueued. |
| 2828 SimulateIncrementalMarking(); | 2856 SimulateIncrementalMarking(); |
| 2829 | 2857 |
| 2830 // Now optimize the function so that it is taken off the candidate list. | 2858 // Now optimize the function so that it is taken off the candidate list. |
| 2831 { | 2859 { |
| 2832 HandleScope inner_scope; | 2860 HandleScope inner_scope(isolate); |
| 2833 CompileRun("%OptimizeFunctionOnNextCall(f); f(3);"); | 2861 CompileRun("%OptimizeFunctionOnNextCall(f); f(3);"); |
| 2834 } | 2862 } |
| 2835 | 2863 |
| 2836 // This cycle will bust the heap and subsequent cycles will go ballistic. | 2864 // This cycle will bust the heap and subsequent cycles will go ballistic. |
| 2837 HEAP->CollectAllGarbage(Heap::kNoGCFlags); | 2865 heap->CollectAllGarbage(Heap::kNoGCFlags); |
| 2838 HEAP->CollectAllGarbage(Heap::kNoGCFlags); | 2866 heap->CollectAllGarbage(Heap::kNoGCFlags); |
| 2839 } | 2867 } |
| 2840 | 2868 |
| 2841 | 2869 |
| 2842 TEST(Regress173458) { | 2870 TEST(Regress173458) { |
| 2843 i::FLAG_always_compact = true; | 2871 i::FLAG_always_compact = true; |
| 2844 i::FLAG_cache_optimized_code = false; | 2872 i::FLAG_cache_optimized_code = false; |
| 2845 i::FLAG_allow_natives_syntax = true; | 2873 i::FLAG_allow_natives_syntax = true; |
| 2846 i::FLAG_flush_code_incrementally = true; | 2874 i::FLAG_flush_code_incrementally = true; |
| 2847 InitializeVM(); | 2875 InitializeVM(); |
| 2876 Isolate* isolate = Isolate::Current(); |
| 2877 Heap* heap = isolate->heap(); |
| 2848 v8::HandleScope scope; | 2878 v8::HandleScope scope; |
| 2849 | 2879 |
| 2850 // Perform one initial GC to enable code flushing. | 2880 // Perform one initial GC to enable code flushing. |
| 2851 HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); | 2881 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); |
| 2852 | 2882 |
| 2853 // Ensure the code ends up on an evacuation candidate. | 2883 // Ensure the code ends up on an evacuation candidate. |
| 2854 SimulateFullSpace(HEAP->code_space()); | 2884 SimulateFullSpace(heap->code_space()); |
| 2855 | 2885 |
| 2856 // Prepare an unoptimized function that is eligible for code flushing. | 2886 // Prepare an unoptimized function that is eligible for code flushing. |
| 2857 Handle<JSFunction> function; | 2887 Handle<JSFunction> function; |
| 2858 { | 2888 { |
| 2859 HandleScope inner_scope; | 2889 HandleScope inner_scope(isolate); |
| 2860 CompileRun("function mkClosure() {" | 2890 CompileRun("function mkClosure() {" |
| 2861 " return function(x) { return x + 1; };" | 2891 " return function(x) { return x + 1; };" |
| 2862 "}" | 2892 "}" |
| 2863 "var f = mkClosure();" | 2893 "var f = mkClosure();" |
| 2864 "f(1); f(2);"); | 2894 "f(1); f(2);"); |
| 2865 | 2895 |
| 2866 Handle<JSFunction> f = | 2896 Handle<JSFunction> f = |
| 2867 v8::Utils::OpenHandle( | 2897 v8::Utils::OpenHandle( |
| 2868 *v8::Handle<v8::Function>::Cast( | 2898 *v8::Handle<v8::Function>::Cast( |
| 2869 v8::Context::GetCurrent()->Global()->Get(v8_str("f")))); | 2899 v8::Context::GetCurrent()->Global()->Get(v8_str("f")))); |
| 2870 CHECK(f->is_compiled()); | 2900 CHECK(f->is_compiled()); |
| 2871 const int kAgingThreshold = 6; | 2901 const int kAgingThreshold = 6; |
| 2872 for (int i = 0; i < kAgingThreshold; i++) { | 2902 for (int i = 0; i < kAgingThreshold; i++) { |
| 2873 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); | 2903 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); |
| 2874 } | 2904 } |
| 2875 | 2905 |
| 2876 function = inner_scope.CloseAndEscape(handle(*f, ISOLATE)); | 2906 function = inner_scope.CloseAndEscape(handle(*f, isolate)); |
| 2877 } | 2907 } |
| 2878 | 2908 |
| 2879 // Simulate incremental marking so that the unoptimized function is enqueued as a | 2909 // Simulate incremental marking so that the unoptimized function is enqueued as a |
| 2880 // candidate for code flushing. The shared function info however will not be | 2910 // candidate for code flushing. The shared function info however will not be |
| 2881 // explicitly enqueued. | 2911 // explicitly enqueued. |
| 2882 SimulateIncrementalMarking(); | 2912 SimulateIncrementalMarking(); |
| 2883 | 2913 |
| 2884 // Now enable the debugger which in turn will disable code flushing. | 2914 // Now enable the debugger which in turn will disable code flushing. |
| 2885 CHECK(ISOLATE->debug()->Load()); | 2915 CHECK(isolate->debug()->Load()); |
| 2886 | 2916 |
| 2887 // This cycle will bust the heap and subsequent cycles will go ballistic. | 2917 // This cycle will bust the heap and subsequent cycles will go ballistic. |
| 2888 HEAP->CollectAllGarbage(Heap::kNoGCFlags); | 2918 heap->CollectAllGarbage(Heap::kNoGCFlags); |
| 2889 HEAP->CollectAllGarbage(Heap::kNoGCFlags); | 2919 heap->CollectAllGarbage(Heap::kNoGCFlags); |
| 2890 } | 2920 } |
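For reference, the pattern this change applies throughout the test file is roughly the sketch below. It is a minimal illustration, not an excerpt from the CL: it assumes the file's existing includes and the v8::internal namespace, the helper name ExampleOfNewPattern is made up for this note, and every call shown is one that already appears in the diff above.

static void ExampleOfNewPattern() {
  // Old style: reach the heap, factory and isolate through the HEAP,
  // FACTORY and ISOLATE macros, which hide which isolate is used.
  // New style: name the isolate once and derive everything from it.
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();

  HandleScope scope(isolate);  // HandleScope now takes the isolate explicitly.
  Handle<String> name = factory->LookupUtf8Symbol("theFunction");
  heap->CollectGarbage(NEW_SPACE);
  USE(name);  // Keep the compiler quiet about the otherwise unused handle.
}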