1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/contexts.h" | 5 #include "src/contexts.h" |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/isolate-inl.h" | 9 #include "src/isolate-inl.h" |
10 | 10 |
(...skipping 390 matching lines...)
401 !context->IsNativeContext() && !context->IsWithContext()); | 401 !context->IsNativeContext() && !context->IsWithContext()); |
402 } | 402 } |
403 } while (follow_context_chain); | 403 } while (follow_context_chain); |
404 | 404 |
405 if (FLAG_trace_contexts) { | 405 if (FLAG_trace_contexts) { |
406 PrintF("=> no property/slot found\n"); | 406 PrintF("=> no property/slot found\n"); |
407 } | 407 } |
408 return Handle<Object>::null(); | 408 return Handle<Object>::null(); |
409 } | 409 } |
410 | 410 |
411 static const int kSharedOffset = 0; | |
412 static const int kCachedCodeOffset = 1; | |
413 static const int kLiteralsOffset = 2; | |
414 static const int kOsrAstIdOffset = 3; | |
415 static const int kEntryLength = 4; | |
416 static const int kInitialLength = kEntryLength; | |
417 | |
418 int Context::SearchOptimizedCodeMapEntry(SharedFunctionInfo* shared, | |
419 BailoutId osr_ast_id) { | |
420 DisallowHeapAllocation no_gc; | |
421 DCHECK(this->IsNativeContext()); | |
422 if (!OptimizedCodeMapIsCleared()) { | |
423 FixedArray* optimized_code_map = this->osr_code_table(); | |
424 int length = optimized_code_map->length(); | |
425 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); | |
426 for (int i = 0; i < length; i += kEntryLength) { | |
427 if (WeakCell::cast(optimized_code_map->get(i + kSharedOffset))->value() == | |
428 shared && | |
429 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { | |
430 return i; | |
431 } | |
432 } | |
433 } | |
434 return -1; | |
435 } | |
436 | |
437 void Context::SearchOptimizedCodeMap(SharedFunctionInfo* shared, | |
438 BailoutId osr_ast_id, Code** pcode, | |
439 LiteralsArray** pliterals) { | |
440 DCHECK(this->IsNativeContext()); | |
441 int entry = SearchOptimizedCodeMapEntry(shared, osr_ast_id); | |
442 if (entry != -1) { | |
443 FixedArray* code_map = osr_code_table(); | |
444 DCHECK_LE(entry + kEntryLength, code_map->length()); | |
445 WeakCell* cell = WeakCell::cast(code_map->get(entry + kCachedCodeOffset)); | |
446 WeakCell* literals_cell = | |
447 WeakCell::cast(code_map->get(entry + kLiteralsOffset)); | |
448 | |
449 *pcode = cell->cleared() ? nullptr : Code::cast(cell->value()); | |
450 *pliterals = literals_cell->cleared() | |
451 ? nullptr | |
452 : LiteralsArray::cast(literals_cell->value()); | |
453 } else { | |
454 *pcode = nullptr; | |
455 *pliterals = nullptr; | |
456 } | |
457 } | |
458 | |
459 void Context::AddToOptimizedCodeMap(Handle<Context> native_context, | |
460 Handle<SharedFunctionInfo> shared, | |
461 Handle<Code> code, | |
462 Handle<LiteralsArray> literals, | |
463 BailoutId osr_ast_id) { | |
464 DCHECK(native_context->IsNativeContext()); | |
465 Isolate* isolate = native_context->GetIsolate(); | |
466 if (isolate->serializer_enabled()) return; | |
467 | |
468 STATIC_ASSERT(kEntryLength == 4); | |
469 Handle<FixedArray> new_code_map; | |
470 int entry; | |
471 | |
472 if (native_context->OptimizedCodeMapIsCleared()) { | |
473 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); | |
474 entry = 0; | |
475 } else { | |
476 Handle<FixedArray> old_code_map(native_context->osr_code_table(), isolate); | |
477 entry = native_context->SearchOptimizedCodeMapEntry(*shared, osr_ast_id); | |
478 if (entry >= 0) { | |
479 // Just set the code and literals of the entry. | |
480 Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code); | |
481 old_code_map->set(entry + kCachedCodeOffset, *code_cell); | |
482 Handle<WeakCell> literals_cell = | |
483 isolate->factory()->NewWeakCell(literals); | |
484 old_code_map->set(entry + kLiteralsOffset, *literals_cell); | |
485 return; | |
486 } | |
487 | |
488 // Can we reuse an entry? | |
489 DCHECK(entry < 0); | |
490 int length = old_code_map->length(); | |
491 for (int i = 0; i < length; i += kEntryLength) { | |
492 if (WeakCell::cast(old_code_map->get(i + kSharedOffset))->cleared()) { | |
493 new_code_map = old_code_map; | |
494 entry = i; | |
495 break; | |
496 } | |
497 } | |
498 | |
499 if (entry < 0) { | |
500 // Copy old optimized code map and append one new entry. | |
501 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( | |
502 old_code_map, kEntryLength, TENURED); | |
503 entry = old_code_map->length(); | |
504 } | |
505 } | |
506 | |
507 Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code); | |
508 Handle<WeakCell> literals_cell = isolate->factory()->NewWeakCell(literals); | |
509 Handle<WeakCell> shared_cell = isolate->factory()->NewWeakCell(shared); | |
510 | |
511 new_code_map->set(entry + kSharedOffset, *shared_cell); | |
512 new_code_map->set(entry + kCachedCodeOffset, *code_cell); | |
513 new_code_map->set(entry + kLiteralsOffset, *literals_cell); | |
514 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); | |
515 | |
516 #ifdef DEBUG | |
517 for (int i = 0; i < new_code_map->length(); i += kEntryLength) { | |
518 WeakCell* cell = WeakCell::cast(new_code_map->get(i + kSharedOffset)); | |
519 DCHECK(cell->cleared() || cell->value()->IsSharedFunctionInfo()); | |
520 cell = WeakCell::cast(new_code_map->get(i + kCachedCodeOffset)); | |
521 DCHECK(cell->cleared() || | |
522 (cell->value()->IsCode() && | |
523 Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION)); | |
524 cell = WeakCell::cast(new_code_map->get(i + kLiteralsOffset)); | |
525 DCHECK(cell->cleared() || cell->value()->IsFixedArray()); | |
526 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); | |
527 } | |
528 #endif | |
529 | |
530 FixedArray* old_code_map = native_context->osr_code_table(); | |
531 if (old_code_map != *new_code_map) { | |
532 native_context->set_osr_code_table(*new_code_map); | |
533 } | |
534 } | |
535 | |
536 void Context::EvictFromOptimizedCodeMap(Code* optimized_code, | |
537 const char* reason) { | |
538 DCHECK(IsNativeContext()); | |
539 DisallowHeapAllocation no_gc; | |
540 if (OptimizedCodeMapIsCleared()) return; | |
541 | |
542 Heap* heap = GetHeap(); | |
543 FixedArray* code_map = osr_code_table(); | |
544 int dst = 0; | |
545 int length = code_map->length(); | |
546 for (int src = 0; src < length; src += kEntryLength) { | |
547 if (WeakCell::cast(code_map->get(src + kCachedCodeOffset))->value() == | |
548 optimized_code) { | |
549 BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value()); | |
550 if (FLAG_trace_opt) { | |
551 PrintF( | |
552 "[evicting entry from native context optimizing code map (%s) for ", | |
553 reason); | |
554 ShortPrint(); | |
555 DCHECK(!osr.IsNone()); | |
556 PrintF(" (osr ast id %d)]\n", osr.ToInt()); | |
557 } | |
558 // Evict the src entry by not copying it to the dst entry. | |
559 continue; | |
560 } | |
561 // Keep the src entry by copying it to the dst entry. | |
562 if (dst != src) { | |
563 code_map->set(dst + kSharedOffset, code_map->get(src + kSharedOffset)); | |
564 code_map->set(dst + kCachedCodeOffset, | |
565 code_map->get(src + kCachedCodeOffset)); | |
566 code_map->set(dst + kLiteralsOffset, | |
567 code_map->get(src + kLiteralsOffset)); | |
568 code_map->set(dst + kOsrAstIdOffset, | |
569 code_map->get(src + kOsrAstIdOffset)); | |
570 } | |
571 dst += kEntryLength; | |
572 } | |
573 if (dst != length) { | |
574 // Always trim even when array is cleared because of heap verifier. | |
575 heap->RightTrimFixedArray(code_map, length - dst); | |
576 if (code_map->length() == 0) { | |
577 ClearOptimizedCodeMap(); | |
578 } | |
579 } | |
580 } | |
581 | |
582 void Context::ClearOptimizedCodeMap() { | |
583 DCHECK(IsNativeContext()); | |
584 FixedArray* empty_fixed_array = GetHeap()->empty_fixed_array(); | |
585 set_osr_code_table(empty_fixed_array); | |
586 } | |
587 | 411 |
588 void Context::AddOptimizedFunction(JSFunction* function) { | 412 void Context::AddOptimizedFunction(JSFunction* function) { |
589 DCHECK(IsNativeContext()); | 413 DCHECK(IsNativeContext()); |
590 Isolate* isolate = GetIsolate(); | 414 Isolate* isolate = GetIsolate(); |
591 #ifdef ENABLE_SLOW_DCHECKS | 415 #ifdef ENABLE_SLOW_DCHECKS |
592 if (FLAG_enable_slow_asserts) { | 416 if (FLAG_enable_slow_asserts) { |
593 Object* element = get(OPTIMIZED_FUNCTIONS_LIST); | 417 Object* element = get(OPTIMIZED_FUNCTIONS_LIST); |
594 while (!element->IsUndefined(isolate)) { | 418 while (!element->IsUndefined(isolate)) { |
595 CHECK(element != function); | 419 CHECK(element != function); |
596 element = JSFunction::cast(element)->next_function_link(); | 420 element = JSFunction::cast(element)->next_function_link(); |
(...skipping 167 matching lines...)
764 | 588 |
765 int previous_value = errors_thrown()->value(); | 589 int previous_value = errors_thrown()->value(); |
766 set_errors_thrown(Smi::FromInt(previous_value + 1)); | 590 set_errors_thrown(Smi::FromInt(previous_value + 1)); |
767 } | 591 } |
768 | 592 |
769 | 593 |
770 int Context::GetErrorsThrown() { return errors_thrown()->value(); } | 594 int Context::GetErrorsThrown() { return errors_thrown()->value(); } |
771 | 595 |
772 } // namespace internal | 596 } // namespace internal |
773 } // namespace v8 | 597 } // namespace v8 |