OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/contexts.h" | 5 #include "src/contexts.h" |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/debug/debug.h" | 8 #include "src/debug/debug.h" |
9 #include "src/isolate-inl.h" | 9 #include "src/isolate-inl.h" |
10 | 10 |
(...skipping 390 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
401 !context->IsNativeContext() && !context->IsWithContext()); | 401 !context->IsNativeContext() && !context->IsWithContext()); |
402 } | 402 } |
403 } while (follow_context_chain); | 403 } while (follow_context_chain); |
404 | 404 |
405 if (FLAG_trace_contexts) { | 405 if (FLAG_trace_contexts) { |
406 PrintF("=> no property/slot found\n"); | 406 PrintF("=> no property/slot found\n"); |
407 } | 407 } |
408 return Handle<Object>::null(); | 408 return Handle<Object>::null(); |
409 } | 409 } |
410 | 410 |
// Layout of one entry in the native context's optimized code map
// (osrd_function_table). Each entry is a quadruple of
// (SharedFunctionInfo, Code, LiteralsArray, osr ast id); the first three
// are held weakly through WeakCells, the last is stored as a Smi.
static const int kSharedOffset = 0;
static const int kCachedCodeOffset = 1;
static const int kLiteralsOffset = 2;
static const int kOsrAstIdOffset = 3;
static const int kEntryLength = 4;
// A freshly allocated map has room for exactly one entry.
static const int kInitialLength = kEntryLength;

418 int Context::SearchOptimizedCodeMapEntry(SharedFunctionInfo* shared, | |
419 BailoutId osr_ast_id) { | |
420 DisallowHeapAllocation no_gc; | |
421 DCHECK(this->IsNativeContext()); | |
422 if (!OptimizedCodeMapIsCleared()) { | |
423 FixedArray* optimized_code_map = this->osrd_function_table(); | |
424 int length = optimized_code_map->length(); | |
425 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); | |
426 for (int i = 0; i < length; i += kEntryLength) { | |
427 if (WeakCell::cast(optimized_code_map->get(i + kSharedOffset))->value() == | |
428 shared && | |
429 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { | |
430 return i; | |
431 } | |
432 } | |
433 } | |
434 return -1; | |
435 } | |
436 | |
437 void Context::SearchOptimizedCodeMap(SharedFunctionInfo* shared, | |
438 BailoutId osr_ast_id, Code** pcode, | |
439 LiteralsArray** pliterals) { | |
440 DCHECK(this->IsNativeContext()); | |
441 int entry = SearchOptimizedCodeMapEntry(shared, osr_ast_id); | |
442 if (entry != -1) { | |
443 FixedArray* code_map = osrd_function_table(); | |
444 DCHECK_LE(entry + kEntryLength, code_map->length()); | |
445 WeakCell* cell = WeakCell::cast(code_map->get(entry + kCachedCodeOffset)); | |
446 WeakCell* literals_cell = | |
447 WeakCell::cast(code_map->get(entry + kLiteralsOffset)); | |
448 | |
449 *pcode = cell->cleared() ? nullptr : Code::cast(cell->value()); | |
450 *pliterals = literals_cell->cleared() | |
451 ? nullptr | |
452 : LiteralsArray::cast(literals_cell->value()); | |
453 } else { | |
454 *pcode = nullptr; | |
455 *pliterals = nullptr; | |
456 } | |
457 } | |
458 | |
459 void Context::AddToOptimizedCodeMap(Handle<Context> native_context, | |
460 Handle<SharedFunctionInfo> shared, | |
461 Handle<Code> code, | |
462 Handle<LiteralsArray> literals, | |
463 BailoutId osr_ast_id) { | |
464 DCHECK(native_context->IsNativeContext()); | |
465 Isolate* isolate = native_context->GetIsolate(); | |
466 if (isolate->serializer_enabled()) return; | |
467 | |
468 STATIC_ASSERT(kEntryLength == 4); | |
469 Handle<FixedArray> new_code_map; | |
470 int entry; | |
471 | |
472 if (native_context->OptimizedCodeMapIsCleared()) { | |
473 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); | |
474 entry = 0; | |
475 } else { | |
476 Handle<FixedArray> old_code_map(native_context->osrd_function_table(), | |
477 isolate); | |
478 entry = native_context->SearchOptimizedCodeMapEntry(*shared, osr_ast_id); | |
479 if (entry >= 0) { | |
480 // Just set the code and literals of the entry. | |
481 Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code); | |
ulan
2016/12/07 14:54:57
Since the code is optimized, Code::WeakCellFor(cod
| |
482 old_code_map->set(entry + kCachedCodeOffset, *code_cell); | |
483 Handle<WeakCell> literals_cell = | |
484 isolate->factory()->NewWeakCell(literals); | |
485 old_code_map->set(entry + kLiteralsOffset, *literals_cell); | |
486 return; | |
487 } | |
488 | |
489 // Can we reuse an entry? | |
490 DCHECK(entry < 0); | |
491 int length = old_code_map->length(); | |
492 for (int i = 0; i < length; i += kEntryLength) { | |
493 if (WeakCell::cast(old_code_map->get(i + kSharedOffset))->cleared()) { | |
494 new_code_map = old_code_map; | |
495 entry = i; | |
496 break; | |
497 } | |
498 } | |
499 | |
500 if (entry < 0) { | |
501 // Copy old optimized code map and append one new entry. | |
502 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( | |
503 old_code_map, kEntryLength, TENURED); | |
504 // TODO(mstarzinger): Temporary workaround. The allocation above might | |
Michael Starzinger
2016/12/07 09:26:31
This should no longer happen now that flushing has
mvstanton
2016/12/07 14:42:32
Done.
| |
505 // have flushed the optimized code map and the copy we created is full of | |
506 // holes. For now we just give up on adding the entry and pretend it got | |
507 // flushed. | |
508 if (native_context->OptimizedCodeMapIsCleared()) return; | |
509 entry = old_code_map->length(); | |
510 } | |
511 } | |
512 | |
513 // TODO(mvstanton): isn't there a cached weak cell in the Code class? | |
514 Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code); | |
ulan
2016/12/07 14:54:57
Code::WeakCellFor(code)
| |
515 Handle<WeakCell> literals_cell = isolate->factory()->NewWeakCell(literals); | |
516 // TODO(mvstanton): Should we cache a weak cell for a SharedFunctionInfo? | |
517 Handle<WeakCell> shared_cell = isolate->factory()->NewWeakCell(shared); | |
518 | |
519 new_code_map->set(entry + kSharedOffset, *shared_cell); | |
520 new_code_map->set(entry + kCachedCodeOffset, *code_cell); | |
521 new_code_map->set(entry + kLiteralsOffset, *literals_cell); | |
522 new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt())); | |
523 | |
524 #ifdef DEBUG | |
525 for (int i = 0; i < new_code_map->length(); i += kEntryLength) { | |
526 WeakCell* cell = WeakCell::cast(new_code_map->get(i + kSharedOffset)); | |
527 DCHECK(cell->cleared() || cell->value()->IsSharedFunctionInfo()); | |
528 cell = WeakCell::cast(new_code_map->get(i + kCachedCodeOffset)); | |
529 DCHECK(cell->cleared() || | |
530 (cell->value()->IsCode() && | |
531 Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION)); | |
532 cell = WeakCell::cast(new_code_map->get(i + kLiteralsOffset)); | |
533 DCHECK(cell->cleared() || cell->value()->IsFixedArray()); | |
534 DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); | |
535 } | |
536 #endif | |
537 | |
538 FixedArray* old_code_map = native_context->osrd_function_table(); | |
539 if (old_code_map != *new_code_map) { | |
540 native_context->set_osrd_function_table(*new_code_map); | |
541 } | |
542 } | |
543 | |
// Removes every entry whose cached code is |optimized_code| from this native
// context's optimized code map. Surviving entries are compacted toward the
// front in place, the backing FixedArray is right-trimmed to the new length,
// and a fully emptied map is reset to the cleared state. |reason| is used
// only for --trace-opt output. Runs under DisallowHeapAllocation, so no GC
// can move the map while it is being rewritten.
void Context::EvictFromOptimizedCodeMap(Code* optimized_code,
                                        const char* reason) {
  DCHECK(IsNativeContext());
  DisallowHeapAllocation no_gc;
  if (OptimizedCodeMapIsCleared()) return;

  Heap* heap = GetHeap();
  FixedArray* code_map = osrd_function_table();
  int dst = 0;
  int length = code_map->length();
  for (int src = 0; src < length; src += kEntryLength) {
    if (WeakCell::cast(code_map->get(src + kCachedCodeOffset))->value() ==
        optimized_code) {
      BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
      if (FLAG_trace_opt) {
        PrintF(
            "[evicting entry from native context optimizing code map (%s) for ",
            reason);
        ShortPrint();
        // NOTE(review): this DCHECK only executes when --trace-opt is on;
        // consider hoisting it out of the tracing block if the invariant
        // should always hold.
        DCHECK(!osr.IsNone());
        PrintF(" (osr ast id %d)]\n", osr.ToInt());
      }
      // Evict the src entry by not copying it to the dst entry.
      continue;
    }
    // Keep the src entry by copying it to the dst entry.
    if (dst != src) {
      code_map->set(dst + kSharedOffset, code_map->get(src + kSharedOffset));
      code_map->set(dst + kCachedCodeOffset,
                    code_map->get(src + kCachedCodeOffset));
      code_map->set(dst + kLiteralsOffset,
                    code_map->get(src + kLiteralsOffset));
      code_map->set(dst + kOsrAstIdOffset,
                    code_map->get(src + kOsrAstIdOffset));
    }
    dst += kEntryLength;
  }
  if (dst != length) {
    // Always trim even when array is cleared because of heap verifier.
    heap->RightTrimFixedArray(code_map, length - dst);
    if (code_map->length() == 0) {
      ClearOptimizedCodeMap();
    }
  }
}

590 void Context::ClearOptimizedCodeMap() { | |
591 DCHECK(IsNativeContext()); | |
592 FixedArray* empty_fixed_array = GetHeap()->empty_fixed_array(); | |
593 // TODO(mvstanton): we could skip the write barrier... | |
594 set_osrd_function_table(empty_fixed_array); | |
595 } | |
411 | 596 |
412 void Context::AddOptimizedFunction(JSFunction* function) { | 597 void Context::AddOptimizedFunction(JSFunction* function) { |
413 DCHECK(IsNativeContext()); | 598 DCHECK(IsNativeContext()); |
414 Isolate* isolate = GetIsolate(); | 599 Isolate* isolate = GetIsolate(); |
415 #ifdef ENABLE_SLOW_DCHECKS | 600 #ifdef ENABLE_SLOW_DCHECKS |
416 if (FLAG_enable_slow_asserts) { | 601 if (FLAG_enable_slow_asserts) { |
417 Object* element = get(OPTIMIZED_FUNCTIONS_LIST); | 602 Object* element = get(OPTIMIZED_FUNCTIONS_LIST); |
418 while (!element->IsUndefined(isolate)) { | 603 while (!element->IsUndefined(isolate)) { |
419 CHECK(element != function); | 604 CHECK(element != function); |
420 element = JSFunction::cast(element)->next_function_link(); | 605 element = JSFunction::cast(element)->next_function_link(); |
(...skipping 167 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
588 | 773 |
589 int previous_value = errors_thrown()->value(); | 774 int previous_value = errors_thrown()->value(); |
590 set_errors_thrown(Smi::FromInt(previous_value + 1)); | 775 set_errors_thrown(Smi::FromInt(previous_value + 1)); |
591 } | 776 } |
592 | 777 |
593 | 778 |
594 int Context::GetErrorsThrown() { return errors_thrown()->value(); } | 779 int Context::GetErrorsThrown() { return errors_thrown()->value(); } |
595 | 780 |
596 } // namespace internal | 781 } // namespace internal |
597 } // namespace v8 | 782 } // namespace v8 |
OLD | NEW |