Chromium Code Reviews
Side by Side Diff: src/objects.cc

Issue 2674593003: [TypeFeedbackVector] Root feedback vectors at function literal site. (Closed)
Patch Set: REBASE. Created 3 years, 10 months ago
OLD | NEW
1 // Copyright 2015 the V8 project authors. All rights reserved. 1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/objects.h" 5 #include "src/objects.h"
6 6
7 #include <cmath> 7 #include <cmath>
8 #include <iomanip> 8 #include <iomanip>
9 #include <memory> 9 #include <memory>
10 #include <sstream> 10 #include <sstream>
(...skipping 11969 matching lines...)
11980 isolate->builtins()->builtin(Builtins::kCompileOptimizedConcurrent)); 11980 isolate->builtins()->builtin(Builtins::kCompileOptimizedConcurrent));
11981 // No write barrier required, since the builtin is part of the root set. 11981 // No write barrier required, since the builtin is part of the root set.
11982 if (FLAG_mark_shared_functions_for_tier_up) { 11982 if (FLAG_mark_shared_functions_for_tier_up) {
11983 // TODO(leszeks): The compilation isn't concurrent if we trigger it using 11983 // TODO(leszeks): The compilation isn't concurrent if we trigger it using
11984 // this bit. 11984 // this bit.
11985 shared()->set_marked_for_tier_up(true); 11985 shared()->set_marked_for_tier_up(true);
11986 } 11986 }
11987 } 11987 }
11988 11988
11989 // static 11989 // static
11990 Handle<TypeFeedbackVector> SharedFunctionInfo::FindOrCreateVector(
11991 Handle<SharedFunctionInfo> shared, Handle<Context> native_context) {
11992 Isolate* isolate = shared->GetIsolate();
11993 CodeAndVector result =
11994 shared->SearchOptimizedCodeMap(*native_context, BailoutId::None());
11995 if (result.vector != nullptr) {
11996 DCHECK(shared->feedback_metadata()->is_empty() ||
11997 !result.vector->is_empty());
11998 return handle(result.vector, isolate);
11999 }
12000
12001 Handle<TypeFeedbackVector> vector =
12002 TypeFeedbackVector::New(isolate, handle(shared->feedback_metadata()));
12003 Handle<Code> code;
12004 if (result.code != nullptr) {
12005 code = Handle<Code>(result.code, isolate);
12006 }
12007 AddToOptimizedCodeMap(shared, native_context, code, vector,
12008 BailoutId::None());
12009 return vector;
12010 }
12011
12012 // static
12013 void SharedFunctionInfo::AddToOptimizedCodeMap( 11990 void SharedFunctionInfo::AddToOptimizedCodeMap(
12014 Handle<SharedFunctionInfo> shared, Handle<Context> native_context, 11991 Handle<SharedFunctionInfo> shared, Handle<Context> native_context,
12015 MaybeHandle<Code> code, Handle<TypeFeedbackVector> vector, 11992 Handle<Code> code, BailoutId osr_ast_id) {
12016 BailoutId osr_ast_id) {
12017 Isolate* isolate = shared->GetIsolate(); 11993 Isolate* isolate = shared->GetIsolate();
12018 if (isolate->serializer_enabled()) return; 11994 if (isolate->serializer_enabled()) return;
12019 DCHECK(code.is_null() || 11995 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
12020 code.ToHandleChecked()->kind() == Code::OPTIMIZED_FUNCTION);
12021 DCHECK(native_context->IsNativeContext()); 11996 DCHECK(native_context->IsNativeContext());
12022 STATIC_ASSERT(kEntryLength == 3); 11997 STATIC_ASSERT(kEntryLength == 2);
12023 Handle<FixedArray> new_code_map; 11998 Handle<FixedArray> new_code_map;
12024 int entry; 11999 int entry;
12025 12000
12026 if (!osr_ast_id.IsNone()) { 12001 if (!osr_ast_id.IsNone()) {
12027 Context::AddToOptimizedCodeMap(native_context, shared, 12002 Context::AddToOptimizedCodeMap(native_context, shared, code, osr_ast_id);
12028 code.ToHandleChecked(), vector, osr_ast_id);
12029 return; 12003 return;
12030 } 12004 }
12031 12005
12032 DCHECK(osr_ast_id.IsNone()); 12006 DCHECK(osr_ast_id.IsNone());
12033 if (shared->OptimizedCodeMapIsCleared()) { 12007 if (shared->OptimizedCodeMapIsCleared()) {
12034 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED); 12008 new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED);
12035 entry = kEntriesStart; 12009 entry = kEntriesStart;
12036 } else { 12010 } else {
12037 Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate); 12011 Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate);
12038 entry = shared->SearchOptimizedCodeMapEntry(*native_context); 12012 entry = shared->SearchOptimizedCodeMapEntry(*native_context);
12039 if (entry >= kEntriesStart) { 12013 if (entry >= kEntriesStart) {
12040 // Just set the code and vector of the entry. 12014 // Just set the code of the entry.
12041 if (!code.is_null()) { 12015 Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code);
12042 Handle<WeakCell> code_cell = 12016 old_code_map->set(entry + kCachedCodeOffset, *code_cell);
12043 isolate->factory()->NewWeakCell(code.ToHandleChecked());
12044 old_code_map->set(entry + kCachedCodeOffset, *code_cell);
12045 }
12046 Handle<WeakCell> vector_cell = isolate->factory()->NewWeakCell(vector);
12047 old_code_map->set(entry + kFeedbackVectorOffset, *vector_cell);
12048 return; 12017 return;
12049 } 12018 }
12050 12019
12051 // Can we reuse an entry? 12020 // Can we reuse an entry?
12052 DCHECK(entry < kEntriesStart); 12021 DCHECK(entry < kEntriesStart);
12053 int length = old_code_map->length(); 12022 int length = old_code_map->length();
12054 for (int i = kEntriesStart; i < length; i += kEntryLength) { 12023 for (int i = kEntriesStart; i < length; i += kEntryLength) {
12055 if (WeakCell::cast(old_code_map->get(i + kContextOffset))->cleared()) { 12024 if (WeakCell::cast(old_code_map->get(i + kContextOffset))->cleared()) {
12056 new_code_map = old_code_map; 12025 new_code_map = old_code_map;
12057 entry = i; 12026 entry = i;
12058 break; 12027 break;
12059 } 12028 }
12060 } 12029 }
12061 12030
12062 if (entry < kEntriesStart) { 12031 if (entry < kEntriesStart) {
12063 // Copy old optimized code map and append one new entry. 12032 // Copy old optimized code map and append one new entry.
12064 new_code_map = isolate->factory()->CopyFixedArrayAndGrow( 12033 new_code_map = isolate->factory()->CopyFixedArrayAndGrow(
12065 old_code_map, kEntryLength, TENURED); 12034 old_code_map, kEntryLength, TENURED);
12066 // TODO(mstarzinger): Temporary workaround. The allocation above might 12035 // TODO(mstarzinger): Temporary workaround. The allocation above might
12067 // have flushed the optimized code map and the copy we created is full of 12036 // have flushed the optimized code map and the copy we created is full of
12068 // holes. For now we just give up on adding the entry and pretend it got 12037 // holes. For now we just give up on adding the entry and pretend it got
12069 // flushed. 12038 // flushed.
12070 if (shared->OptimizedCodeMapIsCleared()) return; 12039 if (shared->OptimizedCodeMapIsCleared()) return;
12071 entry = old_code_map->length(); 12040 entry = old_code_map->length();
12072 } 12041 }
12073 } 12042 }
12074 12043
12075 Handle<WeakCell> code_cell = 12044 Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code);
12076 code.is_null() ? isolate->factory()->empty_weak_cell()
12077 : isolate->factory()->NewWeakCell(code.ToHandleChecked());
12078 Handle<WeakCell> vector_cell = isolate->factory()->NewWeakCell(vector);
12079 WeakCell* context_cell = native_context->self_weak_cell(); 12045 WeakCell* context_cell = native_context->self_weak_cell();
12080 12046
12081 new_code_map->set(entry + kContextOffset, context_cell); 12047 new_code_map->set(entry + kContextOffset, context_cell);
12082 new_code_map->set(entry + kCachedCodeOffset, *code_cell); 12048 new_code_map->set(entry + kCachedCodeOffset, *code_cell);
12083 new_code_map->set(entry + kFeedbackVectorOffset, *vector_cell);
12084 12049
12085 #ifdef DEBUG 12050 #ifdef DEBUG
12086 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { 12051 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
12087 WeakCell* cell = WeakCell::cast(new_code_map->get(i + kContextOffset)); 12052 WeakCell* cell = WeakCell::cast(new_code_map->get(i + kContextOffset));
12088 DCHECK(cell->cleared() || cell->value()->IsNativeContext()); 12053 DCHECK(cell->cleared() || cell->value()->IsNativeContext());
12089 cell = WeakCell::cast(new_code_map->get(i + kCachedCodeOffset)); 12054 cell = WeakCell::cast(new_code_map->get(i + kCachedCodeOffset));
12090 DCHECK(cell->cleared() || 12055 DCHECK(cell->cleared() ||
12091 (cell->value()->IsCode() && 12056 (cell->value()->IsCode() &&
12092 Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION)); 12057 Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION));
12093 cell = WeakCell::cast(new_code_map->get(i + kFeedbackVectorOffset));
12094 DCHECK(cell->cleared() || cell->value()->IsFixedArray());
12095 } 12058 }
12096 #endif 12059 #endif
12097 12060
12098 FixedArray* old_code_map = shared->optimized_code_map(); 12061 FixedArray* old_code_map = shared->optimized_code_map();
12099 if (old_code_map != *new_code_map) { 12062 if (old_code_map != *new_code_map) {
12100 shared->set_optimized_code_map(*new_code_map); 12063 shared->set_optimized_code_map(*new_code_map);
12101 } 12064 }
12102 } 12065 }
12103 12066
12104 12067
(...skipping 17 matching lines...)
12122 DCHECK(WeakCell::cast(code_map->get(src))->cleared() || 12085 DCHECK(WeakCell::cast(code_map->get(src))->cleared() ||
12123 WeakCell::cast(code_map->get(src))->value()->IsNativeContext()); 12086 WeakCell::cast(code_map->get(src))->value()->IsNativeContext());
12124 found = WeakCell::cast(code_map->get(src + kCachedCodeOffset))->value() == 12087 found = WeakCell::cast(code_map->get(src + kCachedCodeOffset))->value() ==
12125 optimized_code; 12088 optimized_code;
12126 if (found) { 12089 if (found) {
12127 if (FLAG_trace_opt) { 12090 if (FLAG_trace_opt) {
12128 PrintF("[evicting entry from optimizing code map (%s) for ", reason); 12091 PrintF("[evicting entry from optimizing code map (%s) for ", reason);
12129 ShortPrint(); 12092 ShortPrint();
12130 PrintF("]\n"); 12093 PrintF("]\n");
12131 } 12094 }
12132 // Just clear the code in order to continue sharing a feedback vector. 12095 // Just clear the code.
12133 code_map->set(src + kCachedCodeOffset, heap->empty_weak_cell(), 12096 code_map->set(src + kCachedCodeOffset, heap->empty_weak_cell(),
12134 SKIP_WRITE_BARRIER); 12097 SKIP_WRITE_BARRIER);
12135 } 12098 }
12136 } 12099 }
12137 } 12100 }
12138 12101
12139 if (!found) { 12102 if (!found) {
12140 // We didn't find the code in here. It must be osr'd code. 12103 // We didn't find the code in here. It must be osr'd code.
12141 isolate->EvictOSROptimizedCode(optimized_code, reason); 12104 isolate->EvictOSROptimizedCode(optimized_code, reason);
12142 } 12105 }
12143 } 12106 }
12144 12107
12145 // static 12108 // static
12146 void JSFunction::EnsureLiterals(Handle<JSFunction> function) { 12109 void JSFunction::EnsureLiterals(Handle<JSFunction> function) {
12147 Handle<SharedFunctionInfo> shared(function->shared()); 12110 Handle<SharedFunctionInfo> shared(function->shared());
12148 Handle<Context> native_context(function->context()->native_context()); 12111 Handle<Context> native_context(function->context()->native_context());
12149 if (function->feedback_vector() == 12112 Isolate* isolate = shared->GetIsolate();
12150 function->GetIsolate()->heap()->empty_type_feedback_vector()) { 12113
12151 Handle<TypeFeedbackVector> vector = 12114 Cell* cell = function->feedback_vector_cell();
12152 SharedFunctionInfo::FindOrCreateVector(shared, native_context); 12115 if (cell == isolate->heap()->undefined_cell()) {
12153 function->set_feedback_vector(*vector); 12116 if (FLAG_trace_strong_rooted_literals) {
12117 PrintF("EnsureLiterals: Installing literals cell in %s %p\n",
12118 shared->DebugName()->ToCString().get(),
12119 reinterpret_cast<void*>(*function));
12120 }
12121 // Top level code didn't get it's literals installed.
12122 Handle<TypeFeedbackVector> feedback_vector =
12123 TypeFeedbackVector::New(isolate, handle(shared->feedback_metadata()));
12124 Handle<Cell> new_cell = isolate->factory()->NewCell(feedback_vector);
12125 function->set_feedback_vector_cell(*new_cell);
12126 } else if (!cell->value()->IsTypeFeedbackVector() ||
12127 !function->has_feedback_vector()) {
12128 DCHECK(cell != isolate->heap()->undefined_cell());
12129 if (FLAG_trace_strong_rooted_literals) {
12130 PrintF("EnsureLiterals: Update literals cell in %s %p\n",
12131 shared->DebugName()->ToCString().get(),
12132 reinterpret_cast<void*>(*function));
12133 }
12134 Handle<TypeFeedbackVector> feedback_vector =
12135 TypeFeedbackVector::New(isolate, handle(shared->feedback_metadata()));
12136 // Re-get the feedback_vector() value as GC may have occurred.
12137 function->feedback_vector_cell()->set_value(*feedback_vector);
12138 } else {
12139 if (FLAG_trace_strong_rooted_literals) {
12140 PrintF("EnsureLiterals: did nothing for %s %p\n",
12141 shared->DebugName()->ToCString().get(),
12142 reinterpret_cast<void*>(*function));
12143 }
12154 } 12144 }
12145
12146 // No matter what, ensure some post-conditions.
12147 DCHECK(shared->feedback_metadata()->slot_count() != 0 ||
12148 function->feedback_vector() ==
12149 shared->GetIsolate()->heap()->empty_type_feedback_vector());
12155 } 12150 }
12156 12151
12157 static void GetMinInobjectSlack(Map* map, void* data) { 12152 static void GetMinInobjectSlack(Map* map, void* data) {
12158 int slack = map->unused_property_fields(); 12153 int slack = map->unused_property_fields();
12159 if (*reinterpret_cast<int*>(data) > slack) { 12154 if (*reinterpret_cast<int*>(data) > slack) {
12160 *reinterpret_cast<int*>(data) = slack; 12155 *reinterpret_cast<int*>(data) = slack;
12161 } 12156 }
12162 } 12157 }
12163 12158
12164 12159
(...skipping 1558 matching lines...)
13723 FixedArray* optimized_code_map = this->optimized_code_map(); 13718 FixedArray* optimized_code_map = this->optimized_code_map();
13724 int length = optimized_code_map->length(); 13719 int length = optimized_code_map->length();
13725 WeakCell* empty_weak_cell = GetHeap()->empty_weak_cell(); 13720 WeakCell* empty_weak_cell = GetHeap()->empty_weak_cell();
13726 for (int i = kEntriesStart; i < length; i += kEntryLength) { 13721 for (int i = kEntriesStart; i < length; i += kEntryLength) {
13727 optimized_code_map->set(i + kCachedCodeOffset, empty_weak_cell, 13722 optimized_code_map->set(i + kCachedCodeOffset, empty_weak_cell,
13728 SKIP_WRITE_BARRIER); 13723 SKIP_WRITE_BARRIER);
13729 } 13724 }
13730 } 13725 }
13731 } 13726 }
13732 13727
13733 CodeAndVector SharedFunctionInfo::SearchOptimizedCodeMap( 13728 Code* SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
13734 Context* native_context, BailoutId osr_ast_id) { 13729 BailoutId osr_ast_id) {
13735 CodeAndVector result = {nullptr, nullptr}; 13730 Code* result = nullptr;
13736 if (!osr_ast_id.IsNone()) { 13731 if (!osr_ast_id.IsNone()) {
13737 Code* code; 13732 return native_context->SearchOptimizedCodeMap(this, osr_ast_id);
13738 TypeFeedbackVector* vector;
13739 native_context->SearchOptimizedCodeMap(this, osr_ast_id, &code, &vector);
13740 result = {code, vector};
13741 return result;
13742 } 13733 }
13743 13734
13744 DCHECK(osr_ast_id.IsNone()); 13735 DCHECK(osr_ast_id.IsNone());
13745 int entry = SearchOptimizedCodeMapEntry(native_context); 13736 int entry = SearchOptimizedCodeMapEntry(native_context);
13746 if (entry != kNotFound) { 13737 if (entry != kNotFound) {
13747 FixedArray* code_map = optimized_code_map(); 13738 FixedArray* code_map = optimized_code_map();
13748 DCHECK_LE(entry + kEntryLength, code_map->length()); 13739 DCHECK_LE(entry + kEntryLength, code_map->length());
13749 WeakCell* cell = WeakCell::cast(code_map->get(entry + kCachedCodeOffset)); 13740 WeakCell* cell = WeakCell::cast(code_map->get(entry + kCachedCodeOffset));
13750 WeakCell* vector_cell = 13741 result = cell->cleared() ? nullptr : Code::cast(cell->value());
13751 WeakCell::cast(code_map->get(entry + kFeedbackVectorOffset));
13752
13753 result = {cell->cleared() ? nullptr : Code::cast(cell->value()),
13754 vector_cell->cleared() ? nullptr : TypeFeedbackVector::cast(
13755 vector_cell->value())};
13756 } 13742 }
13757 return result; 13743 return result;
13758 } 13744 }
13759 13745
13760 13746
13761 #define DECLARE_TAG(ignore1, name, ignore2) name, 13747 #define DECLARE_TAG(ignore1, name, ignore2) name,
13762 const char* const VisitorSynchronization::kTags[ 13748 const char* const VisitorSynchronization::kTags[
13763 VisitorSynchronization::kNumberOfSyncTags] = { 13749 VisitorSynchronization::kNumberOfSyncTags] = {
13764 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG) 13750 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
13765 }; 13751 };
(...skipping 259 matching lines...)
14025 int p = it.source_position().ScriptOffset(); 14011 int p = it.source_position().ScriptOffset();
14026 if (statement_position < p && p <= position) { 14012 if (statement_position < p && p <= position) {
14027 statement_position = p; 14013 statement_position = p;
14028 } 14014 }
14029 } 14015 }
14030 } 14016 }
14031 return statement_position; 14017 return statement_position;
14032 } 14018 }
14033 14019
14034 void JSFunction::ClearTypeFeedbackInfo() { 14020 void JSFunction::ClearTypeFeedbackInfo() {
14035 feedback_vector()->ClearSlots(shared()); 14021 if (feedback_vector_cell()->value()->IsTypeFeedbackVector()) {
14022 TypeFeedbackVector* vector = feedback_vector();
14023 vector->ClearSlots(shared());
14024 }
14036 } 14025 }
14037 14026
14038 void JSFunction::ClearTypeFeedbackInfoAtGCTime() { 14027 void JSFunction::ClearTypeFeedbackInfoAtGCTime() {
14039 feedback_vector()->ClearSlotsAtGCTime(shared()); 14028 if (feedback_vector_cell()->value()->IsTypeFeedbackVector()) {
14029 TypeFeedbackVector* vector = feedback_vector();
14030 vector->ClearSlotsAtGCTime(shared());
14031 }
14040 } 14032 }
14041 14033
14042 BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) { 14034 BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) {
14043 DisallowHeapAllocation no_gc; 14035 DisallowHeapAllocation no_gc;
14044 DCHECK(kind() == FUNCTION); 14036 DCHECK(kind() == FUNCTION);
14045 BackEdgeTable back_edges(this, &no_gc); 14037 BackEdgeTable back_edges(this, &no_gc);
14046 for (uint32_t i = 0; i < back_edges.length(); i++) { 14038 for (uint32_t i = 0; i < back_edges.length(); i++) {
14047 if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i); 14039 if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i);
14048 } 14040 }
14049 return BailoutId::None(); 14041 return BailoutId::None();
(...skipping 3418 matching lines...)
17468 Isolate* isolate = GetIsolate(); 17460 Isolate* isolate = GetIsolate();
17469 Handle<SharedFunctionInfo> shared(context->closure()->shared()); 17461 Handle<SharedFunctionInfo> shared(context->closure()->shared());
17470 StringSharedKey key(src, shared, language_mode, kNoSourcePosition); 17462 StringSharedKey key(src, shared, language_mode, kNoSourcePosition);
17471 int entry = FindEntry(&key); 17463 int entry = FindEntry(&key);
17472 if (entry == kNotFound) return isolate->factory()->undefined_value(); 17464 if (entry == kNotFound) return isolate->factory()->undefined_value();
17473 int index = EntryToIndex(entry); 17465 int index = EntryToIndex(entry);
17474 if (!get(index)->IsFixedArray()) return isolate->factory()->undefined_value(); 17466 if (!get(index)->IsFixedArray()) return isolate->factory()->undefined_value();
17475 return Handle<Object>(get(index + 1), isolate); 17467 return Handle<Object>(get(index + 1), isolate);
17476 } 17468 }
17477 17469
17470 static Cell* SearchLiteralsMap(CompilationCacheTable* cache, int cache_entry,
17471 Context* native_context);
17472 static void AddToLiteralsMap(Handle<CompilationCacheTable> cache,
17473 int cache_entry, Handle<Context> native_context,
17474 Handle<Cell> literals);
17478 17475
17479 Handle<Object> CompilationCacheTable::LookupEval( 17476 InfoVectorPair CompilationCacheTable::LookupScript(Handle<String> src,
17477 Handle<Context> context,
17478 LanguageMode language_mode) {
17479 InfoVectorPair empty_result;
17480 Isolate* isolate = GetIsolate();
17481 Handle<SharedFunctionInfo> shared(context->closure()->shared());
17482 StringSharedKey key(src, shared, language_mode, kNoSourcePosition);
17483 int entry = FindEntry(&key);
17484 if (entry == kNotFound) return empty_result;
17485 int index = EntryToIndex(entry);
17486 if (!get(index)->IsFixedArray()) return empty_result;
17487 Object* obj = get(index + 1);
17488 if (obj->IsSharedFunctionInfo()) {
17489 Cell* literals =
17490 SearchLiteralsMap(this, index + 2, context->native_context());
17491 return InfoVectorPair(SharedFunctionInfo::cast(obj), literals);
17492 }
17493 return empty_result;
17494 }
17495
17496 InfoVectorPair CompilationCacheTable::LookupEval(
17480 Handle<String> src, Handle<SharedFunctionInfo> outer_info, 17497 Handle<String> src, Handle<SharedFunctionInfo> outer_info,
17481 LanguageMode language_mode, int scope_position) { 17498 Handle<Context> native_context, LanguageMode language_mode,
17482 Isolate* isolate = GetIsolate(); 17499 int scope_position) {
17500 InfoVectorPair empty_result;
17483 // Cache key is the tuple (source, outer shared function info, scope position) 17501 // Cache key is the tuple (source, outer shared function info, scope position)
17484 // to unambiguously identify the context chain the cached eval code assumes. 17502 // to unambiguously identify the context chain the cached eval code assumes.
17485 StringSharedKey key(src, outer_info, language_mode, scope_position); 17503 StringSharedKey key(src, outer_info, language_mode, scope_position);
17486 int entry = FindEntry(&key); 17504 int entry = FindEntry(&key);
17487 if (entry == kNotFound) return isolate->factory()->undefined_value(); 17505 if (entry == kNotFound) return empty_result;
17488 int index = EntryToIndex(entry); 17506 int index = EntryToIndex(entry);
17489 if (!get(index)->IsFixedArray()) return isolate->factory()->undefined_value(); 17507 if (!get(index)->IsFixedArray()) return empty_result;
17490 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate); 17508 Object* obj = get(EntryToIndex(entry) + 1);
17509 if (obj->IsSharedFunctionInfo()) {
17510 Cell* literals =
17511 SearchLiteralsMap(this, EntryToIndex(entry) + 2, *native_context);
17512 return InfoVectorPair(SharedFunctionInfo::cast(obj), literals);
17513 }
17514 return empty_result;
17491 } 17515 }
17492 17516
17493
17494 Handle<Object> CompilationCacheTable::LookupRegExp(Handle<String> src, 17517 Handle<Object> CompilationCacheTable::LookupRegExp(Handle<String> src,
17495 JSRegExp::Flags flags) { 17518 JSRegExp::Flags flags) {
17496 Isolate* isolate = GetIsolate(); 17519 Isolate* isolate = GetIsolate();
17497 DisallowHeapAllocation no_allocation; 17520 DisallowHeapAllocation no_allocation;
17498 RegExpKey key(src, flags); 17521 RegExpKey key(src, flags);
17499 int entry = FindEntry(&key); 17522 int entry = FindEntry(&key);
17500 if (entry == kNotFound) return isolate->factory()->undefined_value(); 17523 if (entry == kNotFound) return isolate->factory()->undefined_value();
17501 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate); 17524 return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
17502 } 17525 }
17503 17526
17504 17527
17505 Handle<CompilationCacheTable> CompilationCacheTable::Put( 17528 Handle<CompilationCacheTable> CompilationCacheTable::Put(
17506 Handle<CompilationCacheTable> cache, Handle<String> src, 17529 Handle<CompilationCacheTable> cache, Handle<String> src,
17507 Handle<Context> context, LanguageMode language_mode, Handle<Object> value) { 17530 Handle<Context> context, LanguageMode language_mode, Handle<Object> value) {
17508 Isolate* isolate = cache->GetIsolate(); 17531 Isolate* isolate = cache->GetIsolate();
17509 Handle<SharedFunctionInfo> shared(context->closure()->shared()); 17532 Handle<SharedFunctionInfo> shared(context->closure()->shared());
17510 StringSharedKey key(src, shared, language_mode, kNoSourcePosition); 17533 StringSharedKey key(src, shared, language_mode, kNoSourcePosition);
17511 Handle<Object> k = key.AsHandle(isolate); 17534 Handle<Object> k = key.AsHandle(isolate);
17512 cache = EnsureCapacity(cache, 1, &key); 17535 cache = EnsureCapacity(cache, 1, &key);
17513 int entry = cache->FindInsertionEntry(key.Hash()); 17536 int entry = cache->FindInsertionEntry(key.Hash());
17514 cache->set(EntryToIndex(entry), *k); 17537 cache->set(EntryToIndex(entry), *k);
17515 cache->set(EntryToIndex(entry) + 1, *value); 17538 cache->set(EntryToIndex(entry) + 1, *value);
17516 cache->ElementAdded(); 17539 cache->ElementAdded();
17517 return cache; 17540 return cache;
17518 } 17541 }
17519 17542
17543 Handle<CompilationCacheTable> CompilationCacheTable::PutScript(
17544 Handle<CompilationCacheTable> cache, Handle<String> src,
17545 Handle<Context> context, LanguageMode language_mode,
17546 Handle<SharedFunctionInfo> value, Handle<Cell> literals) {
17547 Isolate* isolate = cache->GetIsolate();
17548 Handle<SharedFunctionInfo> shared(context->closure()->shared());
17549 // TODO(mvstanton): is the context always a native context already?
17550 Handle<Context> native_context(context->native_context());
17551 StringSharedKey key(src, shared, language_mode, kNoSourcePosition);
17552 Handle<Object> k = key.AsHandle(isolate);
17553 cache = EnsureCapacity(cache, 1, &key);
17554 int entry = cache->FindInsertionEntry(key.Hash());
17555 cache->set(EntryToIndex(entry), *k);
17556 cache->set(EntryToIndex(entry) + 1, *value);
17557 AddToLiteralsMap(cache, EntryToIndex(entry) + 2, native_context, literals);
17558 cache->ElementAdded();
17559 return cache;
17560 }
17561
17562 const int kLiteralEntryLength = 2;
17563 const int kLiteralInitialLength = 2;
17564 const int kLiteralContextOffset = 0;
17565 const int kLiteralLiteralsOffset = 1;
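A rough sketch of the entry layout these constants imply, inferred from AddToLiteralsMap and SearchLiteralsMap below (illustrative comments only, not part of the patch):
// The literals map stored at `cache_entry` is a FixedArray of two-slot
// entries, one per native context:
//
//   map[entry + kLiteralContextOffset]  -> WeakCell(native context)
//   map[entry + kLiteralLiteralsOffset] -> WeakCell(Cell holding the literals)
//
// An entry whose context WeakCell has been cleared by GC is free for reuse.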
17566
17567 static int SearchLiteralsMapEntry(CompilationCacheTable* cache, int cache_entry,
Michael Starzinger 2017/02/02 13:42:07 nit: Use anonymous namespace instead of static. I
mvstanton 2017/02/03 12:16:37 Excellent, I did both.
17568 Context* native_context) {
17569 DisallowHeapAllocation no_gc;
17570 DCHECK(native_context->IsNativeContext());
17571 Object* obj = cache->get(cache_entry);
17572
17573 if (obj->IsFixedArray()) {
17574 FixedArray* literals_map = FixedArray::cast(obj);
17575 int length = literals_map->length();
17576 for (int i = 0; i < length; i += kLiteralEntryLength) {
17577 if (WeakCell::cast(literals_map->get(i + kLiteralContextOffset))
17578 ->value() == native_context) {
17579 return i;
17580 }
17581 }
17582 }
17583 return -1;
17584 }
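A minimal sketch of the nit raised above (internal linkage via an anonymous namespace rather than file-level static). The reply says this was done, but the updated code is not part of this patch set, so the shape below is only an assumption:
namespace {

// Same helper as above; internal linkage now comes from the anonymous
// namespace instead of the `static` keyword.
int SearchLiteralsMapEntry(CompilationCacheTable* cache, int cache_entry,
                           Context* native_context) {
  // ... body unchanged ...
}

}  // namespace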
17585
17586 static void AddToLiteralsMap(Handle<CompilationCacheTable> cache,
17587 int cache_entry, Handle<Context> native_context,
17588 Handle<Cell> literals) {
17589 Isolate* isolate = native_context->GetIsolate();
17590 DCHECK(native_context->IsNativeContext());
17591 STATIC_ASSERT(kLiteralEntryLength == 2);
17592 Handle<FixedArray> new_literals_map;
17593 int entry;
17594
17595 Object* obj = cache->get(cache_entry);
17596
17597 if (!obj->IsFixedArray() || FixedArray::cast(obj)->length() == 0) {
17598 new_literals_map =
17599 isolate->factory()->NewFixedArray(kLiteralInitialLength, TENURED);
17600 entry = 0;
17601 } else {
17602 Handle<FixedArray> old_literals_map(FixedArray::cast(obj), isolate);
17603 entry = SearchLiteralsMapEntry(*cache, cache_entry, *native_context);
17604 if (entry >= 0) {
17605 // Just set the code of the entry.
17606 Handle<WeakCell> literals_cell =
17607 isolate->factory()->NewWeakCell(literals);
17608 old_literals_map->set(entry + kLiteralLiteralsOffset, *literals_cell);
17609 return;
17610 }
17611
17612 // Can we reuse an entry?
17613 DCHECK(entry < 0);
17614 int length = old_literals_map->length();
17615 for (int i = 0; i < length; i += kLiteralEntryLength) {
17616 if (WeakCell::cast(old_literals_map->get(i + kLiteralContextOffset))
17617 ->cleared()) {
17618 new_literals_map = old_literals_map;
17619 entry = i;
17620 break;
17621 }
17622 }
17623
17624 if (entry < 0) {
17625 // Copy old optimized code map and append one new entry.
17626 new_literals_map = isolate->factory()->CopyFixedArrayAndGrow(
17627 old_literals_map, kLiteralEntryLength, TENURED);
17628 entry = old_literals_map->length();
17629 }
17630 }
17631
17632 Handle<WeakCell> literals_cell = isolate->factory()->NewWeakCell(literals);
17633 WeakCell* context_cell = native_context->self_weak_cell();
17634
17635 new_literals_map->set(entry + kLiteralContextOffset, context_cell);
17636 new_literals_map->set(entry + kLiteralLiteralsOffset, *literals_cell);
17637
17638 #ifdef DEBUG
17639 for (int i = 0; i < new_literals_map->length(); i += kLiteralEntryLength) {
17640 WeakCell* cell =
17641 WeakCell::cast(new_literals_map->get(i + kLiteralContextOffset));
17642 DCHECK(cell->cleared() || cell->value()->IsNativeContext());
17643 cell = WeakCell::cast(new_literals_map->get(i + kLiteralLiteralsOffset));
17644 DCHECK(cell->cleared() || (cell->value()->IsCell()));
17645 }
17646 #endif
17647
17648 Object* old_literals_map = cache->get(cache_entry);
17649 if (old_literals_map != *new_literals_map) {
17650 cache->set(cache_entry, *new_literals_map);
17651 }
17652 }
17653
17654 static Cell* SearchLiteralsMap(CompilationCacheTable* cache, int cache_entry,
17655 Context* native_context) {
17656 Cell* result = nullptr;
17657 int entry = SearchLiteralsMapEntry(cache, cache_entry, native_context);
17658 if (entry >= 0) {
17659 FixedArray* literals_map = FixedArray::cast(cache->get(cache_entry));
17660 DCHECK_LE(entry + kLiteralEntryLength, literals_map->length());
17661 WeakCell* cell =
17662 WeakCell::cast(literals_map->get(entry + kLiteralLiteralsOffset));
17663
17664 result = cell->cleared() ? nullptr : Cell::cast(cell->value());
17665 }
17666 DCHECK(result == nullptr || result->IsCell());
17667 return result;
17668 }
17520 17669
17521 Handle<CompilationCacheTable> CompilationCacheTable::PutEval( 17670 Handle<CompilationCacheTable> CompilationCacheTable::PutEval(
17522 Handle<CompilationCacheTable> cache, Handle<String> src, 17671 Handle<CompilationCacheTable> cache, Handle<String> src,
17523 Handle<SharedFunctionInfo> outer_info, Handle<SharedFunctionInfo> value, 17672 Handle<SharedFunctionInfo> outer_info, Handle<SharedFunctionInfo> value,
17524 int scope_position) { 17673 Handle<Context> native_context, Handle<Cell> literals, int scope_position) {
17525 Isolate* isolate = cache->GetIsolate(); 17674 Isolate* isolate = cache->GetIsolate();
17526 StringSharedKey key(src, outer_info, value->language_mode(), scope_position); 17675 StringSharedKey key(src, outer_info, value->language_mode(), scope_position);
17527 { 17676 {
17528 Handle<Object> k = key.AsHandle(isolate); 17677 Handle<Object> k = key.AsHandle(isolate);
17529 DisallowHeapAllocation no_allocation_scope; 17678 // DisallowHeapAllocation no_allocation_scope;
Michael Starzinger 2017/02/02 13:42:07 Looks like a left-over. As discussed offline, the
mvstanton 2017/02/03 12:16:37 Thanks, I've added a comment about this - good point.
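For context on the commented-out scope above, a hedged note (assuming the usual V8 assert-scope semantics): DisallowHeapAllocation asserts that nothing allocates on the GC heap while it is live, and the new AddToLiteralsMap call inside this block allocates WeakCells (and possibly a grown FixedArray), so the scope can no longer cover it. Illustrative only:
// With the scope re-enabled:
//   DisallowHeapAllocation no_allocation_scope;
//   int entry = cache->FindEntry(&key);       // fine: read-only lookup
//   ...
//   AddToLiteralsMap(cache, EntryToIndex(entry) + 2, native_context,
//                    literals);               // NewWeakCell() would trip the
//                                             // no-allocation assert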
17530 int entry = cache->FindEntry(&key); 17679 int entry = cache->FindEntry(&key);
17531 if (entry != kNotFound) { 17680 if (entry != kNotFound) {
17532 cache->set(EntryToIndex(entry), *k); 17681 cache->set(EntryToIndex(entry), *k);
17533 cache->set(EntryToIndex(entry) + 1, *value); 17682 cache->set(EntryToIndex(entry) + 1, *value);
17683 AddToLiteralsMap(cache, EntryToIndex(entry) + 2, native_context,
17684 literals);
17534 return cache; 17685 return cache;
17535 } 17686 }
17536 } 17687 }
17537 17688
17538 cache = EnsureCapacity(cache, 1, &key); 17689 cache = EnsureCapacity(cache, 1, &key);
17539 int entry = cache->FindInsertionEntry(key.Hash()); 17690 int entry = cache->FindInsertionEntry(key.Hash());
17540 Handle<Object> k = 17691 Handle<Object> k =
17541 isolate->factory()->NewNumber(static_cast<double>(key.Hash())); 17692 isolate->factory()->NewNumber(static_cast<double>(key.Hash()));
17542 cache->set(EntryToIndex(entry), *k); 17693 cache->set(EntryToIndex(entry), *k);
17543 cache->set(EntryToIndex(entry) + 1, Smi::FromInt(kHashGenerations)); 17694 cache->set(EntryToIndex(entry) + 1, Smi::FromInt(kHashGenerations));
(...skipping 34 matching lines...)
17578 } else { 17729 } else {
17579 NoWriteBarrierSet(this, value_index, count); 17730 NoWriteBarrierSet(this, value_index, count);
17580 } 17731 }
17581 } else if (get(entry_index)->IsFixedArray()) { 17732 } else if (get(entry_index)->IsFixedArray()) {
17582 SharedFunctionInfo* info = SharedFunctionInfo::cast(get(value_index)); 17733 SharedFunctionInfo* info = SharedFunctionInfo::cast(get(value_index));
17583 bool is_old = 17734 bool is_old =
17584 info->IsInterpreted() 17735 info->IsInterpreted()
17585 ? info->bytecode_array()->IsOld() 17736 ? info->bytecode_array()->IsOld()
17586 : info->code()->kind() != Code::FUNCTION || info->code()->IsOld(); 17737 : info->code()->kind() != Code::FUNCTION || info->code()->IsOld();
17587 if (is_old) { 17738 if (is_old) {
17588 NoWriteBarrierSet(this, entry_index, the_hole_value); 17739 for (int i = 0; i < kEntrySize; i++) {
17589 NoWriteBarrierSet(this, value_index, the_hole_value); 17740 NoWriteBarrierSet(this, entry_index + i, the_hole_value);
17741 }
17590 ElementRemoved(); 17742 ElementRemoved();
17591 } 17743 }
17592 } 17744 }
17593 } 17745 }
17594 } 17746 }
17595 17747
17596 17748
17597 void CompilationCacheTable::Remove(Object* value) { 17749 void CompilationCacheTable::Remove(Object* value) {
17598 DisallowHeapAllocation no_allocation; 17750 DisallowHeapAllocation no_allocation;
17599 Object* the_hole_value = GetHeap()->the_hole_value(); 17751 Object* the_hole_value = GetHeap()->the_hole_value();
17600 for (int entry = 0, size = Capacity(); entry < size; entry++) { 17752 for (int entry = 0, size = Capacity(); entry < size; entry++) {
17601 int entry_index = EntryToIndex(entry); 17753 int entry_index = EntryToIndex(entry);
17602 int value_index = entry_index + 1; 17754 int value_index = entry_index + 1;
17603 if (get(value_index) == value) { 17755 if (get(value_index) == value) {
17604 NoWriteBarrierSet(this, entry_index, the_hole_value); 17756 for (int i = 0; i < kEntrySize; i++) {
17605 NoWriteBarrierSet(this, value_index, the_hole_value); 17757 NoWriteBarrierSet(this, entry_index + i, the_hole_value);
17758 }
17606 ElementRemoved(); 17759 ElementRemoved();
17607 } 17760 }
17608 } 17761 }
17609 return; 17762 return;
17610 } 17763 }
17611 17764
17612 template <typename Derived, typename Shape, typename Key> 17765 template <typename Derived, typename Shape, typename Key>
17613 Handle<Derived> Dictionary<Derived, Shape, Key>::New( 17766 Handle<Derived> Dictionary<Derived, Shape, Key>::New(
17614 Isolate* isolate, int at_least_space_for, PretenureFlag pretenure, 17767 Isolate* isolate, int at_least_space_for, PretenureFlag pretenure,
17615 MinimumCapacity capacity_option) { 17768 MinimumCapacity capacity_option) {
(...skipping 2409 matching lines...)
20025 // depend on this. 20178 // depend on this.
20026 return DICTIONARY_ELEMENTS; 20179 return DICTIONARY_ELEMENTS;
20027 } 20180 }
20028 DCHECK_LE(kind, LAST_ELEMENTS_KIND); 20181 DCHECK_LE(kind, LAST_ELEMENTS_KIND);
20029 return kind; 20182 return kind;
20030 } 20183 }
20031 } 20184 }
20032 20185
20033 } // namespace internal 20186 } // namespace internal
20034 } // namespace v8 20187 } // namespace v8