Chromium Code Reviews

Unified Diff: src/objects.cc

Issue 101853003: Cache optimized code for OSR. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: addressed comments
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 9503 matching lines...)
   set_code_no_write_barrier(
       GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue));
   // No write barrier required, since the builtin is part of the root set.
 }


 void SharedFunctionInfo::AddToOptimizedCodeMap(
     Handle<SharedFunctionInfo> shared,
     Handle<Context> native_context,
     Handle<Code> code,
-    Handle<FixedArray> literals) {
+    Handle<FixedArray> literals,
+    BailoutId osr_ast_id) {
   CALL_HEAP_FUNCTION_VOID(
       shared->GetIsolate(),
-      shared->AddToOptimizedCodeMap(*native_context, *code, *literals));
+      shared->AddToOptimizedCodeMap(
+          *native_context, *code, *literals, osr_ast_id));
 }


 MaybeObject* SharedFunctionInfo::AddToOptimizedCodeMap(Context* native_context,
                                                        Code* code,
-                                                       FixedArray* literals) {
+                                                       FixedArray* literals,
+                                                       BailoutId osr_ast_id) {
   ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
   ASSERT(native_context->IsNativeContext());
-  STATIC_ASSERT(kEntryLength == 3);
+  STATIC_ASSERT(kEntryLength == 4);
   Heap* heap = GetHeap();
   FixedArray* new_code_map;
   Object* value = optimized_code_map();
+  Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
   if (value->IsSmi()) {
     // No optimized code map.
     ASSERT_EQ(0, Smi::cast(value)->value());
     // Create 3 entries per context {context, code, literals}.
     MaybeObject* maybe = heap->AllocateFixedArray(kInitialLength);
     if (!maybe->To(&new_code_map)) return maybe;
-    new_code_map->set(kEntriesStart + 0, native_context);
-    new_code_map->set(kEntriesStart + 1, code);
-    new_code_map->set(kEntriesStart + 2, literals);
+    new_code_map->set(kEntriesStart + kContextOffset, native_context);
+    new_code_map->set(kEntriesStart + kCachedCodeOffset, code);
+    new_code_map->set(kEntriesStart + kLiteralsOffset, literals);
+    new_code_map->set(kEntriesStart + kOsrAstIdOffset, osr_ast_id_smi);
   } else {
     // Copy old map and append one new entry.
     FixedArray* old_code_map = FixedArray::cast(value);
-    ASSERT_EQ(-1, SearchOptimizedCodeMap(native_context));
+    ASSERT_EQ(-1, SearchOptimizedCodeMap(native_context, osr_ast_id));
     int old_length = old_code_map->length();
     int new_length = old_length + kEntryLength;
     MaybeObject* maybe = old_code_map->CopySize(new_length);
     if (!maybe->To(&new_code_map)) return maybe;
-    new_code_map->set(old_length + 0, native_context);
-    new_code_map->set(old_length + 1, code);
-    new_code_map->set(old_length + 2, literals);
+    new_code_map->set(old_length + kContextOffset, native_context);
+    new_code_map->set(old_length + kCachedCodeOffset, code);
+    new_code_map->set(old_length + kLiteralsOffset, literals);
+    new_code_map->set(old_length + kOsrAstIdOffset, osr_ast_id_smi);
     // Zap the old map for the sake of the heap verifier.
     if (Heap::ShouldZapGarbage()) {
       Object** data = old_code_map->data_start();
       MemsetPointer(data, heap->the_hole_value(), old_length);
     }
   }
 #ifdef DEBUG
   for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
-    ASSERT(new_code_map->get(i)->IsNativeContext());
-    ASSERT(new_code_map->get(i + 1)->IsCode());
-    ASSERT(Code::cast(new_code_map->get(i + 1))->kind() ==
-           Code::OPTIMIZED_FUNCTION);
-    ASSERT(new_code_map->get(i + 2)->IsFixedArray());
+    ASSERT(new_code_map->get(i + kContextOffset)->IsNativeContext());
+    ASSERT(new_code_map->get(i + kCachedCodeOffset)->IsCode());
+    ASSERT(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() ==
+           Code::OPTIMIZED_FUNCTION);
+    ASSERT(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
+    ASSERT(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
   }
 #endif
   set_optimized_code_map(new_code_map);
   return new_code_map;
 }
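The substance of the change is visible in this hunk: every code-map entry now carries a fourth slot, the OSR AST id, so the same flat FixedArray can cache on-stack-replacement code alongside regular optimized code for each native context. As a rough, self-contained model of that layout (the slot values 0..3 and kEntriesStart = 0 are assumptions for illustration; the real constants are declared in src/objects.h, not in this file):

// Simplified stand-in for the optimized code map: one flat array with
// kEntryLength slots per entry, {context, code, literals, osr-ast-id}.
// Offset values and kEntriesStart are assumed for this sketch only.
#include <vector>

static const int kContextOffset    = 0;  // native context of the entry
static const int kCachedCodeOffset = 1;  // optimized Code object
static const int kLiteralsOffset   = 2;  // literals array for that context
static const int kOsrAstIdOffset   = 3;  // OSR entry id (a Smi in the real map)
static const int kEntryLength      = 4;
static const int kEntriesStart     = 0;  // assumed; index of the first entry

// Append one entry, as AddToOptimizedCodeMap does after growing the array.
// Heap pointers are modeled as opaque intptr_t values.
static void AddEntry(std::vector<intptr_t>* map, intptr_t native_context,
                     intptr_t code, intptr_t literals, int osr_ast_id) {
  map->push_back(native_context);
  map->push_back(code);
  map->push_back(literals);
  map->push_back(osr_ast_id);
}

// Lookup keyed on BOTH the native context and the OSR ast id, returning the
// index of the cached-code slot or -1 -- the same contract as the patched
// SearchOptimizedCodeMap further down in this file.
static int Search(const std::vector<intptr_t>& map, intptr_t native_context,
                  int osr_ast_id) {
  for (size_t i = kEntriesStart; i + kEntryLength <= map.size();
       i += kEntryLength) {
    if (map[i + kContextOffset] == native_context &&
        map[i + kOsrAstIdOffset] == osr_ast_id) {
      return static_cast<int>(i + kCachedCodeOffset);
    }
  }
  return -1;
}

Non-OSR compiles cache and look up entries under the "none" ast id, so they never collide with OSR entries for the same context.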


 FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
   ASSERT(index > kEntriesStart);
   FixedArray* code_map = FixedArray::cast(optimized_code_map());
   if (!bound()) {
     FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
     ASSERT_NE(NULL, cached_literals);
     return cached_literals;
   }
   return NULL;
 }


-
 Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
   ASSERT(index > kEntriesStart);
   FixedArray* code_map = FixedArray::cast(optimized_code_map());
   Code* code = Code::cast(code_map->get(index));
   ASSERT_NE(NULL, code);
   return code;
 }
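A small invariant worth keeping in mind when reading the two getters above together with SearchOptimizedCodeMap below: the search returns the index of the cached-code slot, while GetLiteralsFromOptimizedCodeMap reads the literals at index + 1. That shortcut only stays valid while the literals slot directly follows the cached-code slot in every entry. A compile-time check capturing it might look like this (slot values assumed, as in the earlier sketch; V8 itself would phrase this with its STATIC_ASSERT macro):

// Assumed slot values; the real constants are declared in src/objects.h.
static const int kCachedCodeOffset = 1;
static const int kLiteralsOffset = 2;

// GetLiteralsFromOptimizedCodeMap reads code_map->get(index + 1), where
// index points at the cached-code slot, so the two slots must stay adjacent.
static_assert(kLiteralsOffset == kCachedCodeOffset + 1,
              "literals slot must directly follow the cached-code slot");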


 void SharedFunctionInfo::ClearOptimizedCodeMap() {
(...skipping 24 matching lines...)
       if (FLAG_trace_opt) {
         PrintF("[evicting entry from optimizing code map (%s) for ", reason);
         ShortPrint();
         PrintF("]\n");
       }
       removed_entry = true;
       break;
     }
   }
   while (i < (code_map->length() - kEntryLength)) {
-    code_map->set(i, code_map->get(i + kEntryLength));
-    code_map->set(i + 1, code_map->get(i + 1 + kEntryLength));
-    code_map->set(i + 2, code_map->get(i + 2 + kEntryLength));
+    code_map->set(i + kContextOffset,
+                  code_map->get(i + kContextOffset + kEntryLength));

titzer 2013/12/16 11:59:34: Why not an inner loop over entry length somehow?
[An inner-loop sketch follows this hunk.]

+    code_map->set(i + kCachedCodeOffset,
+                  code_map->get(i + kCachedCodeOffset + kEntryLength));
+    code_map->set(i + kLiteralsOffset,
+                  code_map->get(i + kLiteralsOffset + kEntryLength));
+    code_map->set(i + kOsrAstIdOffset,
+                  code_map->get(i + kOsrAstIdOffset + kEntryLength));
     i += kEntryLength;
   }
   if (removed_entry) {
     // Always trim even when array is cleared because of heap verifier.
     RightTrimFixedArray<FROM_MUTATOR>(GetHeap(), code_map, kEntryLength);
     if (code_map->length() == kEntriesStart) {
       ClearOptimizedCodeMap();
     }
   }
 }
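titzer's question above asks why the shift-down is not written as an inner loop over the entry length. A hedged sketch of that alternative, using only names that already appear in this function (this is not part of the uploaded patch):

  // Possible inner-loop form of the shift-down: move every remaining entry
  // one entry length to the left, slot by slot, so no individual offset
  // needs to be named. Sketch only.
  while (i < (code_map->length() - kEntryLength)) {
    for (int j = 0; j < kEntryLength; j++) {
      code_map->set(i + j, code_map->get(i + j + kEntryLength));
    }
    i += kEntryLength;
  }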
(...skipping 541 matching lines...)
     // Resize the initial map and all maps in its transition tree.
     map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);

     // Give the correct expected_nof_properties to initial maps created later.
     ASSERT(expected_nof_properties() >= slack);
     set_expected_nof_properties(expected_nof_properties() - slack);
   }
 }


-int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context) {
+int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
+                                               BailoutId osr_ast_id) {
   ASSERT(native_context->IsNativeContext());
   if (!FLAG_cache_optimized_code) return -1;
   Object* value = optimized_code_map();
   if (!value->IsSmi()) {
     FixedArray* optimized_code_map = FixedArray::cast(value);
     int length = optimized_code_map->length();
+    Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
     for (int i = kEntriesStart; i < length; i += kEntryLength) {
-      if (optimized_code_map->get(i) == native_context) {
-        return i + 1;
+      if (optimized_code_map->get(i + kContextOffset) == native_context &&
+          optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
+        return i + kCachedCodeOffset;
       }
     }
     if (FLAG_trace_opt) {
       PrintF("[didn't find optimized code in optimized code map for ");
       ShortPrint();
       PrintF("]\n");
     }
   }
   return -1;
 }
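For context on how the extra key is meant to be used: callers would pass the concrete OSR entry id when caching or looking up OSR code, and BailoutId::None() for ordinary optimized compiles. A hedged sketch of a caller-side lookup (the real call site lives in src/compiler.cc and is not part of this diff; function, shared and osr_ast_id are assumed local variables):

  // Lookup keyed on (native context, OSR ast id). Ordinary compiles would
  // pass BailoutId::None(), so OSR and non-OSR entries for the same context
  // never shadow each other.
  int index = shared->SearchOptimizedCodeMap(
      function->context()->native_context(), osr_ast_id);
  if (index != -1) {
    // Reuse the cached optimized code (and literals, if any) instead of
    // recompiling the function.
    Code* cached_code = shared->GetCodeFromOptimizedCodeMap(index);
    FixedArray* cached_literals = shared->GetLiteralsFromOptimizedCodeMap(index);
    // ... install cached_code / cached_literals on the function ...
  }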
(...skipping 6370 matching lines...)
 #define ERROR_MESSAGES_TEXTS(C, T) T,
   static const char* error_messages_[] = {
       ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
   };
 #undef ERROR_MESSAGES_TEXTS
   return error_messages_[reason];
 }


 } }  // namespace v8::internal