| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 9558 matching lines...) |
| 9569 // No write barrier required, since the builtin is part of the root set. | 9569 // No write barrier required, since the builtin is part of the root set. |
| 9570 } | 9570 } |
| 9571 | 9571 |
| 9572 | 9572 |
| 9573 void SharedFunctionInfo::AddToOptimizedCodeMap( | 9573 void SharedFunctionInfo::AddToOptimizedCodeMap( |
| 9574 Handle<SharedFunctionInfo> shared, | 9574 Handle<SharedFunctionInfo> shared, |
| 9575 Handle<Context> native_context, | 9575 Handle<Context> native_context, |
| 9576 Handle<Code> code, | 9576 Handle<Code> code, |
| 9577 Handle<FixedArray> literals, | 9577 Handle<FixedArray> literals, |
| 9578 BailoutId osr_ast_id) { | 9578 BailoutId osr_ast_id) { |
| 9579 CALL_HEAP_FUNCTION_VOID( | 9579 Isolate* isolate = shared->GetIsolate(); |
| 9580 shared->GetIsolate(), | |
| 9581 shared->AddToOptimizedCodeMap( | |
| 9582 *native_context, *code, *literals, osr_ast_id)); | |
| 9583 } | |
| 9584 | |
| 9585 | |
| 9586 MaybeObject* SharedFunctionInfo::AddToOptimizedCodeMap(Context* native_context, | |
| 9587 Code* code, | |
| 9588 FixedArray* literals, | |
| 9589 BailoutId osr_ast_id) { | |
| 9590 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); | 9580 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); |
| 9591 ASSERT(native_context->IsNativeContext()); | 9581 ASSERT(native_context->IsNativeContext()); |
| 9592 STATIC_ASSERT(kEntryLength == 4); | 9582 STATIC_ASSERT(kEntryLength == 4); |
| 9593 Heap* heap = GetHeap(); | 9583 Handle<FixedArray> new_code_map; |
| 9594 FixedArray* new_code_map; | 9584 Handle<Object> value(shared->optimized_code_map(), isolate); |
| 9595 Object* value = optimized_code_map(); | 9585 int old_length; |
| 9596 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); | |
| 9597 if (value->IsSmi()) { | 9586 if (value->IsSmi()) { |
| 9598 // No optimized code map. | 9587 // No optimized code map. |
| 9599 ASSERT_EQ(0, Smi::cast(value)->value()); | 9588 ASSERT_EQ(0, Smi::cast(*value)->value()); |
| 9600 // Create 3 entries per context {context, code, literals}. | 9589 // Create 3 entries per context {context, code, literals}. |
| 9601 MaybeObject* maybe = heap->AllocateFixedArray(kInitialLength); | 9590 new_code_map = isolate->factory()->NewFixedArray(kInitialLength); |
| 9602 if (!maybe->To(&new_code_map)) return maybe; | 9591 old_length = kEntriesStart; |
| 9603 new_code_map->set(kEntriesStart + kContextOffset, native_context); | |
| 9604 new_code_map->set(kEntriesStart + kCachedCodeOffset, code); | |
| 9605 new_code_map->set(kEntriesStart + kLiteralsOffset, literals); | |
| 9606 new_code_map->set(kEntriesStart + kOsrAstIdOffset, osr_ast_id_smi); | |
| 9607 } else { | 9592 } else { |
| 9608 // Copy old map and append one new entry. | 9593 // Copy old map and append one new entry. |
| 9609 FixedArray* old_code_map = FixedArray::cast(value); | 9594 Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value); |
| 9610 ASSERT_EQ(-1, SearchOptimizedCodeMap(native_context, osr_ast_id)); | 9595 ASSERT_EQ(-1, shared->SearchOptimizedCodeMap(*native_context, osr_ast_id)); |
| 9611 int old_length = old_code_map->length(); | 9596 old_length = old_code_map->length(); |
| 9612 int new_length = old_length + kEntryLength; | 9597 new_code_map = isolate->factory()->CopySizeFixedArray( |
| 9613 MaybeObject* maybe = old_code_map->CopySize(new_length); | 9598 old_code_map, old_length + kEntryLength); |
| 9614 if (!maybe->To(&new_code_map)) return maybe; | |
| 9615 new_code_map->set(old_length + kContextOffset, native_context); | |
| 9616 new_code_map->set(old_length + kCachedCodeOffset, code); | |
| 9617 new_code_map->set(old_length + kLiteralsOffset, literals); | |
| 9618 new_code_map->set(old_length + kOsrAstIdOffset, osr_ast_id_smi); | |
| 9619 // Zap the old map for the sake of the heap verifier. | 9599 // Zap the old map for the sake of the heap verifier. |
| 9620 if (Heap::ShouldZapGarbage()) { | 9600 if (Heap::ShouldZapGarbage()) { |
| 9621 Object** data = old_code_map->data_start(); | 9601 Object** data = old_code_map->data_start(); |
| 9622 MemsetPointer(data, heap->the_hole_value(), old_length); | 9602 MemsetPointer(data, isolate->heap()->the_hole_value(), old_length); |
| 9623 } | 9603 } |
| 9624 } | 9604 } |
| 9605 new_code_map->set(old_length + kContextOffset, *native_context); |
| 9606 new_code_map->set(old_length + kCachedCodeOffset, *code); |
| 9607 new_code_map->set(old_length + kLiteralsOffset, *literals); |
| 9608 new_code_map->set(old_length + kOsrAstIdOffset, |
| 9609 Smi::FromInt(osr_ast_id.ToInt())); |
| 9610 |
| 9625 #ifdef DEBUG | 9611 #ifdef DEBUG |
| 9626 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { | 9612 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { |
| 9627 ASSERT(new_code_map->get(i + kContextOffset)->IsNativeContext()); | 9613 ASSERT(new_code_map->get(i + kContextOffset)->IsNativeContext()); |
| 9628 ASSERT(new_code_map->get(i + kCachedCodeOffset)->IsCode()); | 9614 ASSERT(new_code_map->get(i + kCachedCodeOffset)->IsCode()); |
| 9629 ASSERT(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() == | 9615 ASSERT(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() == |
| 9630 Code::OPTIMIZED_FUNCTION); | 9616 Code::OPTIMIZED_FUNCTION); |
| 9631 ASSERT(new_code_map->get(i + kLiteralsOffset)->IsFixedArray()); | 9617 ASSERT(new_code_map->get(i + kLiteralsOffset)->IsFixedArray()); |
| 9632 ASSERT(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); | 9618 ASSERT(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); |
| 9633 } | 9619 } |
| 9634 #endif | 9620 #endif |
| 9635 set_optimized_code_map(new_code_map); | 9621 shared->set_optimized_code_map(*new_code_map); |
| 9636 return new_code_map; | |
| 9637 } | 9622 } |
| 9638 | 9623 |
| 9639 | 9624 |
| 9640 FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) { | 9625 FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) { |
| 9641 ASSERT(index > kEntriesStart); | 9626 ASSERT(index > kEntriesStart); |
| 9642 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | 9627 FixedArray* code_map = FixedArray::cast(optimized_code_map()); |
| 9643 if (!bound()) { | 9628 if (!bound()) { |
| 9644 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1)); | 9629 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1)); |
| 9645 ASSERT_NE(NULL, cached_literals); | 9630 ASSERT_NE(NULL, cached_literals); |
| 9646 return cached_literals; | 9631 return cached_literals; |
| (...skipping 21 matching lines...) |
| 9668 flusher->EvictOptimizedCodeMap(this); | 9653 flusher->EvictOptimizedCodeMap(this); |
| 9669 } | 9654 } |
| 9670 | 9655 |
| 9671 ASSERT(code_map->get(kNextMapIndex)->IsUndefined()); | 9656 ASSERT(code_map->get(kNextMapIndex)->IsUndefined()); |
| 9672 set_optimized_code_map(Smi::FromInt(0)); | 9657 set_optimized_code_map(Smi::FromInt(0)); |
| 9673 } | 9658 } |
| 9674 | 9659 |
| 9675 | 9660 |
| 9676 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, | 9661 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, |
| 9677 const char* reason) { | 9662 const char* reason) { |
| 9663 DisallowHeapAllocation no_gc; |
| 9678 if (optimized_code_map()->IsSmi()) return; | 9664 if (optimized_code_map()->IsSmi()) return; |
| 9679 | 9665 |
| 9680 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | 9666 FixedArray* code_map = FixedArray::cast(optimized_code_map()); |
| 9681 int dst = kEntriesStart; | 9667 int dst = kEntriesStart; |
| 9682 int length = code_map->length(); | 9668 int length = code_map->length(); |
| 9683 for (int src = kEntriesStart; src < length; src += kEntryLength) { | 9669 for (int src = kEntriesStart; src < length; src += kEntryLength) { |
| 9684 ASSERT(code_map->get(src)->IsNativeContext()); | 9670 ASSERT(code_map->get(src)->IsNativeContext()); |
| 9685 if (Code::cast(code_map->get(src + kCachedCodeOffset)) == optimized_code) { | 9671 if (Code::cast(code_map->get(src + kCachedCodeOffset)) == optimized_code) { |
| 9686 // Evict the src entry by not copying it to the dst entry. | 9672 // Evict the src entry by not copying it to the dst entry. |
| 9687 if (FLAG_trace_opt) { | 9673 if (FLAG_trace_opt) { |
| (...skipping 569 matching lines...) |
| 10257 | 10243 |
| 10258 // Give the correct expected_nof_properties to initial maps created later. | 10244 // Give the correct expected_nof_properties to initial maps created later. |
| 10259 ASSERT(expected_nof_properties() >= slack); | 10245 ASSERT(expected_nof_properties() >= slack); |
| 10260 set_expected_nof_properties(expected_nof_properties() - slack); | 10246 set_expected_nof_properties(expected_nof_properties() - slack); |
| 10261 } | 10247 } |
| 10262 } | 10248 } |
| 10263 | 10249 |
| 10264 | 10250 |
| 10265 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context, | 10251 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context, |
| 10266 BailoutId osr_ast_id) { | 10252 BailoutId osr_ast_id) { |
| 10253 DisallowHeapAllocation no_gc; |
| 10267 ASSERT(native_context->IsNativeContext()); | 10254 ASSERT(native_context->IsNativeContext()); |
| 10268 if (!FLAG_cache_optimized_code) return -1; | 10255 if (!FLAG_cache_optimized_code) return -1; |
| 10269 Object* value = optimized_code_map(); | 10256 Object* value = optimized_code_map(); |
| 10270 if (!value->IsSmi()) { | 10257 if (!value->IsSmi()) { |
| 10271 FixedArray* optimized_code_map = FixedArray::cast(value); | 10258 FixedArray* optimized_code_map = FixedArray::cast(value); |
| 10272 int length = optimized_code_map->length(); | 10259 int length = optimized_code_map->length(); |
| 10273 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); | 10260 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); |
| 10274 for (int i = kEntriesStart; i < length; i += kEntryLength) { | 10261 for (int i = kEntriesStart; i < length; i += kEntryLength) { |
| 10275 if (optimized_code_map->get(i + kContextOffset) == native_context && | 10262 if (optimized_code_map->get(i + kContextOffset) == native_context && |
| 10276 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { | 10263 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { |
| (...skipping 6217 matching lines...) |
| 16494 #define ERROR_MESSAGES_TEXTS(C, T) T, | 16481 #define ERROR_MESSAGES_TEXTS(C, T) T, |
| 16495 static const char* error_messages_[] = { | 16482 static const char* error_messages_[] = { |
| 16496 ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS) | 16483 ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS) |
| 16497 }; | 16484 }; |
| 16498 #undef ERROR_MESSAGES_TEXTS | 16485 #undef ERROR_MESSAGES_TEXTS |
| 16499 return error_messages_[reason]; | 16486 return error_messages_[reason]; |
| 16500 } | 16487 } |
| 16501 | 16488 |
| 16502 | 16489 |
| 16503 } } // namespace v8::internal | 16490 } } // namespace v8::internal |
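The hunks above handlify SharedFunctionInfo::AddToOptimizedCodeMap and add DisallowHeapAllocation scopes to the raw-pointer readers (EvictFromOptimizedCodeMap, SearchOptimizedCodeMap). The sketch below is a minimal, standalone illustration of the flat-array layout those functions share: one reserved header slot followed by four-slot entries keyed on {context, osr_ast_id}. It is not V8 code; the class name, the integer payloads, and the exact constant values are assumptions made for the example, while the slot names and kEntryLength == 4 come from the diff itself.

```cpp
// Illustrative sketch of the optimized-code-map layout (not the real V8
// declarations, which live in objects.h and hold tagged heap objects).
#include <cassert>
#include <vector>

namespace sketch {

// One logical entry occupies four consecutive slots, mirroring
// STATIC_ASSERT(kEntryLength == 4) in the CL.
const int kContextOffset = 0;     // native context the code was compiled for
const int kCachedCodeOffset = 1;  // the optimized Code object
const int kLiteralsOffset = 2;    // literals array for that context
const int kOsrAstIdOffset = 3;    // BailoutId of the OSR entry (0 if none)
const int kEntryLength = 4;
const int kEntriesStart = 1;      // slot 0 is reserved (assumed next-map link)

class OptimizedCodeMap {
 public:
  OptimizedCodeMap() : slots_(kEntriesStart, 0) {}

  // Append one entry, analogous to AddToOptimizedCodeMap: the array only
  // ever grows by whole entries, and duplicates are not allowed.
  void Add(int context, int code, int literals, int osr_ast_id) {
    assert(Search(context, osr_ast_id) == -1);
    slots_.push_back(context);
    slots_.push_back(code);
    slots_.push_back(literals);
    slots_.push_back(osr_ast_id);
  }

  // Linear scan keyed on {context, osr_ast_id}, analogous to
  // SearchOptimizedCodeMap; returns the index of the matching entry's
  // first slot, or -1 if absent.
  int Search(int context, int osr_ast_id) const {
    for (size_t i = kEntriesStart; i < slots_.size(); i += kEntryLength) {
      if (slots_[i + kContextOffset] == context &&
          slots_[i + kOsrAstIdOffset] == osr_ast_id) {
        return static_cast<int>(i);
      }
    }
    return -1;
  }

 private:
  std::vector<int> slots_;  // flat array: [header][entry 0][entry 1]...
};

}  // namespace sketch

int main() {
  sketch::OptimizedCodeMap map;
  map.Add(/*context=*/1, /*code=*/100, /*literals=*/200, /*osr_ast_id=*/0);
  map.Add(1, 101, 200, /*osr_ast_id=*/7);  // OSR entry for the same context
  assert(map.Search(1, 0) == sketch::kEntriesStart);
  assert(map.Search(1, 7) == sketch::kEntriesStart + sketch::kEntryLength);
  assert(map.Search(2, 0) == -1);
  return 0;
}
```

The sketch also shows why the handlified AddToOptimizedCodeMap can hoist the four set() calls out of the if/else: whether the map is freshly allocated or copied and grown, the new entry is always written at the old length.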