OLD | NEW |
---|---|
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 9452 matching lines...) | |
9463 | 9463 |
9464 void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) { | 9464 void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) { |
9465 // Iterate over all fields in the body but take care in dealing with | 9465 // Iterate over all fields in the body but take care in dealing with |
9466 // the code entry. | 9466 // the code entry. |
9467 IteratePointers(v, kPropertiesOffset, kCodeEntryOffset); | 9467 IteratePointers(v, kPropertiesOffset, kCodeEntryOffset); |
9468 v->VisitCodeEntry(this->address() + kCodeEntryOffset); | 9468 v->VisitCodeEntry(this->address() + kCodeEntryOffset); |
9469 IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size); | 9469 IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size); |
9470 } | 9470 } |
9471 | 9471 |
9472 | 9472 |
9473 void JSFunction::MarkForLazyRecompilation() { | 9473 void JSFunction::MarkForCompileOptimized() { |
9474 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints()); | 9474 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints()); |
titzer
2013/12/09 14:49:28
I don't know why we assert is_compiled()
Yang
2013/12/10 11:22:04
The assumption is that we compiled unoptimized code …
| |
9475 ASSERT(!IsOptimized()); | 9475 ASSERT(!IsOptimized()); |
9476 ASSERT(shared()->allows_lazy_compilation() || | 9476 ASSERT(shared()->allows_lazy_compilation() || |
9477 code()->optimizable()); | 9477 code()->optimizable()); |
9478 ASSERT(!shared()->is_generator()); | 9478 ASSERT(!shared()->is_generator()); |
9479 set_code_no_write_barrier( | 9479 set_code_no_write_barrier( |
9480 GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile)); | 9480 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized)); |
9481 // No write barrier required, since the builtin is part of the root set. | 9481 // No write barrier required, since the builtin is part of the root set. |
9482 } | 9482 } |
9483 | 9483 |
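On titzer's question above about asserting is_compiled(): the two marking functions in this patch never compile anything themselves — they only swap the function's code entry to a trampoline builtin (kCompileOptimized / kCompileOptimizedConcurrent), and the tier-up happens on the next call; asserting is_compiled() guards that unoptimized code already exists before the entry point is redirected. A minimal standalone C++ analogy of that pattern (all names below are illustrative, not V8 API):

#include <cstdio>

// Analogy of the mark-for-optimization pattern: a function object holds a
// code pointer; "marking" retargets it to a trampoline that installs
// optimized code (here: fakes it) on the next invocation.
struct Function;
using Code = void (*)(Function&);

struct Function {
  Code code;              // current entry point (unoptimized or trampoline)
  bool optimized = false;
};

void Unoptimized(Function&) { std::puts("running unoptimized code"); }
void OptimizedCode(Function&) { std::puts("running optimized code"); }

// Stands in for the Builtins::kCompileOptimized trampoline: on entry it
// installs optimized code, then runs it.
void CompileOptimizedTrampoline(Function& f) {
  f.code = OptimizedCode;  // ReplaceCode() analogue
  f.optimized = true;
  f.code(f);
}

// Analogue of MarkForCompileOptimized(): only a pointer swap, no compile.
void MarkForCompileOptimized(Function& f) {
  f.code = CompileOptimizedTrampoline;
}

int main() {
  Function f{Unoptimized};
  f.code(f);                   // unoptimized call
  MarkForCompileOptimized(f);  // cheap: just retargets the entry point
  f.code(f);                   // next call tiers up, then runs optimized
  f.code(f);                   // subsequent calls stay optimized
}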
9484 | 9484 |
9485 void JSFunction::MarkForConcurrentRecompilation() { | 9485 void JSFunction::MarkForCompileOptimizedConcurrent() { |
9486 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints()); | 9486 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints()); |
titzer
2013/12/09 14:49:28
Same.
| |
9487 ASSERT(!IsOptimized()); | 9487 ASSERT(!IsOptimized()); |
titzer
2013/12/09 14:49:28
Or this.
Yang
2013/12/10 11:22:04
To guard that we don't optimize something we already …
| |
9488 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable()); | 9488 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable()); |
9489 ASSERT(!shared()->is_generator()); | 9489 ASSERT(!shared()->is_generator()); |
titzer
2013/12/09 14:49:28
Or that.
Yang
2013/12/10 11:22:04
Generators are not optimizable, and we should have …
| |
9490 ASSERT(GetIsolate()->concurrent_recompilation_enabled()); | 9490 ASSERT(GetIsolate()->concurrent_recompilation_enabled()); |
9491 if (FLAG_trace_concurrent_recompilation) { | 9491 if (FLAG_trace_concurrent_recompilation) { |
9492 PrintF(" ** Marking "); | 9492 PrintF(" ** Marking "); |
9493 PrintName(); | 9493 PrintName(); |
9494 PrintF(" for concurrent recompilation.\n"); | 9494 PrintF(" for concurrent recompilation.\n"); |
9495 } | 9495 } |
9496 set_code_no_write_barrier( | 9496 set_code_no_write_barrier( |
9497 GetIsolate()->builtins()->builtin(Builtins::kConcurrentRecompile)); | 9497 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimizedConcurrent)); |
9498 // No write barrier required, since the builtin is part of the root set. | 9498 // No write barrier required, since the builtin is part of the root set. |
9499 } | 9499 } |
9500 | 9500 |
9501 | 9501 |
9502 void JSFunction::MarkInRecompileQueue() { | 9502 void JSFunction::MarkInOptimizationQueue() { |
9503 // We can only arrive here via the concurrent-recompilation builtin. If | 9503 // We can only arrive here via the concurrent-recompilation builtin. If |
9504 // break points were set, the code would point to the lazy-compile builtin. | 9504 // break points were set, the code would point to the lazy-compile builtin. |
9505 ASSERT(!GetIsolate()->DebuggerHasBreakPoints()); | 9505 ASSERT(!GetIsolate()->DebuggerHasBreakPoints()); |
9506 ASSERT(IsMarkedForConcurrentRecompilation() && !IsOptimized()); | 9506 ASSERT(IsMarkedForCompileOptimizedConcurrent() && !IsOptimized()); |
9507 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable()); | 9507 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable()); |
9508 ASSERT(GetIsolate()->concurrent_recompilation_enabled()); | 9508 ASSERT(GetIsolate()->concurrent_recompilation_enabled()); |
9509 if (FLAG_trace_concurrent_recompilation) { | 9509 if (FLAG_trace_concurrent_recompilation) { |
9510 PrintF(" ** Queueing "); | 9510 PrintF(" ** Queueing "); |
9511 PrintName(); | 9511 PrintName(); |
9512 PrintF(" for concurrent recompilation.\n"); | 9512 PrintF(" for concurrent recompilation.\n"); |
9513 } | 9513 } |
9514 set_code_no_write_barrier( | 9514 set_code_no_write_barrier( |
9515 GetIsolate()->builtins()->builtin(Builtins::kInRecompileQueue)); | 9515 GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue)); |
9516 // No write barrier required, since the builtin is part of the root set. | 9516 // No write barrier required, since the builtin is part of the root set. |
9517 } | 9517 } |
9518 | 9518 |
9519 | 9519 |
9520 static bool CompileLazyHelper(CompilationInfo* info, | |
9521 ClearExceptionFlag flag) { | |
9522 // Compile the source information to a code object. | |
9523 ASSERT(info->IsOptimizing() || !info->shared_info()->is_compiled()); | |
9524 ASSERT(!info->isolate()->has_pending_exception()); | |
9525 bool result = Compiler::CompileLazy(info); | |
9526 ASSERT(result != info->isolate()->has_pending_exception()); | |
9527 if (!result && flag == CLEAR_EXCEPTION) { | |
9528 info->isolate()->clear_pending_exception(); | |
9529 } | |
9530 return result; | |
9531 } | |
9532 | |
9533 | |
9534 bool SharedFunctionInfo::CompileLazy(Handle<SharedFunctionInfo> shared, | |
9535 ClearExceptionFlag flag) { | |
9536 ASSERT(shared->allows_lazy_compilation_without_context()); | |
9537 CompilationInfoWithZone info(shared); | |
9538 return CompileLazyHelper(&info, flag); | |
9539 } | |
9540 | |
9541 | |
9542 void SharedFunctionInfo::AddToOptimizedCodeMap( | 9520 void SharedFunctionInfo::AddToOptimizedCodeMap( |
9543 Handle<SharedFunctionInfo> shared, | 9521 Handle<SharedFunctionInfo> shared, |
9544 Handle<Context> native_context, | 9522 Handle<Context> native_context, |
9545 Handle<Code> code, | 9523 Handle<Code> code, |
9546 Handle<FixedArray> literals) { | 9524 Handle<FixedArray> literals) { |
9547 CALL_HEAP_FUNCTION_VOID( | 9525 CALL_HEAP_FUNCTION_VOID( |
9548 shared->GetIsolate(), | 9526 shared->GetIsolate(), |
9549 shared->AddToOptimizedCodeMap(*native_context, *code, *literals)); | 9527 shared->AddToOptimizedCodeMap(*native_context, *code, *literals)); |
9550 } | 9528 } |
9551 | 9529 |
(...skipping 40 matching lines...) | |
9592 ASSERT(Code::cast(new_code_map->get(i + 1))->kind() == | 9570 ASSERT(Code::cast(new_code_map->get(i + 1))->kind() == |
9593 Code::OPTIMIZED_FUNCTION); | 9571 Code::OPTIMIZED_FUNCTION); |
9594 ASSERT(new_code_map->get(i + 2)->IsFixedArray()); | 9572 ASSERT(new_code_map->get(i + 2)->IsFixedArray()); |
9595 } | 9573 } |
9596 #endif | 9574 #endif |
9597 set_optimized_code_map(new_code_map); | 9575 set_optimized_code_map(new_code_map); |
9598 return new_code_map; | 9576 return new_code_map; |
9599 } | 9577 } |
9600 | 9578 |
9601 | 9579 |
9602 void SharedFunctionInfo::InstallFromOptimizedCodeMap(JSFunction* function, | 9580 FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) { |
9603 int index) { | |
9604 ASSERT(index > kEntriesStart); | 9581 ASSERT(index > kEntriesStart); |
9605 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | 9582 FixedArray* code_map = FixedArray::cast(optimized_code_map()); |
9606 if (!bound()) { | 9583 if (!bound()) { |
9607 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1)); | 9584 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1)); |
9608 ASSERT(cached_literals != NULL); | 9585 ASSERT_NE(NULL, cached_literals); |
9609 function->set_literals(cached_literals); | 9586 return cached_literals; |
9610 } | 9587 } |
9611 Code* code = Code::cast(code_map->get(index)); | 9588 return NULL; |
9612 ASSERT(code != NULL); | |
9613 ASSERT(function->context()->native_context() == code_map->get(index - 1)); | |
9614 function->ReplaceCode(code); | |
9615 } | 9589 } |
9616 | 9590 |
9617 | 9591 |
9592 | |
9593 Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) { | |
9594 ASSERT(index > kEntriesStart); | |
titzer
2013/12/09 14:49:28
>=?
Yang
2013/12/10 11:22:04
Each entry of the optimized code map consists of native context, code, and literals …
| |
9595 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | |
9596 Code* code = Code::cast(code_map->get(index)); | |
9597 ASSERT_NE(NULL, code); | |
9598 return code; | |
9599 } | |
9600 | |
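On the >= question in the thread above: the accessors in this file fix the entry layout — the old Install path read the native context at index - 1, GetCodeFromOptimizedCodeMap reads code at index, and GetLiteralsFromOptimizedCodeMap reads literals at index + 1. A lookup therefore yields the index of the code slot, whose smallest possible value is kEntriesStart + 1, so the strict > is the tight bound. A standalone sketch of that indexing (the concrete constant values are assumptions for illustration):

#include <cassert>

// Sketch of the optimized code map layout as implied by the accessors in
// this file; constant values here are assumed, not quoted from V8.
constexpr int kNextMapIndex = 0;  // link slot used by the code flusher
constexpr int kEntriesStart = 1;  // first (context, code, literals) triple
constexpr int kEntryLength = 3;

// Entry i occupies [base, base + 2]: context, then code, then literals.
constexpr int CodeIndexOfEntry(int i) {
  return kEntriesStart + i * kEntryLength + 1;
}

int main() {
  // The smallest code-slot index is kEntriesStart + 1, so
  // ASSERT(index > kEntriesStart) is exact; >= would also admit the
  // context slot of the first entry.
  static_assert(CodeIndexOfEntry(0) == kEntriesStart + 1, "first code slot");
  for (int i = 0; i < 4; ++i) {
    int index = CodeIndexOfEntry(i);
    assert(index > kEntriesStart);
    int context_index = index - 1;   // read by the removed Install path
    int literals_index = index + 1;  // read by GetLiteralsFromOptimizedCodeMap
    (void)context_index;
    (void)literals_index;
  }
}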
9601 | |
9618 void SharedFunctionInfo::ClearOptimizedCodeMap() { | 9602 void SharedFunctionInfo::ClearOptimizedCodeMap() { |
9619 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | 9603 FixedArray* code_map = FixedArray::cast(optimized_code_map()); |
9620 | 9604 |
9621 // If the next map link slot is already used then the function was | 9605 // If the next map link slot is already used then the function was |
9622 // enqueued with code flushing and we remove it now. | 9606 // enqueued with code flushing and we remove it now. |
9623 if (!code_map->get(kNextMapIndex)->IsUndefined()) { | 9607 if (!code_map->get(kNextMapIndex)->IsUndefined()) { |
9624 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher(); | 9608 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher(); |
9625 flusher->EvictOptimizedCodeMap(this); | 9609 flusher->EvictOptimizedCodeMap(this); |
9626 } | 9610 } |
9627 | 9611 |
(...skipping 42 matching lines...) | |
9670 ASSERT(shrink_by % kEntryLength == 0); | 9654 ASSERT(shrink_by % kEntryLength == 0); |
9671 ASSERT(shrink_by <= code_map->length() - kEntriesStart); | 9655 ASSERT(shrink_by <= code_map->length() - kEntriesStart); |
9672 // Always trim even when array is cleared because of heap verifier. | 9656 // Always trim even when array is cleared because of heap verifier. |
9673 RightTrimFixedArray<FROM_GC>(GetHeap(), code_map, shrink_by); | 9657 RightTrimFixedArray<FROM_GC>(GetHeap(), code_map, shrink_by); |
9674 if (code_map->length() == kEntriesStart) { | 9658 if (code_map->length() == kEntriesStart) { |
9675 ClearOptimizedCodeMap(); | 9659 ClearOptimizedCodeMap(); |
9676 } | 9660 } |
9677 } | 9661 } |
9678 | 9662 |
9679 | 9663 |
9680 bool JSFunction::CompileLazy(Handle<JSFunction> function, | |
9681 ClearExceptionFlag flag) { | |
9682 bool result = true; | |
9683 if (function->shared()->is_compiled()) { | |
9684 function->ReplaceCode(function->shared()->code()); | |
9685 } else { | |
9686 ASSERT(function->shared()->allows_lazy_compilation()); | |
9687 CompilationInfoWithZone info(function); | |
9688 result = CompileLazyHelper(&info, flag); | |
9689 ASSERT(!result || function->is_compiled()); | |
9690 } | |
9691 return result; | |
9692 } | |
9693 | |
9694 | |
9695 Handle<Code> JSFunction::CompileOsr(Handle<JSFunction> function, | |
9696 BailoutId osr_ast_id, | |
9697 ClearExceptionFlag flag) { | |
9698 CompilationInfoWithZone info(function); | |
9699 info.SetOptimizing(osr_ast_id); | |
9700 if (CompileLazyHelper(&info, flag)) { | |
9701 // TODO(titzer): don't install the OSR code. | |
9702 // ASSERT(function->code() != *info.code()); | |
9703 return info.code(); | |
9704 } else { | |
9705 return Handle<Code>::null(); | |
9706 } | |
9707 } | |
9708 | |
9709 | |
9710 bool JSFunction::CompileOptimized(Handle<JSFunction> function, | |
9711 ClearExceptionFlag flag) { | |
9712 CompilationInfoWithZone info(function); | |
9713 info.SetOptimizing(BailoutId::None()); | |
9714 return CompileLazyHelper(&info, flag); | |
9715 } | |
9716 | |
9717 | |
9718 bool JSFunction::EnsureCompiled(Handle<JSFunction> function, | |
9719 ClearExceptionFlag flag) { | |
9720 return function->is_compiled() || CompileLazy(function, flag); | |
9721 } | |
9722 | |
9723 | |
9724 void JSObject::OptimizeAsPrototype(Handle<JSObject> object) { | 9664 void JSObject::OptimizeAsPrototype(Handle<JSObject> object) { |
9725 if (object->IsGlobalObject()) return; | 9665 if (object->IsGlobalObject()) return; |
9726 | 9666 |
9727 // Make sure prototypes are fast objects and their maps have the bit set | 9667 // Make sure prototypes are fast objects and their maps have the bit set |
9728 // so they remain fast. | 9668 // so they remain fast. |
9729 if (!object->HasFastProperties()) { | 9669 if (!object->HasFastProperties()) { |
9730 TransformToFastProperties(object, 0); | 9670 TransformToFastProperties(object, 0); |
9731 } | 9671 } |
9732 } | 9672 } |
9733 | 9673 |
(...skipping 6917 matching lines...) | |
16651 #define ERROR_MESSAGES_TEXTS(C, T) T, | 16591 #define ERROR_MESSAGES_TEXTS(C, T) T, |
16652 static const char* error_messages_[] = { | 16592 static const char* error_messages_[] = { |
16653 ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS) | 16593 ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS) |
16654 }; | 16594 }; |
16655 #undef ERROR_MESSAGES_TEXTS | 16595 #undef ERROR_MESSAGES_TEXTS |
16656 return error_messages_[reason]; | 16596 return error_messages_[reason]; |
16657 } | 16597 } |
16658 | 16598 |
16659 | 16599 |
16660 } } // namespace v8::internal | 16600 } } // namespace v8::internal |