OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 102 matching lines...) | |
113 no_frame_ranges_ = isolate->cpu_profiler()->is_profiling() | 113 no_frame_ranges_ = isolate->cpu_profiler()->is_profiling() |
114 ? new List<OffsetRange>(2) : NULL; | 114 ? new List<OffsetRange>(2) : NULL; |
115 for (int i = 0; i < DependentCode::kGroupCount; i++) { | 115 for (int i = 0; i < DependentCode::kGroupCount; i++) { |
116 dependencies_[i] = NULL; | 116 dependencies_[i] = NULL; |
117 } | 117 } |
118 if (mode == STUB) { | 118 if (mode == STUB) { |
119 mode_ = STUB; | 119 mode_ = STUB; |
120 return; | 120 return; |
121 } | 121 } |
122 mode_ = V8::UseCrankshaft() ? mode : NONOPT; | 122 mode_ = V8::UseCrankshaft() ? mode : NONOPT; |
123 abort_due_to_map_dependency_ = false; | |
123 if (script_->type()->value() == Script::TYPE_NATIVE) { | 124 if (script_->type()->value() == Script::TYPE_NATIVE) { |
124 MarkAsNative(); | 125 MarkAsNative(); |
125 } | 126 } |
126 if (!shared_info_.is_null()) { | 127 if (!shared_info_.is_null()) { |
127 ASSERT(language_mode() == CLASSIC_MODE); | 128 ASSERT(language_mode() == CLASSIC_MODE); |
128 SetLanguageMode(shared_info_->language_mode()); | 129 SetLanguageMode(shared_info_->language_mode()); |
129 } | 130 } |
130 set_bailout_reason(kUnknown); | 131 set_bailout_reason(kUnknown); |
131 } | 132 } |
132 | 133 |
(...skipping 306 matching lines...) | |
439 ASSERT(!graph_builder_->inline_bailout() || graph_ == NULL); | 440 ASSERT(!graph_builder_->inline_bailout() || graph_ == NULL); |
440 if (graph_ == NULL) { | 441 if (graph_ == NULL) { |
441 if (graph_builder_->inline_bailout()) { | 442 if (graph_builder_->inline_bailout()) { |
442 info_->AbortOptimization(); | 443 info_->AbortOptimization(); |
443 return SetLastStatus(BAILED_OUT); | 444 return SetLastStatus(BAILED_OUT); |
444 } else { | 445 } else { |
445 return AbortOptimization(); | 446 return AbortOptimization(); |
446 } | 447 } |
447 } | 448 } |
448 | 449 |
450 if (info()->HasAbortedDueToDependencyChange()) { | |
451 info_->set_bailout_reason(kBailedOutDueToDependentMap); | |
Jakob Kummerow 2013/08/12 13:48:24: same naming nit here
452 info_->AbortOptimization(); | |
453 return SetLastStatus(BAILED_OUT); | |
454 } | |
455 | |
449 return SetLastStatus(SUCCEEDED); | 456 return SetLastStatus(SUCCEEDED); |
450 } | 457 } |
451 | 458 |
452 | 459 |
453 OptimizingCompiler::Status OptimizingCompiler::OptimizeGraph() { | 460 OptimizingCompiler::Status OptimizingCompiler::OptimizeGraph() { |
454 DisallowHeapAllocation no_allocation; | 461 DisallowHeapAllocation no_allocation; |
455 DisallowHandleAllocation no_handles; | 462 DisallowHandleAllocation no_handles; |
456 DisallowHandleDereference no_deref; | 463 DisallowHandleDereference no_deref; |
464 DisallowMapInvalidation no_map_invalidation; | |
457 | 465 |
458 ASSERT(last_status() == SUCCEEDED); | 466 ASSERT(last_status() == SUCCEEDED); |
459 Timer t(this, &time_taken_to_optimize_); | 467 Timer t(this, &time_taken_to_optimize_); |
460 ASSERT(graph_ != NULL); | 468 ASSERT(graph_ != NULL); |
461 BailoutReason bailout_reason = kNoReason; | 469 BailoutReason bailout_reason = kNoReason; |
462 if (!graph_->Optimize(&bailout_reason)) { | 470 if (!graph_->Optimize(&bailout_reason)) { |
463 if (bailout_reason == kNoReason) graph_builder_->Bailout(bailout_reason); | 471 if (bailout_reason == kNoReason) graph_builder_->Bailout(bailout_reason); |
464 return SetLastStatus(BAILED_OUT); | 472 return SetLastStatus(BAILED_OUT); |
465 } else { | 473 } else { |
466 chunk_ = LChunk::NewChunk(graph_); | 474 chunk_ = LChunk::NewChunk(graph_); |
467 if (chunk_ == NULL) { | 475 if (chunk_ == NULL) { |
468 return SetLastStatus(BAILED_OUT); | 476 return SetLastStatus(BAILED_OUT); |
469 } | 477 } |
470 } | 478 } |
471 return SetLastStatus(SUCCEEDED); | 479 return SetLastStatus(SUCCEEDED); |
472 } | 480 } |
473 | 481 |
474 | 482 |
475 OptimizingCompiler::Status OptimizingCompiler::GenerateAndInstallCode() { | 483 OptimizingCompiler::Status OptimizingCompiler::GenerateAndInstallCode() { |
476 ASSERT(last_status() == SUCCEEDED); | 484 ASSERT(last_status() == SUCCEEDED); |
485 ASSERT(!info()->HasAbortedDueToDependencyChange()); | |
486 DisallowMapInvalidation no_map_invalidation; | |
477 { // Scope for timer. | 487 { // Scope for timer. |
478 Timer timer(this, &time_taken_to_codegen_); | 488 Timer timer(this, &time_taken_to_codegen_); |
479 ASSERT(chunk_ != NULL); | 489 ASSERT(chunk_ != NULL); |
480 ASSERT(graph_ != NULL); | 490 ASSERT(graph_ != NULL); |
481 // Deferred handles reference objects that were accessible during | 491 // Deferred handles reference objects that were accessible during |
482 // graph creation. To make sure that we don't encounter inconsistencies | 492 // graph creation. To make sure that we don't encounter inconsistencies |
483 // between graph creation and code generation, we disallow accessing | 493 // between graph creation and code generation, we disallow accessing |
484 // objects through deferred handles during the latter, with exceptions. | 494 // objects through deferred handles during the latter, with exceptions. |
485 DisallowDeferredHandleDereference no_deferred_handle_deref; | 495 DisallowDeferredHandleDereference no_deferred_handle_deref; |
486 Handle<Code> optimized_code = chunk_->Codegen(); | 496 Handle<Code> optimized_code = chunk_->Codegen(); |
(...skipping 768 matching lines...) | |
1255 // Trace if the appropriate trace flag is set and the phase name's first | 1265 // Trace if the appropriate trace flag is set and the phase name's first |
1256 // character is in the FLAG_trace_phase command line parameter. | 1266 // character is in the FLAG_trace_phase command line parameter. |
1257 bool tracing_on = info()->IsStub() ? | 1267 bool tracing_on = info()->IsStub() ? |
1258 FLAG_trace_hydrogen_stubs : | 1268 FLAG_trace_hydrogen_stubs : |
1259 FLAG_trace_hydrogen; | 1269 FLAG_trace_hydrogen; |
1260 return (tracing_on && | 1270 return (tracing_on && |
1261 OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL); | 1271 OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL); |
1262 } | 1272 } |
1263 | 1273 |
1264 } } // namespace v8::internal | 1274 } } // namespace v8::internal |