Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 23 matching lines...) | |
| 34 #include "full-codegen.h" | 34 #include "full-codegen.h" |
| 35 #include "global-handles.h" | 35 #include "global-handles.h" |
| 36 #include "macro-assembler.h" | 36 #include "macro-assembler.h" |
| 37 #include "prettyprinter.h" | 37 #include "prettyprinter.h" |
| 38 | 38 |
| 39 | 39 |
| 40 namespace v8 { | 40 namespace v8 { |
| 41 namespace internal { | 41 namespace internal { |
| 42 | 42 |
| 43 DeoptimizerData::DeoptimizerData() { | 43 DeoptimizerData::DeoptimizerData() { |
| 44 eager_deoptimization_entry_code_ = NULL; | 44 eager_deoptimization_entry_code_entries_ = -1; |
| 45 lazy_deoptimization_entry_code_ = NULL; | 45 lazy_deoptimization_entry_code_entries_ = -1; |
| 46 size_t deopt_table_size = Deoptimizer::GetMaxDeoptTableSize(); | |
| 47 eager_deoptimization_entry_code_ = new VirtualMemory(deopt_table_size); | |
| 48 lazy_deoptimization_entry_code_ = new VirtualMemory(deopt_table_size); | |
| 46 current_ = NULL; | 49 current_ = NULL; |
| 47 deoptimizing_code_list_ = NULL; | 50 deoptimizing_code_list_ = NULL; |
| 48 #ifdef ENABLE_DEBUGGER_SUPPORT | 51 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 49 deoptimized_frame_info_ = NULL; | 52 deoptimized_frame_info_ = NULL; |
| 50 #endif | 53 #endif |
| 51 } | 54 } |
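The constructor now reserves virtual memory for both entry tables up front (sized by `Deoptimizer::GetMaxDeoptTableSize()`) instead of allocating a MemoryChunk the first time an entry is needed; the pages are only committed later, when `EnsureCodeForDeoptimizationEntry` actually writes code into them. Below is a minimal, self-contained sketch of that reserve-then-commit pattern using POSIX `mmap`/`mprotect` rather than V8's `VirtualMemory` wrapper; the size and names are illustrative only.

```cpp
// Sketch of the reserve-then-commit pattern the new constructor relies on
// (POSIX illustration only; V8 wraps the equivalent platform calls in its
// VirtualMemory class).
#include <sys/mman.h>
#include <cassert>
#include <cstddef>
#include <cstdio>

int main() {
  const size_t kTableSize = 64 * 1024;  // assumed reservation size

  // Reserve address space without committing any backing pages.
  void* base = mmap(nullptr, kTableSize, PROT_NONE,
                    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  assert(base != MAP_FAILED);

  // Later, when code must actually be written, commit the pages as
  // readable/writable/executable (mirrors memory->Commit(..., true)).
  // Note: strict W^X policies on some systems may refuse this combination.
  int rc = mprotect(base, kTableSize, PROT_READ | PROT_WRITE | PROT_EXEC);
  assert(rc == 0);

  std::printf("reserved and committed %zu bytes at %p\n", kTableSize, base);
  munmap(base, kTableSize);
  return 0;
}
```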
| 52 | 55 |
| 53 | 56 |
| 54 DeoptimizerData::~DeoptimizerData() { | 57 DeoptimizerData::~DeoptimizerData() { |
| 55 if (eager_deoptimization_entry_code_ != NULL) { | 58 if (eager_deoptimization_entry_code_ != NULL) { |
Jakob Kummerow (2012/11/06 11:40:50): nit: you can simplify this a bit by removing the c
danno (2012/11/06 11:59:03): Done.
| 56 Isolate::Current()->memory_allocator()->Free( | 59 delete eager_deoptimization_entry_code_; |
| 57 eager_deoptimization_entry_code_); | |
| 58 eager_deoptimization_entry_code_ = NULL; | 60 eager_deoptimization_entry_code_ = NULL; |
| 59 } | 61 } |
| 60 if (lazy_deoptimization_entry_code_ != NULL) { | 62 if (lazy_deoptimization_entry_code_ != NULL) { |
| 61 Isolate::Current()->memory_allocator()->Free( | 63 delete lazy_deoptimization_entry_code_; |
| 62 lazy_deoptimization_entry_code_); | |
| 63 lazy_deoptimization_entry_code_ = NULL; | 64 lazy_deoptimization_entry_code_ = NULL; |
| 64 } | 65 } |
| 65 DeoptimizingCodeListNode* current = deoptimizing_code_list_; | 66 DeoptimizingCodeListNode* current = deoptimizing_code_list_; |
| 66 while (current != NULL) { | 67 while (current != NULL) { |
| 67 DeoptimizingCodeListNode* prev = current; | 68 DeoptimizingCodeListNode* prev = current; |
| 68 current = current->next(); | 69 current = current->next(); |
| 69 delete prev; | 70 delete prev; |
| 70 } | 71 } |
| 71 deoptimizing_code_list_ = NULL; | 72 deoptimizing_code_list_ = NULL; |
| 72 } | 73 } |
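The destructor now releases the tables with plain `delete` instead of `Isolate::Current()->memory_allocator()->Free()`. The truncated nit above most likely refers to the surrounding NULL checks, which `delete` does not need, since deleting a null pointer is a no-op in C++. A minimal illustration (the `Table` type is hypothetical):

```cpp
// Deleting a null pointer is defined to be a no-op in C++, so the guard
// around `delete` can be dropped.
#include <cstdio>

struct Table { ~Table() { std::puts("table freed"); } };

int main() {
  Table* eager = nullptr;
  delete eager;       // safe: no-op, nothing printed
  eager = new Table();
  delete eager;       // prints "table freed"
  return 0;
}
```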
| (...skipping 23 matching lines...) | |
| 96 bailout_id, | 97 bailout_id, |
| 97 from, | 98 from, |
| 98 fp_to_sp_delta, | 99 fp_to_sp_delta, |
| 99 NULL); | 100 NULL); |
| 100 ASSERT(isolate->deoptimizer_data()->current_ == NULL); | 101 ASSERT(isolate->deoptimizer_data()->current_ == NULL); |
| 101 isolate->deoptimizer_data()->current_ = deoptimizer; | 102 isolate->deoptimizer_data()->current_ = deoptimizer; |
| 102 return deoptimizer; | 103 return deoptimizer; |
| 103 } | 104 } |
| 104 | 105 |
| 105 | 106 |
| 107 // No larger than 2K on all platforms | |
| 108 static const int kDeoptTableMaxEpilogueCodeSize = 2 * 1024; | |
Jakob Kummerow (2012/11/06 11:40:50): nit: 2 * KB
| 109 | |
| 110 | |
| 111 size_t Deoptimizer::GetMaxDeoptTableSize() { | |
| 112 size_t entries_size = | |
| 113 Deoptimizer::kMaxNumberOfEntries * Deoptimizer::table_entry_size_; | |
| 114 int page_count = ((kDeoptTableMaxEpilogueCodeSize + entries_size - 1) / | |
| 115 OS::CommitPageSize()) + 1; | |
| 116 return OS::CommitPageSize() * page_count; | |
| 117 } | |
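`GetMaxDeoptTableSize` sizes the reservation for the worst case: every possible entry plus up to `kDeoptTableMaxEpilogueCodeSize` of epilogue code, rounded up to whole commit pages. A worked sketch of that arithmetic, with assumed values for the platform-dependent constants (entry count, entry size, page size):

```cpp
// Page-rounding arithmetic from GetMaxDeoptTableSize, with assumed values.
#include <cstddef>
#include <cstdio>

int main() {
  const size_t kMaxNumberOfEntries = 16384;  // assumed entry count
  const size_t kTableEntrySize = 12;         // assumed per-entry code size
  const size_t kEpilogueSize = 2 * 1024;     // kDeoptTableMaxEpilogueCodeSize
  const size_t kPageSize = 4 * 1024;         // assumed OS::CommitPageSize()

  size_t entries_size = kMaxNumberOfEntries * kTableEntrySize;
  size_t page_count = ((kEpilogueSize + entries_size - 1) / kPageSize) + 1;
  size_t table_size = kPageSize * page_count;

  // 16384 * 12 = 196608; (196608 + 2048 - 1) / 4096 = 48; +1 -> 49 pages
  // = 200704 bytes reserved for the table.
  std::printf("table reserved: %zu bytes (%zu pages)\n", table_size, page_count);
  return 0;
}
```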
| 118 | |
| 119 | |
| 106 Deoptimizer* Deoptimizer::Grab(Isolate* isolate) { | 120 Deoptimizer* Deoptimizer::Grab(Isolate* isolate) { |
| 107 ASSERT(isolate == Isolate::Current()); | 121 ASSERT(isolate == Isolate::Current()); |
| 108 Deoptimizer* result = isolate->deoptimizer_data()->current_; | 122 Deoptimizer* result = isolate->deoptimizer_data()->current_; |
| 109 ASSERT(result != NULL); | 123 ASSERT(result != NULL); |
| 110 result->DeleteFrameDescriptions(); | 124 result->DeleteFrameDescriptions(); |
| 111 isolate->deoptimizer_data()->current_ = NULL; | 125 isolate->deoptimizer_data()->current_ = NULL; |
| 112 return result; | 126 return result; |
| 113 } | 127 } |
| 114 | 128 |
| 115 | 129 |
| (...skipping 338 matching lines...) | |
| 454 for (int i = 0; i < output_count_; ++i) { | 468 for (int i = 0; i < output_count_; ++i) { |
| 455 if (output_[i] != input_) delete output_[i]; | 469 if (output_[i] != input_) delete output_[i]; |
| 456 } | 470 } |
| 457 delete[] output_; | 471 delete[] output_; |
| 458 input_ = NULL; | 472 input_ = NULL; |
| 459 output_ = NULL; | 473 output_ = NULL; |
| 460 ASSERT(!HEAP->allow_allocation(true)); | 474 ASSERT(!HEAP->allow_allocation(true)); |
| 461 } | 475 } |
| 462 | 476 |
| 463 | 477 |
| 464 Address Deoptimizer::GetDeoptimizationEntry(int id, BailoutType type) { | 478 Address Deoptimizer::GetDeoptimizationEntry(int id, BailoutType type, |
Jakob Kummerow (2012/11/06 11:40:50): nit: one line per argument
danno (2012/11/06 11:59:03): Done.
| 479 GetEntryMode mode) { | |
| 465 ASSERT(id >= 0); | 480 ASSERT(id >= 0); |
| 466 if (id >= kNumberOfEntries) return NULL; | 481 if (id >= kMaxNumberOfEntries) return NULL; |
| 467 MemoryChunk* base = NULL; | 482 VirtualMemory* base = NULL; |
| 468 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); | 483 if (mode == ENSURE_ENTRY_CODE) { |
| 469 if (type == EAGER) { | 484 EnsureCodeForDeoptimizationEntry(type, id); |
| 470 if (data->eager_deoptimization_entry_code_ == NULL) { | |
| 471 data->eager_deoptimization_entry_code_ = CreateCode(type); | |
| 472 } | |
| 473 base = data->eager_deoptimization_entry_code_; | |
| 474 } else { | 485 } else { |
| 475 if (data->lazy_deoptimization_entry_code_ == NULL) { | 486 ASSERT(mode == CALCULATE_ENTRY_ADDRESS); |
| 476 data->lazy_deoptimization_entry_code_ = CreateCode(type); | |
| 477 } | |
| 478 base = data->lazy_deoptimization_entry_code_; | |
| 479 } | 487 } |
| 480 return | |
| 481 static_cast<Address>(base->area_start()) + (id * table_entry_size_); | |
| 482 } | |
| 483 | |
| 484 | |
| 485 int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) { | |
| 486 MemoryChunk* base = NULL; | |
| 487 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); | 488 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); |
| 488 if (type == EAGER) { | 489 if (type == EAGER) { |
| 489 base = data->eager_deoptimization_entry_code_; | 490 base = data->eager_deoptimization_entry_code_; |
| 490 } else { | 491 } else { |
| 491 base = data->lazy_deoptimization_entry_code_; | 492 base = data->lazy_deoptimization_entry_code_; |
| 492 } | 493 } |
| 494 return | |
| 495 static_cast<Address>(base->address()) + (id * table_entry_size_); | |
| 496 } | |
| 497 | |
| 498 | |
| 499 int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) { | |
| 500 VirtualMemory* base = NULL; | |
| 501 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); | |
| 502 if (type == EAGER) { | |
| 503 base = data->eager_deoptimization_entry_code_; | |
| 504 } else { | |
| 505 base = data->lazy_deoptimization_entry_code_; | |
| 506 } | |
| 507 Address base_casted = reinterpret_cast<Address>(base->address()); | |
| 493 if (base == NULL || | 508 if (base == NULL || |
| 494 addr < base->area_start() || | 509 addr < base->address() || |
| 495 addr >= base->area_start() + | 510 addr >= base_casted + (kMaxNumberOfEntries * table_entry_size_)) { |
| 496 (kNumberOfEntries * table_entry_size_)) { | |
| 497 return kNotDeoptimizationEntry; | 511 return kNotDeoptimizationEntry; |
| 498 } | 512 } |
| 499 ASSERT_EQ(0, | 513 ASSERT_EQ(0, |
| 500 static_cast<int>(addr - base->area_start()) % table_entry_size_); | 514 static_cast<int>(addr - base_casted) % table_entry_size_); |
| 501 return static_cast<int>(addr - base->area_start()) / table_entry_size_; | 515 return static_cast<int>(addr - base_casted) / table_entry_size_; |
| 502 } | 516 } |
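The entries are laid out at a fixed stride, so `GetDeoptimizationEntry` computes `base + id * table_entry_size_` and `GetDeoptimizationId` inverts that computation, rejecting addresses outside the table. A sketch of the mapping with an assumed stride and entry count; note the real code asserts entry alignment rather than returning the sentinel:

```cpp
// Entry-id <-> address mapping as in GetDeoptimizationEntry /
// GetDeoptimizationId (stride and entry count are assumed for illustration).
#include <cstdint>
#include <cstdio>

const int kNotDeoptimizationEntry = -1;
const int kMaxNumberOfEntries = 16384;  // assumed
const int kTableEntrySize = 12;         // assumed stride

uintptr_t EntryAddress(uintptr_t base, int id) {
  return base + static_cast<uintptr_t>(id) * kTableEntrySize;
}

int EntryId(uintptr_t base, uintptr_t addr) {
  uintptr_t limit =
      base + kMaxNumberOfEntries * static_cast<uintptr_t>(kTableEntrySize);
  if (addr < base || addr >= limit) return kNotDeoptimizationEntry;
  if ((addr - base) % kTableEntrySize != 0) return kNotDeoptimizationEntry;
  return static_cast<int>((addr - base) / kTableEntrySize);
}

int main() {
  uintptr_t base = 0x10000;
  uintptr_t addr = EntryAddress(base, 7);
  std::printf("entry 7 -> %p -> id %d\n",
              reinterpret_cast<void*>(addr), EntryId(base, addr));
  return 0;
}
```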
| 503 | 517 |
| 504 | 518 |
| 505 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data, | 519 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data, |
| 506 BailoutId id, | 520 BailoutId id, |
| 507 SharedFunctionInfo* shared) { | 521 SharedFunctionInfo* shared) { |
| 508 // TODO(kasperl): For now, we do a simple linear search for the PC | 522 // TODO(kasperl): For now, we do a simple linear search for the PC |
| 509 // offset associated with the given node id. This should probably be | 523 // offset associated with the given node id. This should probably be |
| 510 // changed to a binary search. | 524 // changed to a binary search. |
| 511 int length = data->DeoptPoints(); | 525 int length = data->DeoptPoints(); |
| (...skipping 865 matching lines...) | |
| 1377 } | 1391 } |
| 1378 | 1392 |
| 1379 | 1393 |
| 1380 void Deoptimizer::AddDoubleValue(intptr_t slot_address, double value) { | 1394 void Deoptimizer::AddDoubleValue(intptr_t slot_address, double value) { |
| 1381 HeapNumberMaterializationDescriptor value_desc( | 1395 HeapNumberMaterializationDescriptor value_desc( |
| 1382 reinterpret_cast<Address>(slot_address), value); | 1396 reinterpret_cast<Address>(slot_address), value); |
| 1383 deferred_heap_numbers_.Add(value_desc); | 1397 deferred_heap_numbers_.Add(value_desc); |
| 1384 } | 1398 } |
| 1385 | 1399 |
| 1386 | 1400 |
| 1387 MemoryChunk* Deoptimizer::CreateCode(BailoutType type) { | 1401 void Deoptimizer::EnsureCodeForDeoptimizationEntry(BailoutType type, |
| 1402 int max_entry_id) { | |
| 1388 // We cannot run this if the serializer is enabled because this will | 1403 // We cannot run this if the serializer is enabled because this will |
| 1389 // cause us to emit relocation information for the external | 1404 // cause us to emit relocation information for the external |
| 1390 // references. This is fine because the deoptimizer's code section | 1405 // references. This is fine because the deoptimizer's code section |
| 1391 // isn't meant to be serialized at all. | 1406 // isn't meant to be serialized at all. |
| 1392 ASSERT(!Serializer::enabled()); | 1407 ASSERT(!Serializer::enabled()); |
| 1393 | 1408 |
| 1409 ASSERT(type == EAGER || type == LAZY); | |
| 1410 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); | |
| 1411 int entry_count = (type == EAGER) | |
| 1412 ? data->eager_deoptimization_entry_code_entries_ | |
| 1413 : data->lazy_deoptimization_entry_code_entries_; | |
| 1414 if (max_entry_id < entry_count) return; | |
| 1415 entry_count = Min(Max(entry_count * 2, Deoptimizer::kMinNumberOfEntries), | |
| 1416 Deoptimizer::kMaxNumberOfEntries); | |
| 1417 | |
| 1394 MacroAssembler masm(Isolate::Current(), NULL, 16 * KB); | 1418 MacroAssembler masm(Isolate::Current(), NULL, 16 * KB); |
| 1395 masm.set_emit_debug_code(false); | 1419 masm.set_emit_debug_code(false); |
| 1396 GenerateDeoptimizationEntries(&masm, kNumberOfEntries, type); | 1420 GenerateDeoptimizationEntries(&masm, entry_count, type); |
| 1397 CodeDesc desc; | 1421 CodeDesc desc; |
| 1398 masm.GetCode(&desc); | 1422 masm.GetCode(&desc); |
| 1399 ASSERT(desc.reloc_size == 0); | 1423 ASSERT(desc.reloc_size == 0); |
| 1400 | 1424 |
| 1401 MemoryChunk* chunk = | 1425 VirtualMemory* memory = type == EAGER |
| 1402 Isolate::Current()->memory_allocator()->AllocateChunk(desc.instr_size, | 1426 ? data->eager_deoptimization_entry_code_ |
| 1403 EXECUTABLE, | 1427 : data->lazy_deoptimization_entry_code_; |
| 1404 NULL); | 1428 size_t table_size = Deoptimizer::GetMaxDeoptTableSize(); |
| 1405 ASSERT(chunk->area_size() >= desc.instr_size); | 1429 ASSERT(static_cast<int>(table_size) >= desc.instr_size); |
| 1406 if (chunk == NULL) { | 1430 memory->Commit(memory->address(), table_size, true); |
| 1407 V8::FatalProcessOutOfMemory("Not enough memory for deoptimization table"); | 1431 memcpy(memory->address(), desc.buffer, desc.instr_size); |
| 1432 CPU::FlushICache(memory->address(), desc.instr_size); | |
| 1433 | |
| 1434 if (type == EAGER) { | |
| 1435 data->eager_deoptimization_entry_code_entries_ = entry_count; | |
| 1436 } else { | |
| 1437 data->lazy_deoptimization_entry_code_entries_ = entry_count; | |
| 1408 } | 1438 } |
| 1409 memcpy(chunk->area_start(), desc.buffer, desc.instr_size); | |
| 1410 CPU::FlushICache(chunk->area_start(), desc.instr_size); | |
| 1411 return chunk; | |
| 1412 } | 1439 } |
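`EnsureCodeForDeoptimizationEntry` regenerates the stub table only when the requested entry id is not yet covered, doubling the entry count (bounded below by `kMinNumberOfEntries` and above by `kMaxNumberOfEntries`) before assembling the new table into the pre-reserved memory. A small sketch of just that growth policy, with assumed values for the two bounds:

```cpp
// Growth policy from EnsureCodeForDeoptimizationEntry (bounds are assumed).
#include <algorithm>
#include <cstdio>

const int kMinNumberOfEntries = 64;     // assumed lower bound
const int kMaxNumberOfEntries = 16384;  // assumed upper bound

// Returns the new entry count, or the old one if the id is already covered.
int GrowEntryCount(int current_entries, int max_entry_id) {
  if (max_entry_id < current_entries) return current_entries;
  return std::min(std::max(current_entries * 2, kMinNumberOfEntries),
                  kMaxNumberOfEntries);
}

int main() {
  int entries = -1;  // matches the -1 sentinel set in DeoptimizerData()
  entries = GrowEntryCount(entries, 0);    // first request jumps to the minimum
  std::printf("after first request: %d\n", entries);  // 64
  entries = GrowEntryCount(entries, 100);  // id 100 exceeds 64: doubles
  std::printf("after id 100: %d\n", entries);          // 128
  return 0;
}
```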
| 1413 | 1440 |
| 1414 | 1441 |
| 1415 Code* Deoptimizer::FindDeoptimizingCodeFromAddress(Address addr) { | 1442 Code* Deoptimizer::FindDeoptimizingCodeFromAddress(Address addr) { |
| 1416 DeoptimizingCodeListNode* node = | 1443 DeoptimizingCodeListNode* node = |
| 1417 Isolate::Current()->deoptimizer_data()->deoptimizing_code_list_; | 1444 Isolate::Current()->deoptimizer_data()->deoptimizing_code_list_; |
| 1418 while (node != NULL) { | 1445 while (node != NULL) { |
| 1419 if (node->code()->contains(addr)) return *node->code(); | 1446 if (node->code()->contains(addr)) return *node->code(); |
| 1420 node = node->next(); | 1447 node = node->next(); |
| 1421 } | 1448 } |
| (...skipping 574 matching lines...) | |
| 1996 | 2023 |
| 1997 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 2024 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { |
| 1998 v->VisitPointer(BitCast<Object**>(&function_)); | 2025 v->VisitPointer(BitCast<Object**>(&function_)); |
| 1999 v->VisitPointers(parameters_, parameters_ + parameters_count_); | 2026 v->VisitPointers(parameters_, parameters_ + parameters_count_); |
| 2000 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 2027 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); |
| 2001 } | 2028 } |
| 2002 | 2029 |
| 2003 #endif // ENABLE_DEBUGGER_SUPPORT | 2030 #endif // ENABLE_DEBUGGER_SUPPORT |
| 2004 | 2031 |
| 2005 } } // namespace v8::internal | 2032 } } // namespace v8::internal |