| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 34 matching lines...) |
| 45 current_ = NULL; | 45 current_ = NULL; |
| 46 deoptimizing_code_list_ = NULL; | 46 deoptimizing_code_list_ = NULL; |
| 47 #ifdef ENABLE_DEBUGGER_SUPPORT | 47 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 48 deoptimized_frame_info_ = NULL; | 48 deoptimized_frame_info_ = NULL; |
| 49 #endif | 49 #endif |
| 50 } | 50 } |
| 51 | 51 |
| 52 | 52 |
| 53 DeoptimizerData::~DeoptimizerData() { | 53 DeoptimizerData::~DeoptimizerData() { |
| 54 if (eager_deoptimization_entry_code_ != NULL) { | 54 if (eager_deoptimization_entry_code_ != NULL) { |
| 55 eager_deoptimization_entry_code_->Free(EXECUTABLE); | 55 Isolate::Current()->memory_allocator()->Free( |
| 56 eager_deoptimization_entry_code_); |
| 56 eager_deoptimization_entry_code_ = NULL; | 57 eager_deoptimization_entry_code_ = NULL; |
| 57 } | 58 } |
| 58 if (lazy_deoptimization_entry_code_ != NULL) { | 59 if (lazy_deoptimization_entry_code_ != NULL) { |
| 59 lazy_deoptimization_entry_code_->Free(EXECUTABLE); | 60 Isolate::Current()->memory_allocator()->Free( |
| 61 lazy_deoptimization_entry_code_); |
| 60 lazy_deoptimization_entry_code_ = NULL; | 62 lazy_deoptimization_entry_code_ = NULL; |
| 61 } | 63 } |
| 62 } | 64 } |
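
The destructor change here mirrors the allocation-side change in CreateCode further down: the entry-code chunks are now MemoryChunks owned by the isolate's MemoryAllocator, so they are released through the allocator rather than freeing themselves the way the old LargeObjectChunks did. A toy version of that ownership shape, with stand-in names rather than V8's real API:

#include <cstdlib>

// Toy chunk/allocator pair: the chunk no longer knows how to free itself;
// the allocator that handed it out does.
struct MemoryChunk {
  void* start;
  size_t size;
};

class MemoryAllocator {
 public:
  MemoryChunk* AllocateChunk(size_t size) {
    MemoryChunk* c = new MemoryChunk;
    c->start = malloc(size);
    c->size = size;
    return c;
  }
  void Free(MemoryChunk* chunk) {
    free(chunk->start);
    delete chunk;
  }
};
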
| 63 | 65 |
| 64 | 66 |
| 65 #ifdef ENABLE_DEBUGGER_SUPPORT | 67 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 66 void DeoptimizerData::Iterate(ObjectVisitor* v) { | 68 void DeoptimizerData::Iterate(ObjectVisitor* v) { |
| 67 if (deoptimized_frame_info_ != NULL) { | 69 if (deoptimized_frame_info_ != NULL) { |
| 68 deoptimized_frame_info_->Iterate(v); | 70 deoptimized_frame_info_->Iterate(v); |
| 69 } | 71 } |
| 70 } | 72 } |
| 71 #endif | 73 #endif |
| 72 | 74 |
| 73 | 75 |
| 76 // We rely on this function not causing a GC. It is called from generated code |
| 77 // without having a real stack frame in place. |
| 74 Deoptimizer* Deoptimizer::New(JSFunction* function, | 78 Deoptimizer* Deoptimizer::New(JSFunction* function, |
| 75 BailoutType type, | 79 BailoutType type, |
| 76 unsigned bailout_id, | 80 unsigned bailout_id, |
| 77 Address from, | 81 Address from, |
| 78 int fp_to_sp_delta, | 82 int fp_to_sp_delta, |
| 79 Isolate* isolate) { | 83 Isolate* isolate) { |
| 80 ASSERT(isolate == Isolate::Current()); | 84 ASSERT(isolate == Isolate::Current()); |
| 81 Deoptimizer* deoptimizer = new Deoptimizer(isolate, | 85 Deoptimizer* deoptimizer = new Deoptimizer(isolate, |
| 82 function, | 86 function, |
| 83 type, | 87 type, |
| (...skipping 228 matching lines...) |
| 312 Code* optimized_code) | 316 Code* optimized_code) |
| 313 : isolate_(isolate), | 317 : isolate_(isolate), |
| 314 function_(function), | 318 function_(function), |
| 315 bailout_id_(bailout_id), | 319 bailout_id_(bailout_id), |
| 316 bailout_type_(type), | 320 bailout_type_(type), |
| 317 from_(from), | 321 from_(from), |
| 318 fp_to_sp_delta_(fp_to_sp_delta), | 322 fp_to_sp_delta_(fp_to_sp_delta), |
| 319 input_(NULL), | 323 input_(NULL), |
| 320 output_count_(0), | 324 output_count_(0), |
| 321 output_(NULL), | 325 output_(NULL), |
| 326 frame_alignment_marker_(isolate->heap()->frame_alignment_marker()), |
| 327 has_alignment_padding_(0), |
| 322 deferred_heap_numbers_(0) { | 328 deferred_heap_numbers_(0) { |
| 323 if (FLAG_trace_deopt && type != OSR) { | 329 if (FLAG_trace_deopt && type != OSR) { |
| 324 if (type == DEBUGGER) { | 330 if (type == DEBUGGER) { |
| 325 PrintF("**** DEOPT FOR DEBUGGER: "); | 331 PrintF("**** DEOPT FOR DEBUGGER: "); |
| 326 } else { | 332 } else { |
| 327 PrintF("**** DEOPT: "); | 333 PrintF("**** DEOPT: "); |
| 328 } | 334 } |
| 329 function->PrintName(); | 335 function->PrintName(); |
| 330 PrintF(" at bailout #%u, address 0x%" V8PRIxPTR ", frame size %d\n", | 336 PrintF(" at bailout #%u, address 0x%" V8PRIxPTR ", frame size %d\n", |
| 331 bailout_id, | 337 bailout_id, |
| 332 reinterpret_cast<intptr_t>(from), | 338 reinterpret_cast<intptr_t>(from), |
| 333 fp_to_sp_delta - (2 * kPointerSize)); | 339 fp_to_sp_delta - (2 * kPointerSize)); |
| 334 } else if (FLAG_trace_osr && type == OSR) { | 340 } else if (FLAG_trace_osr && type == OSR) { |
| 335 PrintF("**** OSR: "); | 341 PrintF("**** OSR: "); |
| 336 function->PrintName(); | 342 function->PrintName(); |
| 337 PrintF(" at ast id #%u, address 0x%" V8PRIxPTR ", frame size %d\n", | 343 PrintF(" at ast id #%u, address 0x%" V8PRIxPTR ", frame size %d\n", |
| 338 bailout_id, | 344 bailout_id, |
| 339 reinterpret_cast<intptr_t>(from), | 345 reinterpret_cast<intptr_t>(from), |
| 340 fp_to_sp_delta - (2 * kPointerSize)); | 346 fp_to_sp_delta - (2 * kPointerSize)); |
| 341 } | 347 } |
| 342 // Find the optimized code. | 348 // Find the optimized code. |
| 343 if (type == EAGER) { | 349 if (type == EAGER) { |
| 344 ASSERT(from == NULL); | 350 ASSERT(from == NULL); |
| 345 optimized_code_ = function_->code(); | 351 optimized_code_ = function_->code(); |
| 352 if (FLAG_trace_deopt && FLAG_code_comments) { |
| 353 // Print instruction associated with this bailout. |
| 354 const char* last_comment = NULL; |
| 355 int mask = RelocInfo::ModeMask(RelocInfo::COMMENT) |
| 356 | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); |
| 357 for (RelocIterator it(optimized_code_, mask); !it.done(); it.next()) { |
| 358 RelocInfo* info = it.rinfo(); |
| 359 if (info->rmode() == RelocInfo::COMMENT) { |
| 360 last_comment = reinterpret_cast<const char*>(info->data()); |
| 361 } |
| 362 if (info->rmode() == RelocInfo::RUNTIME_ENTRY) { |
| 363 unsigned id = Deoptimizer::GetDeoptimizationId( |
| 364 info->target_address(), Deoptimizer::EAGER); |
| 365 if (id == bailout_id && last_comment != NULL) { |
| 366 PrintF(" %s\n", last_comment); |
| 367 break; |
| 368 } |
| 369 } |
| 370 } |
| 371 } |
| 346 } else if (type == LAZY) { | 372 } else if (type == LAZY) { |
| 347 optimized_code_ = FindDeoptimizingCodeFromAddress(from); | 373 optimized_code_ = FindDeoptimizingCodeFromAddress(from); |
| 348 ASSERT(optimized_code_ != NULL); | 374 ASSERT(optimized_code_ != NULL); |
| 349 } else if (type == OSR) { | 375 } else if (type == OSR) { |
| 350 // The function has already been optimized and we're transitioning | 376 // The function has already been optimized and we're transitioning |
| 351 // from the unoptimized shared version to the optimized one in the | 377 // from the unoptimized shared version to the optimized one in the |
| 352 // function. The return address (from) points to unoptimized code. | 378 // function. The return address (from) points to unoptimized code. |
| 353 optimized_code_ = function_->code(); | 379 optimized_code_ = function_->code(); |
| 354 ASSERT(optimized_code_->kind() == Code::OPTIMIZED_FUNCTION); | 380 ASSERT(optimized_code_->kind() == Code::OPTIMIZED_FUNCTION); |
| 355 ASSERT(!optimized_code_->contains(from)); | 381 ASSERT(!optimized_code_->contains(from)); |
| (...skipping 23 matching lines...) |
| 379 delete[] output_; | 405 delete[] output_; |
| 380 input_ = NULL; | 406 input_ = NULL; |
| 381 output_ = NULL; | 407 output_ = NULL; |
| 382 ASSERT(!HEAP->allow_allocation(true)); | 408 ASSERT(!HEAP->allow_allocation(true)); |
| 383 } | 409 } |
| 384 | 410 |
| 385 | 411 |
| 386 Address Deoptimizer::GetDeoptimizationEntry(int id, BailoutType type) { | 412 Address Deoptimizer::GetDeoptimizationEntry(int id, BailoutType type) { |
| 387 ASSERT(id >= 0); | 413 ASSERT(id >= 0); |
| 388 if (id >= kNumberOfEntries) return NULL; | 414 if (id >= kNumberOfEntries) return NULL; |
| 389 LargeObjectChunk* base = NULL; | 415 MemoryChunk* base = NULL; |
| 390 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); | 416 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); |
| 391 if (type == EAGER) { | 417 if (type == EAGER) { |
| 392 if (data->eager_deoptimization_entry_code_ == NULL) { | 418 if (data->eager_deoptimization_entry_code_ == NULL) { |
| 393 data->eager_deoptimization_entry_code_ = CreateCode(type); | 419 data->eager_deoptimization_entry_code_ = CreateCode(type); |
| 394 } | 420 } |
| 395 base = data->eager_deoptimization_entry_code_; | 421 base = data->eager_deoptimization_entry_code_; |
| 396 } else { | 422 } else { |
| 397 if (data->lazy_deoptimization_entry_code_ == NULL) { | 423 if (data->lazy_deoptimization_entry_code_ == NULL) { |
| 398 data->lazy_deoptimization_entry_code_ = CreateCode(type); | 424 data->lazy_deoptimization_entry_code_ = CreateCode(type); |
| 399 } | 425 } |
| 400 base = data->lazy_deoptimization_entry_code_; | 426 base = data->lazy_deoptimization_entry_code_; |
| 401 } | 427 } |
| 402 return | 428 return |
| 403 static_cast<Address>(base->GetStartAddress()) + (id * table_entry_size_); | 429 static_cast<Address>(base->body()) + (id * table_entry_size_); |
| 404 } | 430 } |
| 405 | 431 |
| 406 | 432 |
| 407 int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) { | 433 int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) { |
| 408 LargeObjectChunk* base = NULL; | 434 MemoryChunk* base = NULL; |
| 409 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); | 435 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); |
| 410 if (type == EAGER) { | 436 if (type == EAGER) { |
| 411 base = data->eager_deoptimization_entry_code_; | 437 base = data->eager_deoptimization_entry_code_; |
| 412 } else { | 438 } else { |
| 413 base = data->lazy_deoptimization_entry_code_; | 439 base = data->lazy_deoptimization_entry_code_; |
| 414 } | 440 } |
| 415 if (base == NULL || | 441 if (base == NULL || |
| 416 addr < base->GetStartAddress() || | 442 addr < base->body() || |
| 417 addr >= base->GetStartAddress() + | 443 addr >= base->body() + |
| 418 (kNumberOfEntries * table_entry_size_)) { | 444 (kNumberOfEntries * table_entry_size_)) { |
| 419 return kNotDeoptimizationEntry; | 445 return kNotDeoptimizationEntry; |
| 420 } | 446 } |
| 421 ASSERT_EQ(0, | 447 ASSERT_EQ(0, |
| 422 static_cast<int>(addr - base->GetStartAddress()) % table_entry_size_); | 448 static_cast<int>(addr - base->body()) % table_entry_size_); |
| 423 return static_cast<int>(addr - base->GetStartAddress()) / table_entry_size_; | 449 return static_cast<int>(addr - base->body()) / table_entry_size_; |
| 424 } | 450 } |
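
GetDeoptimizationEntry and GetDeoptimizationId are inverses over one contiguous executable chunk: an entry id maps to body() + id * table_entry_size_, and an address maps back by the same fixed stride. A minimal standalone sketch of that round trip; the constants are illustrative, not V8's per-architecture values:

#include <cassert>

static const int kNumEntries = 4096;   // stand-in for kNumberOfEntries
static const int kEntrySize = 10;      // stand-in for table_entry_size_

typedef unsigned char* Address;

// id -> address: fixed-stride indexing into the contiguous entry table.
Address EntryFromId(Address table_start, int id) {
  assert(id >= 0 && id < kNumEntries);
  return table_start + id * kEntrySize;
}

// address -> id: the inverse; -1 plays the role of kNotDeoptimizationEntry.
int IdFromEntry(Address table_start, Address addr) {
  if (addr < table_start ||
      addr >= table_start + kNumEntries * kEntrySize) {
    return -1;
  }
  assert((addr - table_start) % kEntrySize == 0);
  return static_cast<int>((addr - table_start) / kEntrySize);
}

Note the lazy creation in GetDeoptimizationEntry: the table is only assembled the first time an entry is requested, which is why GetDeoptimizationId must tolerate a NULL base.
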
| 425 | 451 |
| 426 | 452 |
| 427 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data, | 453 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data, |
| 428 unsigned id, | 454 unsigned id, |
| 429 SharedFunctionInfo* shared) { | 455 SharedFunctionInfo* shared) { |
| 430 // TODO(kasperl): For now, we do a simple linear search for the PC | 456 // TODO(kasperl): For now, we do a simple linear search for the PC |
| 431 // offset associated with the given node id. This should probably be | 457 // offset associated with the given node id. This should probably be |
| 432 // changed to a binary search. | 458 // changed to a binary search. |
| 433 int length = data->DeoptPoints(); | 459 int length = data->DeoptPoints(); |
| (...skipping 21 matching lines...) |
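
The TODO above in GetOutputInfo asks for the linear scan over deopt points to become a binary search. Assuming the (ast id, pc offset) pairs were kept sorted by ast id, a sketch of that replacement could look like the following; DeoptPoint and its fields are hypothetical stand-ins for DeoptimizationOutputData's packed layout:

// Hypothetical flattened view of one deopt point record.
struct DeoptPoint {
  unsigned ast_id;
  int pc_and_state;
};

// Binary search for the pc-and-state word of a given ast id. Precondition
// (and the reason the TODO is nontrivial): points[] must be sorted by ast_id.
int FindPcAndState(const DeoptPoint* points, int length, unsigned id) {
  int lo = 0;
  int hi = length - 1;
  while (lo <= hi) {
    int mid = lo + (hi - lo) / 2;
    if (points[mid].ast_id == id) return points[mid].pc_and_state;
    if (points[mid].ast_id < id) {
      lo = mid + 1;
    } else {
      hi = mid - 1;
    }
  }
  return -1;  // not found; the real code treats this as a fatal error
}
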
| 455 DeoptimizingCodeListNode* node = | 481 DeoptimizingCodeListNode* node = |
| 456 isolate->deoptimizer_data()->deoptimizing_code_list_; | 482 isolate->deoptimizer_data()->deoptimizing_code_list_; |
| 457 while (node != NULL) { | 483 while (node != NULL) { |
| 458 length++; | 484 length++; |
| 459 node = node->next(); | 485 node = node->next(); |
| 460 } | 486 } |
| 461 return length; | 487 return length; |
| 462 } | 488 } |
| 463 | 489 |
| 464 | 490 |
| 491 // We rely on this function not causing a GC. It is called from generated code |
| 492 // without having a real stack frame in place. |
| 465 void Deoptimizer::DoComputeOutputFrames() { | 493 void Deoptimizer::DoComputeOutputFrames() { |
| 466 if (bailout_type_ == OSR) { | 494 if (bailout_type_ == OSR) { |
| 467 DoComputeOsrOutputFrame(); | 495 DoComputeOsrOutputFrame(); |
| 468 return; | 496 return; |
| 469 } | 497 } |
| 470 | 498 |
| 471 // Print some helpful diagnostic information. | 499 // Print some helpful diagnostic information. |
| 472 int64_t start = OS::Ticks(); | 500 int64_t start = OS::Ticks(); |
| 473 if (FLAG_trace_deopt) { | 501 if (FLAG_trace_deopt) { |
| 474 PrintF("[deoptimizing%s: begin 0x%08" V8PRIxPTR " ", | 502 PrintF("[deoptimizing%s: begin 0x%08" V8PRIxPTR " ", |
| (...skipping 131 matching lines...) |
| 606 case Translation::FRAME: | 634 case Translation::FRAME: |
| 607 case Translation::DUPLICATE: | 635 case Translation::DUPLICATE: |
| 608 UNREACHABLE(); | 636 UNREACHABLE(); |
| 609 return; | 637 return; |
| 610 | 638 |
| 611 case Translation::REGISTER: { | 639 case Translation::REGISTER: { |
| 612 int input_reg = iterator->Next(); | 640 int input_reg = iterator->Next(); |
| 613 intptr_t input_value = input_->GetRegister(input_reg); | 641 intptr_t input_value = input_->GetRegister(input_reg); |
| 614 if (FLAG_trace_deopt) { | 642 if (FLAG_trace_deopt) { |
| 615 PrintF( | 643 PrintF( |
| 616 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s\n", | 644 " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s ", |
| 617 output_[frame_index]->GetTop() + output_offset, | 645 output_[frame_index]->GetTop() + output_offset, |
| 618 output_offset, | 646 output_offset, |
| 619 input_value, | 647 input_value, |
| 620 converter.NameOfCPURegister(input_reg)); | 648 converter.NameOfCPURegister(input_reg)); |
| 649 reinterpret_cast<Object*>(input_value)->ShortPrint(); |
| 650 PrintF("\n"); |
| 621 } | 651 } |
| 622 output_[frame_index]->SetFrameSlot(output_offset, input_value); | 652 output_[frame_index]->SetFrameSlot(output_offset, input_value); |
| 623 return; | 653 return; |
| 624 } | 654 } |
| 625 | 655 |
| 626 case Translation::INT32_REGISTER: { | 656 case Translation::INT32_REGISTER: { |
| 627 int input_reg = iterator->Next(); | 657 int input_reg = iterator->Next(); |
| 628 intptr_t value = input_->GetRegister(input_reg); | 658 intptr_t value = input_->GetRegister(input_reg); |
| 629 bool is_smi = Smi::IsValid(value); | 659 bool is_smi = Smi::IsValid(value); |
| 630 if (FLAG_trace_deopt) { | 660 if (FLAG_trace_deopt) { |
| (...skipping 37 matching lines...) |
| 668 } | 698 } |
| 669 | 699 |
| 670 case Translation::STACK_SLOT: { | 700 case Translation::STACK_SLOT: { |
| 671 int input_slot_index = iterator->Next(); | 701 int input_slot_index = iterator->Next(); |
| 672 unsigned input_offset = | 702 unsigned input_offset = |
| 673 input_->GetOffsetFromSlotIndex(this, input_slot_index); | 703 input_->GetOffsetFromSlotIndex(this, input_slot_index); |
| 674 intptr_t input_value = input_->GetFrameSlot(input_offset); | 704 intptr_t input_value = input_->GetFrameSlot(input_offset); |
| 675 if (FLAG_trace_deopt) { | 705 if (FLAG_trace_deopt) { |
| 676 PrintF(" 0x%08" V8PRIxPTR ": ", | 706 PrintF(" 0x%08" V8PRIxPTR ": ", |
| 677 output_[frame_index]->GetTop() + output_offset); | 707 output_[frame_index]->GetTop() + output_offset); |
| 678 PrintF("[top + %d] <- 0x%08" V8PRIxPTR " ; [esp + %d]\n", | 708 PrintF("[top + %d] <- 0x%08" V8PRIxPTR " ; [esp + %d] ", |
| 679 output_offset, | 709 output_offset, |
| 680 input_value, | 710 input_value, |
| 681 input_offset); | 711 input_offset); |
| 712 reinterpret_cast<Object*>(input_value)->ShortPrint(); |
| 713 PrintF("\n"); |
| 682 } | 714 } |
| 683 output_[frame_index]->SetFrameSlot(output_offset, input_value); | 715 output_[frame_index]->SetFrameSlot(output_offset, input_value); |
| 684 return; | 716 return; |
| 685 } | 717 } |
| 686 | 718 |
| 687 case Translation::INT32_STACK_SLOT: { | 719 case Translation::INT32_STACK_SLOT: { |
| 688 int input_slot_index = iterator->Next(); | 720 int input_slot_index = iterator->Next(); |
| 689 unsigned input_offset = | 721 unsigned input_offset = |
| 690 input_->GetOffsetFromSlotIndex(this, input_slot_index); | 722 input_->GetOffsetFromSlotIndex(this, input_slot_index); |
| 691 intptr_t value = input_->GetFrameSlot(input_offset); | 723 intptr_t value = input_->GetFrameSlot(input_offset); |
| (...skipping 254 matching lines...) |
| 946 // Iterate over the stack check table and patch every stack check | 978 // Iterate over the stack check table and patch every stack check |
| 947 // call to an unconditional call to the replacement code. | 979 // call to an unconditional call to the replacement code. |
| 948 ASSERT(unoptimized_code->kind() == Code::FUNCTION); | 980 ASSERT(unoptimized_code->kind() == Code::FUNCTION); |
| 949 Address stack_check_cursor = unoptimized_code->instruction_start() + | 981 Address stack_check_cursor = unoptimized_code->instruction_start() + |
| 950 unoptimized_code->stack_check_table_offset(); | 982 unoptimized_code->stack_check_table_offset(); |
| 951 uint32_t table_length = Memory::uint32_at(stack_check_cursor); | 983 uint32_t table_length = Memory::uint32_at(stack_check_cursor); |
| 952 stack_check_cursor += kIntSize; | 984 stack_check_cursor += kIntSize; |
| 953 for (uint32_t i = 0; i < table_length; ++i) { | 985 for (uint32_t i = 0; i < table_length; ++i) { |
| 954 uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize); | 986 uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize); |
| 955 Address pc_after = unoptimized_code->instruction_start() + pc_offset; | 987 Address pc_after = unoptimized_code->instruction_start() + pc_offset; |
| 956 PatchStackCheckCodeAt(pc_after, check_code, replacement_code); | 988 PatchStackCheckCodeAt(unoptimized_code, |
| 989 pc_after, |
| 990 check_code, |
| 991 replacement_code); |
| 957 stack_check_cursor += 2 * kIntSize; | 992 stack_check_cursor += 2 * kIntSize; |
| 958 } | 993 } |
| 959 } | 994 } |
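
Both PatchStackCheckCode and RevertStackCheckCode below walk the same flat table appended to the unoptimized code: one 32-bit length word followed by length two-word entries, of which the patch loop reads only the second word (the pc offset) at cursor + kIntSize before stepping by 2 * kIntSize. A standalone sketch of that walk; the layout arithmetic is taken from the code above, but reading the first entry word as an ast id is an assumption:

#include <cstdint>
#include <cstdio>
#include <cstring>

// Walks a [length][id, pc_offset]* table. 'table' points at the length
// word, i.e. instruction_start() + stack_check_table_offset() above.
void WalkStackCheckTable(const unsigned char* table) {
  uint32_t length;
  memcpy(&length, table, sizeof(length));
  const unsigned char* cursor = table + sizeof(uint32_t);
  for (uint32_t i = 0; i < length; ++i) {
    uint32_t id, pc_offset;
    memcpy(&id, cursor, sizeof(id));
    memcpy(&pc_offset, cursor + sizeof(uint32_t), sizeof(pc_offset));
    printf("entry %u: id %u, patch site at pc offset %u\n",
           (unsigned) i, (unsigned) id, (unsigned) pc_offset);
    cursor += 2 * sizeof(uint32_t);  // the 2 * kIntSize stride above
  }
}
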
| 960 | 995 |
| 961 | 996 |
| 962 void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code, | 997 void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code, |
| 963 Code* check_code, | 998 Code* check_code, |
| 964 Code* replacement_code) { | 999 Code* replacement_code) { |
| 965 // Iterate over the stack check table and revert the patched | 1000 // Iterate over the stack check table and revert the patched |
| 966 // stack check calls. | 1001 // stack check calls. |
| (...skipping 65 matching lines...) |
| 1032 | 1067 |
| 1033 | 1068 |
| 1034 void Deoptimizer::AddDoubleValue(intptr_t slot_address, | 1069 void Deoptimizer::AddDoubleValue(intptr_t slot_address, |
| 1035 double value) { | 1070 double value) { |
| 1036 HeapNumberMaterializationDescriptor value_desc( | 1071 HeapNumberMaterializationDescriptor value_desc( |
| 1037 reinterpret_cast<Address>(slot_address), value); | 1072 reinterpret_cast<Address>(slot_address), value); |
| 1038 deferred_heap_numbers_.Add(value_desc); | 1073 deferred_heap_numbers_.Add(value_desc); |
| 1039 } | 1074 } |
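
AddDoubleValue only records where a heap number will be needed; the actual allocation happens later, once GC is allowed again, because frame translation itself must not allocate (see the no-GC comments above). A minimal sketch of that record-now-allocate-later pattern; the descriptor here keeps only the two fields used at this call site, and a plain double slot stands in for writing a tagged HeapNumber pointer:

#include <cstddef>
#include <vector>

// Stand-in for HeapNumberMaterializationDescriptor: remember the frame
// slot and the value, do not allocate yet.
struct DeferredHeapNumber {
  double* slot;   // where the materialized value must eventually land
  double value;
};

class DeferredMaterializer {
 public:
  // Called during frame translation, while allocation is forbidden.
  void AddDoubleValue(double* slot, double value) {
    DeferredHeapNumber d = { slot, value };
    deferred_.push_back(d);
  }

  // Called afterwards, when allocation is safe again. The real code
  // allocates a HeapNumber and stores its tagged pointer into the slot.
  void MaterializeAll() {
    for (size_t i = 0; i < deferred_.size(); ++i) {
      *deferred_[i].slot = deferred_[i].value;
    }
    deferred_.clear();
  }

 private:
  std::vector<DeferredHeapNumber> deferred_;
};
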
| 1040 | 1075 |
| 1041 | 1076 |
| 1042 LargeObjectChunk* Deoptimizer::CreateCode(BailoutType type) { | 1077 MemoryChunk* Deoptimizer::CreateCode(BailoutType type) { |
| 1043 // We cannot run this if the serializer is enabled because this will | 1078 // We cannot run this if the serializer is enabled because this will |
| 1044 // cause us to emit relocation information for the external | 1079 // cause us to emit relocation information for the external |
| 1045 // references. This is fine because the deoptimizer's code section | 1080 // references. This is fine because the deoptimizer's code section |
| 1046 // isn't meant to be serialized at all. | 1081 // isn't meant to be serialized at all. |
| 1047 ASSERT(!Serializer::enabled()); | 1082 ASSERT(!Serializer::enabled()); |
| 1048 | 1083 |
| 1049 MacroAssembler masm(Isolate::Current(), NULL, 16 * KB); | 1084 MacroAssembler masm(Isolate::Current(), NULL, 16 * KB); |
| 1050 masm.set_emit_debug_code(false); | 1085 masm.set_emit_debug_code(false); |
| 1051 GenerateDeoptimizationEntries(&masm, kNumberOfEntries, type); | 1086 GenerateDeoptimizationEntries(&masm, kNumberOfEntries, type); |
| 1052 CodeDesc desc; | 1087 CodeDesc desc; |
| 1053 masm.GetCode(&desc); | 1088 masm.GetCode(&desc); |
| 1054 ASSERT(desc.reloc_size == 0); | 1089 ASSERT(desc.reloc_size == 0); |
| 1055 | 1090 |
| 1056 LargeObjectChunk* chunk = LargeObjectChunk::New(desc.instr_size, EXECUTABLE); | 1091 MemoryChunk* chunk = |
| 1092 Isolate::Current()->memory_allocator()->AllocateChunk(desc.instr_size, |
| 1093 EXECUTABLE, |
| 1094 NULL); |
| 1057 if (chunk == NULL) { | 1095 if (chunk == NULL) { |
| 1058 V8::FatalProcessOutOfMemory("Not enough memory for deoptimization table"); | 1096 V8::FatalProcessOutOfMemory("Not enough memory for deoptimization table"); |
| 1059 } | 1097 } |
| 1060 memcpy(chunk->GetStartAddress(), desc.buffer, desc.instr_size); | 1098 memcpy(chunk->body(), desc.buffer, desc.instr_size); |
| 1061 CPU::FlushICache(chunk->GetStartAddress(), desc.instr_size); | 1099 CPU::FlushICache(chunk->body(), desc.instr_size); |
| 1062 return chunk; | 1100 return chunk; |
| 1063 } | 1101 } |
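
CreateCode follows the standard sequence for turning assembler output into live code: assemble into a scratch buffer, allocate an executable chunk, memcpy the instructions across, then flush the instruction cache so no stale bytes get executed. A sketch of the same sequence outside V8; POSIX mmap stands in for MemoryAllocator::AllocateChunk, and GCC/Clang's __builtin___clear_cache stands in for CPU::FlushICache, so this assumes a POSIX system and one of those compilers:

#include <cstddef>
#include <cstring>
#include <sys/mman.h>

// Copies generated machine code into executable memory and returns its
// start, or NULL on failure (the real code calls FatalProcessOutOfMemory).
void* InstallCode(const void* buffer, size_t size) {
  // Mapping W+X in one step keeps the sketch short; hardened code maps
  // read-write first and flips to read-execute after the copy.
  void* chunk = mmap(NULL, size, PROT_READ | PROT_WRITE | PROT_EXEC,
                     MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (chunk == MAP_FAILED) return NULL;
  memcpy(chunk, buffer, size);
  // Flush the icache for the copied range, like CPU::FlushICache above.
  __builtin___clear_cache(static_cast<char*>(chunk),
                          static_cast<char*>(chunk) + size);
  return chunk;
}
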
| 1064 | 1102 |
| 1065 | 1103 |
| 1066 Code* Deoptimizer::FindDeoptimizingCodeFromAddress(Address addr) { | 1104 Code* Deoptimizer::FindDeoptimizingCodeFromAddress(Address addr) { |
| 1067 DeoptimizingCodeListNode* node = | 1105 DeoptimizingCodeListNode* node = |
| 1068 Isolate::Current()->deoptimizer_data()->deoptimizing_code_list_; | 1106 Isolate::Current()->deoptimizer_data()->deoptimizing_code_list_; |
| 1069 while (node != NULL) { | 1107 while (node != NULL) { |
| 1070 if (node->code()->contains(addr)) return *node->code(); | 1108 if (node->code()->contains(addr)) return *node->code(); |
| 1071 node = node->next(); | 1109 node = node->next(); |
| (...skipping 392 matching lines...) |
| 1464 | 1502 |
| 1465 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 1503 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { |
| 1466 v->VisitPointer(BitCast<Object**>(&function_)); | 1504 v->VisitPointer(BitCast<Object**>(&function_)); |
| 1467 v->VisitPointers(parameters_, parameters_ + parameters_count_); | 1505 v->VisitPointers(parameters_, parameters_ + parameters_count_); |
| 1468 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 1506 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); |
| 1469 } | 1507 } |
| 1470 | 1508 |
| 1471 #endif // ENABLE_DEBUGGER_SUPPORT | 1509 #endif // ENABLE_DEBUGGER_SUPPORT |
| 1472 | 1510 |
| 1473 } } // namespace v8::internal | 1511 } } // namespace v8::internal |