OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 34 matching lines...) |
45 current_ = NULL; | 45 current_ = NULL; |
46 deoptimizing_code_list_ = NULL; | 46 deoptimizing_code_list_ = NULL; |
47 #ifdef ENABLE_DEBUGGER_SUPPORT | 47 #ifdef ENABLE_DEBUGGER_SUPPORT |
48 deoptimized_frame_info_ = NULL; | 48 deoptimized_frame_info_ = NULL; |
49 #endif | 49 #endif |
50 } | 50 } |
51 | 51 |
52 | 52 |
53 DeoptimizerData::~DeoptimizerData() { | 53 DeoptimizerData::~DeoptimizerData() { |
54 if (eager_deoptimization_entry_code_ != NULL) { | 54 if (eager_deoptimization_entry_code_ != NULL) { |
55 eager_deoptimization_entry_code_->Free(EXECUTABLE); | 55 Isolate::Current()->memory_allocator()->Free( |
| 56 eager_deoptimization_entry_code_); |
56 eager_deoptimization_entry_code_ = NULL; | 57 eager_deoptimization_entry_code_ = NULL; |
57 } | 58 } |
58 if (lazy_deoptimization_entry_code_ != NULL) { | 59 if (lazy_deoptimization_entry_code_ != NULL) { |
59 lazy_deoptimization_entry_code_->Free(EXECUTABLE); | 60 Isolate::Current()->memory_allocator()->Free( |
| 61 lazy_deoptimization_entry_code_); |
60 lazy_deoptimization_entry_code_ = NULL; | 62 lazy_deoptimization_entry_code_ = NULL; |
61 } | 63 } |
62 } | 64 } |
63 | 65 |
64 | 66 |
65 #ifdef ENABLE_DEBUGGER_SUPPORT | 67 #ifdef ENABLE_DEBUGGER_SUPPORT |
66 void DeoptimizerData::Iterate(ObjectVisitor* v) { | 68 void DeoptimizerData::Iterate(ObjectVisitor* v) { |
67 if (deoptimized_frame_info_ != NULL) { | 69 if (deoptimized_frame_info_ != NULL) { |
68 deoptimized_frame_info_->Iterate(v); | 70 deoptimized_frame_info_->Iterate(v); |
69 } | 71 } |
(...skipping 311 matching lines...) |
381 delete[] output_; | 383 delete[] output_; |
382 input_ = NULL; | 384 input_ = NULL; |
383 output_ = NULL; | 385 output_ = NULL; |
384 ASSERT(!HEAP->allow_allocation(true)); | 386 ASSERT(!HEAP->allow_allocation(true)); |
385 } | 387 } |
386 | 388 |
387 | 389 |
388 Address Deoptimizer::GetDeoptimizationEntry(int id, BailoutType type) { | 390 Address Deoptimizer::GetDeoptimizationEntry(int id, BailoutType type) { |
389 ASSERT(id >= 0); | 391 ASSERT(id >= 0); |
390 if (id >= kNumberOfEntries) return NULL; | 392 if (id >= kNumberOfEntries) return NULL; |
391 LargeObjectChunk* base = NULL; | 393 MemoryChunk* base = NULL; |
392 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); | 394 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); |
393 if (type == EAGER) { | 395 if (type == EAGER) { |
394 if (data->eager_deoptimization_entry_code_ == NULL) { | 396 if (data->eager_deoptimization_entry_code_ == NULL) { |
395 data->eager_deoptimization_entry_code_ = CreateCode(type); | 397 data->eager_deoptimization_entry_code_ = CreateCode(type); |
396 } | 398 } |
397 base = data->eager_deoptimization_entry_code_; | 399 base = data->eager_deoptimization_entry_code_; |
398 } else { | 400 } else { |
399 if (data->lazy_deoptimization_entry_code_ == NULL) { | 401 if (data->lazy_deoptimization_entry_code_ == NULL) { |
400 data->lazy_deoptimization_entry_code_ = CreateCode(type); | 402 data->lazy_deoptimization_entry_code_ = CreateCode(type); |
401 } | 403 } |
402 base = data->lazy_deoptimization_entry_code_; | 404 base = data->lazy_deoptimization_entry_code_; |
403 } | 405 } |
404 return | 406 return |
405 static_cast<Address>(base->GetStartAddress()) + (id * table_entry_size_); | 407 static_cast<Address>(base->body()) + (id * table_entry_size_); |
406 } | 408 } |
407 | 409 |
408 | 410 |
409 int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) { | 411 int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) { |
410 LargeObjectChunk* base = NULL; | 412 MemoryChunk* base = NULL; |
411 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); | 413 DeoptimizerData* data = Isolate::Current()->deoptimizer_data(); |
412 if (type == EAGER) { | 414 if (type == EAGER) { |
413 base = data->eager_deoptimization_entry_code_; | 415 base = data->eager_deoptimization_entry_code_; |
414 } else { | 416 } else { |
415 base = data->lazy_deoptimization_entry_code_; | 417 base = data->lazy_deoptimization_entry_code_; |
416 } | 418 } |
417 if (base == NULL || | 419 if (base == NULL || |
418 addr < base->GetStartAddress() || | 420 addr < base->body() || |
419 addr >= base->GetStartAddress() + | 421 addr >= base->body() + |
420 (kNumberOfEntries * table_entry_size_)) { | 422 (kNumberOfEntries * table_entry_size_)) { |
421 return kNotDeoptimizationEntry; | 423 return kNotDeoptimizationEntry; |
422 } | 424 } |
423 ASSERT_EQ(0, | 425 ASSERT_EQ(0, |
424 static_cast<int>(addr - base->GetStartAddress()) % table_entry_size_); | 426 static_cast<int>(addr - base->body()) % table_entry_size_); |
425 return static_cast<int>(addr - base->GetStartAddress()) / table_entry_size_; | 427 return static_cast<int>(addr - base->body()) / table_entry_size_; |
426 } | 428 } |
427 | 429 |
428 | 430 |
429 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data, | 431 int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data, |
430 unsigned id, | 432 unsigned id, |
431 SharedFunctionInfo* shared) { | 433 SharedFunctionInfo* shared) { |
432 // TODO(kasperl): For now, we do a simple linear search for the PC | 434 // TODO(kasperl): For now, we do a simple linear search for the PC |
433 // offset associated with the given node id. This should probably be | 435 // offset associated with the given node id. This should probably be |
434 // changed to a binary search. | 436 // changed to a binary search. |
435 int length = data->DeoptPoints(); | 437 int length = data->DeoptPoints(); |
(...skipping 514 matching lines...) |
950 // Iterate over the stack check table and patch every stack check | 952 // Iterate over the stack check table and patch every stack check |
951 // call to an unconditional call to the replacement code. | 953 // call to an unconditional call to the replacement code. |
952 ASSERT(unoptimized_code->kind() == Code::FUNCTION); | 954 ASSERT(unoptimized_code->kind() == Code::FUNCTION); |
953 Address stack_check_cursor = unoptimized_code->instruction_start() + | 955 Address stack_check_cursor = unoptimized_code->instruction_start() + |
954 unoptimized_code->stack_check_table_offset(); | 956 unoptimized_code->stack_check_table_offset(); |
955 uint32_t table_length = Memory::uint32_at(stack_check_cursor); | 957 uint32_t table_length = Memory::uint32_at(stack_check_cursor); |
956 stack_check_cursor += kIntSize; | 958 stack_check_cursor += kIntSize; |
957 for (uint32_t i = 0; i < table_length; ++i) { | 959 for (uint32_t i = 0; i < table_length; ++i) { |
958 uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize); | 960 uint32_t pc_offset = Memory::uint32_at(stack_check_cursor + kIntSize); |
959 Address pc_after = unoptimized_code->instruction_start() + pc_offset; | 961 Address pc_after = unoptimized_code->instruction_start() + pc_offset; |
960 PatchStackCheckCodeAt(pc_after, check_code, replacement_code); | 962 PatchStackCheckCodeAt(unoptimized_code, |
| 963 pc_after, |
| 964 check_code, |
| 965 replacement_code); |
961 stack_check_cursor += 2 * kIntSize; | 966 stack_check_cursor += 2 * kIntSize; |
962 } | 967 } |
963 } | 968 } |
964 | 969 |
965 | 970 |
966 void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code, | 971 void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code, |
967 Code* check_code, | 972 Code* check_code, |
968 Code* replacement_code) { | 973 Code* replacement_code) { |
969 // Iterate over the stack check table and revert the patched | 974 // Iterate over the stack check table and revert the patched |
970 // stack check calls. | 975 // stack check calls. |
(...skipping 65 matching lines...) |
1036 | 1041 |
1037 | 1042 |
1038 void Deoptimizer::AddDoubleValue(intptr_t slot_address, | 1043 void Deoptimizer::AddDoubleValue(intptr_t slot_address, |
1039 double value) { | 1044 double value) { |
1040 HeapNumberMaterializationDescriptor value_desc( | 1045 HeapNumberMaterializationDescriptor value_desc( |
1041 reinterpret_cast<Address>(slot_address), value); | 1046 reinterpret_cast<Address>(slot_address), value); |
1042 deferred_heap_numbers_.Add(value_desc); | 1047 deferred_heap_numbers_.Add(value_desc); |
1043 } | 1048 } |
1044 | 1049 |
1045 | 1050 |
1046 LargeObjectChunk* Deoptimizer::CreateCode(BailoutType type) { | 1051 MemoryChunk* Deoptimizer::CreateCode(BailoutType type) { |
1047 // We cannot run this if the serializer is enabled because this will | 1052 // We cannot run this if the serializer is enabled because this will |
1048 // cause us to emit relocation information for the external | 1053 // cause us to emit relocation information for the external |
1049 // references. This is fine because the deoptimizer's code section | 1054 // references. This is fine because the deoptimizer's code section |
1050 // isn't meant to be serialized at all. | 1055 // isn't meant to be serialized at all. |
1051 ASSERT(!Serializer::enabled()); | 1056 ASSERT(!Serializer::enabled()); |
1052 | 1057 |
1053 MacroAssembler masm(Isolate::Current(), NULL, 16 * KB); | 1058 MacroAssembler masm(Isolate::Current(), NULL, 16 * KB); |
1054 masm.set_emit_debug_code(false); | 1059 masm.set_emit_debug_code(false); |
1055 GenerateDeoptimizationEntries(&masm, kNumberOfEntries, type); | 1060 GenerateDeoptimizationEntries(&masm, kNumberOfEntries, type); |
1056 CodeDesc desc; | 1061 CodeDesc desc; |
1057 masm.GetCode(&desc); | 1062 masm.GetCode(&desc); |
1058 ASSERT(desc.reloc_size == 0); | 1063 ASSERT(desc.reloc_size == 0); |
1059 | 1064 |
1060 LargeObjectChunk* chunk = LargeObjectChunk::New(desc.instr_size, EXECUTABLE); | 1065 MemoryChunk* chunk = |
| 1066 Isolate::Current()->memory_allocator()->AllocateChunk(desc.instr_size, |
| 1067 EXECUTABLE, |
| 1068 NULL); |
1061 if (chunk == NULL) { | 1069 if (chunk == NULL) { |
1062 V8::FatalProcessOutOfMemory("Not enough memory for deoptimization table"); | 1070 V8::FatalProcessOutOfMemory("Not enough memory for deoptimization table"); |
1063 } | 1071 } |
1064 memcpy(chunk->GetStartAddress(), desc.buffer, desc.instr_size); | 1072 memcpy(chunk->body(), desc.buffer, desc.instr_size); |
1065 CPU::FlushICache(chunk->GetStartAddress(), desc.instr_size); | 1073 CPU::FlushICache(chunk->body(), desc.instr_size); |
1066 return chunk; | 1074 return chunk; |
1067 } | 1075 } |
1068 | 1076 |
1069 | 1077 |
1070 Code* Deoptimizer::FindDeoptimizingCodeFromAddress(Address addr) { | 1078 Code* Deoptimizer::FindDeoptimizingCodeFromAddress(Address addr) { |
1071 DeoptimizingCodeListNode* node = | 1079 DeoptimizingCodeListNode* node = |
1072 Isolate::Current()->deoptimizer_data()->deoptimizing_code_list_; | 1080 Isolate::Current()->deoptimizer_data()->deoptimizing_code_list_; |
1073 while (node != NULL) { | 1081 while (node != NULL) { |
1074 if (node->code()->contains(addr)) return *node->code(); | 1082 if (node->code()->contains(addr)) return *node->code(); |
1075 node = node->next(); | 1083 node = node->next(); |
(...skipping 392 matching lines...) |
1468 | 1476 |
1469 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { | 1477 void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) { |
1470 v->VisitPointer(BitCast<Object**>(&function_)); | 1478 v->VisitPointer(BitCast<Object**>(&function_)); |
1471 v->VisitPointers(parameters_, parameters_ + parameters_count_); | 1479 v->VisitPointers(parameters_, parameters_ + parameters_count_); |
1472 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); | 1480 v->VisitPointers(expression_stack_, expression_stack_ + expression_count_); |
1473 } | 1481 } |
1474 | 1482 |
1475 #endif // ENABLE_DEBUGGER_SUPPORT | 1483 #endif // ENABLE_DEBUGGER_SUPPORT |
1476 | 1484 |
1477 } } // namespace v8::internal | 1485 } } // namespace v8::internal |