OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 37 matching lines...)
48 : codegen_(codegen), | 48 : codegen_(codegen), |
49 pointers_(pointers), | 49 pointers_(pointers), |
50 deoptimization_index_(deoptimization_index), | 50 deoptimization_index_(deoptimization_index), |
51 ensure_reloc_space_(ensure_reloc_space) { } | 51 ensure_reloc_space_(ensure_reloc_space) { } |
52 virtual ~SafepointGenerator() { } | 52 virtual ~SafepointGenerator() { } |
53 | 53 |
54 virtual void Generate() { | 54 virtual void Generate() { |
55 // Ensure that we have enough space in the reloc info to patch | 55 // Ensure that we have enough space in the reloc info to patch |
56 // this with calls when doing deoptimization. | 56 // this with calls when doing deoptimization. |
57 if (ensure_reloc_space_) { | 57 if (ensure_reloc_space_) { |
58 codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true); | 58 codegen_->EnsureRelocSpaceForDeoptimization(); |
59 } | 59 } |
60 codegen_->RecordSafepoint(pointers_, deoptimization_index_); | 60 codegen_->RecordSafepoint(pointers_, deoptimization_index_); |
61 } | 61 } |
62 | 62 |
63 private: | 63 private: |
64 LCodeGen* codegen_; | 64 LCodeGen* codegen_; |
65 LPointerMap* pointers_; | 65 LPointerMap* pointers_; |
66 int deoptimization_index_; | 66 int deoptimization_index_; |
67 bool ensure_reloc_space_; | 67 bool ensure_reloc_space_; |
68 }; | 68 }; |
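
SafepointGenerator is a deferred callback: it is handed to call-emitting helpers, and its Generate() runs right after the call instruction is emitted, so the safepoint lands on the exact return address the deoptimizer will later patch. Below is a minimal self-contained C++ sketch of that pattern, assuming simplified stand-ins (SafepointRecorder, integer pc offsets) rather than V8's real SafepointTableBuilder and LPointerMap types.

    // Sketch of the deferred safepoint-generator pattern; all names and
    // offsets here are illustrative stand-ins for the V8 machinery above.
    #include <cstdio>
    #include <vector>

    struct SafepointRecorder {
      std::vector<int> offsets;
      void RecordSafepoint(int pc_offset) { offsets.push_back(pc_offset); }
    };

    class DeferredSafepoint {
     public:
      DeferredSafepoint(SafepointRecorder* recorder, int deopt_index)
          : recorder_(recorder), deopt_index_(deopt_index) {}
      virtual ~DeferredSafepoint() {}
      // Invoked right after a call is emitted, so the recorded offset is
      // the return address the deoptimizer will patch.
      virtual void Generate(int pc_offset) {
        recorder_->RecordSafepoint(pc_offset);
        std::printf("safepoint at %d (deopt index %d)\n",
                    pc_offset, deopt_index_);
      }

     private:
      SafepointRecorder* recorder_;
      int deopt_index_;
    };

    int main() {
      SafepointRecorder recorder;
      DeferredSafepoint safepoint(&recorder, 0);
      safepoint.Generate(42);  // as if a call just ended at offset 42
      return recorder.offsets.size() == 1 ? 0 : 1;
    }
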
69 | 69 |
70 | 70 |
71 #define __ masm()-> | 71 #define __ masm()-> |
72 | 72 |
73 bool LCodeGen::GenerateCode() { | 73 bool LCodeGen::GenerateCode() { |
74 HPhase phase("Code generation", chunk()); | 74 HPhase phase("Code generation", chunk()); |
75 ASSERT(is_unused()); | 75 ASSERT(is_unused()); |
76 status_ = GENERATING; | 76 status_ = GENERATING; |
77 CpuFeatures::Scope scope(SSE2); | 77 CpuFeatures::Scope scope(SSE2); |
78 return GeneratePrologue() && | 78 return GeneratePrologue() && |
79 GenerateBody() && | 79 GenerateBody() && |
80 GenerateDeferredCode() && | 80 GenerateDeferredCode() && |
| 81 GenerateRelocPadding() && |
81 GenerateSafepointTable(); | 82 GenerateSafepointTable(); |
82 } | 83 } |
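
Note the ordering in the new chain: GenerateRelocPadding() runs after GenerateDeferredCode(), so the relocation writer's size already reflects everything the body and deferred code emitted, and before GenerateSafepointTable(). Each phase returns false on abort, and && short-circuits so later phases never run for a failed compile. A tiny sketch of that control flow with stub phases:

    // Stub phases standing in for LCodeGen's; only the chaining matters.
    #include <cstdio>

    static bool GeneratePrologue()       { std::puts("prologue");   return true; }
    static bool GenerateBody()           { std::puts("body");       return true; }
    static bool GenerateDeferredCode()   { std::puts("deferred");   return true; }
    static bool GenerateRelocPadding()   { std::puts("padding");    return true; }
    static bool GenerateSafepointTable() { std::puts("safepoints"); return true; }

    int main() {
      // A false from any phase stops the chain, mirroring is_aborted().
      bool ok = GeneratePrologue() && GenerateBody() && GenerateDeferredCode() &&
                GenerateRelocPadding() && GenerateSafepointTable();
      return ok ? 0 : 1;
    }
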
83 | 84 |
84 | 85 |
85 void LCodeGen::FinishCode(Handle<Code> code) { | 86 void LCodeGen::FinishCode(Handle<Code> code) { |
86 ASSERT(is_done()); | 87 ASSERT(is_done()); |
87 code->set_stack_slots(StackSlotCount()); | 88 code->set_stack_slots(StackSlotCount()); |
88 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 89 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
89 PopulateDeoptimizationData(code); | 90 PopulateDeoptimizationData(code); |
90 } | 91 } |
(...skipping 24 matching lines...)
115 | 116 |
116 // Copy the string before recording it in the assembler to avoid | 117 // Copy the string before recording it in the assembler to avoid |
117 // issues when the stack allocated buffer goes out of scope. | 118 // issues when the stack allocated buffer goes out of scope. |
118 size_t length = builder.position(); | 119 size_t length = builder.position(); |
119 Vector<char> copy = Vector<char>::New(length + 1); | 120 Vector<char> copy = Vector<char>::New(length + 1); |
120 memcpy(copy.start(), builder.Finalize(), copy.length()); | 121 memcpy(copy.start(), builder.Finalize(), copy.length()); |
121 masm()->RecordComment(copy.start()); | 122 masm()->RecordComment(copy.start()); |
122 } | 123 } |
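
The copy above guards against a use-after-scope bug: the assembler stores the char* it is handed and only reads it later, when reloc info is serialized, by which time a stack-allocated buffer would already be dead. A minimal sketch of the hazard and the fix, using plain C++ in place of V8's Vector and string builder:

    // The assembler keeps the pointer, so the caller must hand it storage
    // that outlives the stack frame that built the message.
    #include <cstdio>
    #include <cstring>

    struct AssemblerSketch {
      const char* comment = nullptr;         // stored now, read much later
      void RecordComment(const char* msg) { comment = msg; }
    };

    int main() {
      AssemblerSketch masm;
      char* copy;
      {
        char stack_buf[32];                  // dies at the end of this scope
        std::snprintf(stack_buf, sizeof(stack_buf), "deopt id %d", 7);
        std::size_t length = std::strlen(stack_buf);
        copy = new char[length + 1];
        std::memcpy(copy, stack_buf, length + 1);
      }
      masm.RecordComment(copy);              // safe: heap copy still alive
      std::printf("%s\n", masm.comment);
      delete[] copy;
      return 0;
    }
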
123 | 124 |
124 | 125 |
| 126 bool LCodeGen::GenerateRelocPadding() { |
| 127 int reloc_size = masm()->relocation_writer_size(); |
| 128 while (reloc_size < deoptimization_reloc_size.min_size) {
| 129 __ RecordComment(RelocInfo::kFillerCommentString, true); |
| 130 reloc_size += RelocInfo::kRelocCommentSize; |
| 131 } |
| 132 return !is_aborted(); |
| 133 } |
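
GenerateRelocPadding() grows the relocation stream with fixed-size filler comments until it reaches the minimum size that patching deoptimization calls may require, so a later in-place patch can never overflow the stream. A self-contained sketch of the loop's arithmetic; the concrete sizes are illustrative, not V8's actual kRelocCommentSize or accounting:

    #include <cstdio>

    const int kRelocCommentSize = 4;  // illustrative filler-entry size

    int main() {
      int reloc_size = 10;       // bytes the relocation writer emitted so far
      const int min_size = 25;   // minimum that deopt patching may require
      int fillers = 0;
      // Same loop shape as GenerateRelocPadding() above.
      while (reloc_size < min_size) {
        ++fillers;               // stands in for RecordComment(filler, true)
        reloc_size += kRelocCommentSize;
      }
      std::printf("emitted %d fillers, final reloc_size = %d\n",
                  fillers, reloc_size);
    }
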
| 134 |
| 135 |
125 bool LCodeGen::GeneratePrologue() { | 136 bool LCodeGen::GeneratePrologue() { |
126 ASSERT(is_generating()); | 137 ASSERT(is_generating()); |
127 | 138 |
128 #ifdef DEBUG | 139 #ifdef DEBUG |
129 if (strlen(FLAG_stop_at) > 0 && | 140 if (strlen(FLAG_stop_at) > 0 && |
130 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { | 141 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { |
131 __ int3(); | 142 __ int3(); |
132 } | 143 } |
133 #endif | 144 #endif |
134 | 145 |
(...skipping 240 matching lines...)
375 } | 386 } |
376 | 387 |
377 | 388 |
378 void LCodeGen::CallCode(Handle<Code> code, | 389 void LCodeGen::CallCode(Handle<Code> code, |
379 RelocInfo::Mode mode, | 390 RelocInfo::Mode mode, |
380 LInstruction* instr, | 391 LInstruction* instr, |
381 bool adjusted) { | 392 bool adjusted) { |
382 ASSERT(instr != NULL); | 393 ASSERT(instr != NULL); |
383 LPointerMap* pointers = instr->pointer_map(); | 394 LPointerMap* pointers = instr->pointer_map(); |
384 RecordPosition(pointers->position()); | 395 RecordPosition(pointers->position()); |
| 396 |
385 if (!adjusted) { | 397 if (!adjusted) { |
386 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 398 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
387 } | 399 } |
388 __ call(code, mode); | 400 __ call(code, mode); |
| 401 |
| 402 EnsureRelocSpaceForDeoptimization(); |
389 RegisterLazyDeoptimization(instr); | 403 RegisterLazyDeoptimization(instr); |
390 | 404 |
391 // Signal that we don't inline smi code before these stubs in the | 405 // Signal that we don't inline smi code before these stubs in the |
392 // optimizing code generator. | 406 // optimizing code generator. |
393 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || | 407 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || |
394 code->kind() == Code::COMPARE_IC) { | 408 code->kind() == Code::COMPARE_IC) { |
395 __ nop(); | 409 __ nop(); |
396 } | 410 } |
397 } | 411 } |
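
CallCode() now reserves deoptimization reloc space after every code-object call, so each patchable call site feeds into the minimum that GenerateRelocPadding() later pads up to. The sketch below shows one plausible reading of that bookkeeping; the per-site byte cost and the field layout of deoptimization_reloc_size are assumptions, not taken from the patch:

    #include <cstdio>

    struct DeoptRelocSize {
      int min_size = 0;  // total bytes deopt patching may need
    };

    struct CodeGenSketch {
      DeoptRelocSize deoptimization_reloc_size;

      // Assumed behavior: each patchable call site bumps the running
      // minimum that GenerateRelocPadding() (sketched earlier) pads to.
      void EnsureRelocSpaceForDeoptimization() {
        deoptimization_reloc_size.min_size += 2;  // illustrative cost
      }

      void CallCode() {
        // ... emit the call instruction ...
        EnsureRelocSpaceForDeoptimization();
      }
    };

    int main() {
      CodeGenSketch cg;
      for (int i = 0; i < 3; ++i) cg.CallCode();
      std::printf("min reloc bytes after 3 calls: %d\n",
                  cg.deoptimization_reloc_size.min_size);
      return 0;
    }
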
398 | 412 |
(...skipping 1895 matching lines...)
2294 __ mov(eax, arity); | 2308 __ mov(eax, arity); |
2295 } | 2309 } |
2296 | 2310 |
2297 LPointerMap* pointers = instr->pointer_map(); | 2311 LPointerMap* pointers = instr->pointer_map(); |
2298 RecordPosition(pointers->position()); | 2312 RecordPosition(pointers->position()); |
2299 | 2313 |
2300 // Invoke function. | 2314 // Invoke function. |
2301 if (*function == *graph()->info()->closure()) { | 2315 if (*function == *graph()->info()->closure()) { |
2302 __ CallSelf(); | 2316 __ CallSelf(); |
2303 } else { | 2317 } else { |
2304 // This is an indirect call and will not be recorded in the reloc info. | |
2305 // Add a comment to the reloc info in case we need to patch this during | |
2306 // deoptimization. | |
2307 __ RecordComment(RelocInfo::kFillerCommentString, true); | |
2308 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); | 2318 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); |
| 2319 EnsureRelocSpaceForDeoptimization(); |
2309 } | 2320 } |
2310 | 2321 |
2311 // Setup deoptimization. | 2322 // Setup deoptimization. |
2312 RegisterLazyDeoptimization(instr); | 2323 RegisterLazyDeoptimization(instr); |
2313 } | 2324 } |
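
The deleted comment explains the asymmetry handled here: a direct call carries a reloc entry the deoptimizer can find, while a call through JSFunction::kCodeEntryOffset is indirect and leaves none, so the old code pre-padded with a filler comment and the new code reserves space right after emitting the call. An illustrative model of that asymmetry (not V8's emitter; the instruction lengths are made up):

    #include <cstdio>
    #include <vector>

    struct RelocStream {
      std::vector<int> entries;
      void Add(int pc) { entries.push_back(pc); }
    };

    struct MasmSketch {
      int pc = 0;
      RelocStream reloc;
      void CallDirect()   { pc += 5; reloc.Add(pc); }  // leaves a reloc entry
      void CallIndirect() { pc += 2; }                 // leaves none
    };

    int main() {
      MasmSketch masm;
      masm.CallDirect();        // deoptimizer can find this via reloc info
      masm.CallIndirect();      // invisible to reloc info on its own...
      masm.reloc.Add(masm.pc);  // ...so space is reserved by hand
      std::printf("reloc entries: %zu\n", masm.reloc.entries.size());
      return 0;
    }
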
2314 | 2325 |
2315 | 2326 |
2316 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 2327 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
2317 ASSERT(ToRegister(instr->result()).is(eax)); | 2328 ASSERT(ToRegister(instr->result()).is(eax)); |
2318 __ mov(edi, instr->function()); | 2329 __ mov(edi, instr->function()); |
(...skipping 1485 matching lines...)
3804 ASSERT(osr_pc_offset_ == -1); | 3815 ASSERT(osr_pc_offset_ == -1); |
3805 osr_pc_offset_ = masm()->pc_offset(); | 3816 osr_pc_offset_ = masm()->pc_offset(); |
3806 } | 3817 } |
3807 | 3818 |
3808 | 3819 |
3809 #undef __ | 3820 #undef __ |
3810 | 3821 |
3811 } } // namespace v8::internal | 3822 } } // namespace v8::internal |
3812 | 3823 |
3813 #endif // V8_TARGET_ARCH_IA32 | 3824 #endif // V8_TARGET_ARCH_IA32 |