Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(385)

Side by Side Diff: src/ia32/lithium-codegen-ia32.cc

Issue 6541053: Add more generic version of reloc info padding to ensure enough space for rel... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
48 : codegen_(codegen), 48 : codegen_(codegen),
49 pointers_(pointers), 49 pointers_(pointers),
50 deoptimization_index_(deoptimization_index), 50 deoptimization_index_(deoptimization_index),
51 ensure_reloc_space_(ensure_reloc_space) { } 51 ensure_reloc_space_(ensure_reloc_space) { }
52 virtual ~SafepointGenerator() { } 52 virtual ~SafepointGenerator() { }
53 53
54 virtual void Generate() { 54 virtual void Generate() {
55 // Ensure that we have enough space in the reloc info to patch 55 // Ensure that we have enough space in the reloc info to patch
56 // this with calls when doing deoptimization. 56 // this with calls when doing deoptimization.
57 if (ensure_reloc_space_) { 57 if (ensure_reloc_space_) {
 58 codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true); 58 // We can use up to RelocInfo::kMaxCallSize bytes for storing a call,
59 // if this includes a long variable length pc-jump.
60 codegen_->AddRelocPadding(RelocInfo::kMaxCallSize);
59 } 61 }
60 codegen_->RecordSafepoint(pointers_, deoptimization_index_); 62 codegen_->RecordSafepoint(pointers_, deoptimization_index_);
61 } 63 }
62 64
63 private: 65 private:
64 LCodeGen* codegen_; 66 LCodeGen* codegen_;
65 LPointerMap* pointers_; 67 LPointerMap* pointers_;
66 int deoptimization_index_; 68 int deoptimization_index_;
67 bool ensure_reloc_space_; 69 bool ensure_reloc_space_;
68 }; 70 };
69 71
70 72
71 #define __ masm()-> 73 #define __ masm()->
72 74
73 bool LCodeGen::GenerateCode() { 75 bool LCodeGen::GenerateCode() {
74 HPhase phase("Code generation", chunk()); 76 HPhase phase("Code generation", chunk());
75 ASSERT(is_unused()); 77 ASSERT(is_unused());
76 status_ = GENERATING; 78 status_ = GENERATING;
77 CpuFeatures::Scope scope(SSE2); 79 CpuFeatures::Scope scope(SSE2);
78 return GeneratePrologue() && 80 return GeneratePrologue() &&
79 GenerateBody() && 81 GenerateBody() &&
80 GenerateDeferredCode() && 82 GenerateDeferredCode() &&
83 GenerateRelocPadding() &&
81 GenerateSafepointTable(); 84 GenerateSafepointTable();
82 } 85 }
83 86
84 87
85 void LCodeGen::FinishCode(Handle<Code> code) { 88 void LCodeGen::FinishCode(Handle<Code> code) {
86 ASSERT(is_done()); 89 ASSERT(is_done());
87 code->set_stack_slots(StackSlotCount()); 90 code->set_stack_slots(StackSlotCount());
88 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); 91 code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
89 PopulateDeoptimizationData(code); 92 PopulateDeoptimizationData(code);
90 } 93 }
(...skipping 24 matching lines...) Expand all
115 118
116 // Copy the string before recording it in the assembler to avoid 119 // Copy the string before recording it in the assembler to avoid
117 // issues when the stack allocated buffer goes out of scope. 120 // issues when the stack allocated buffer goes out of scope.
118 size_t length = builder.position(); 121 size_t length = builder.position();
119 Vector<char> copy = Vector<char>::New(length + 1); 122 Vector<char> copy = Vector<char>::New(length + 1);
120 memcpy(copy.start(), builder.Finalize(), copy.length()); 123 memcpy(copy.start(), builder.Finalize(), copy.length());
121 masm()->RecordComment(copy.start()); 124 masm()->RecordComment(copy.start());
122 } 125 }
123 126
124 127
128 bool LCodeGen::GenerateRelocPadding() {
129 while(reloc_padding_count_ > 0) {
130 __ RecordComment(RelocInfo::kFillerCommentString, true);
131 reloc_padding_count_ -= RelocInfo::kRelocCommentSize;
132 }
133 return !is_aborted();
134 }
135
136
125 bool LCodeGen::GeneratePrologue() { 137 bool LCodeGen::GeneratePrologue() {
126 ASSERT(is_generating()); 138 ASSERT(is_generating());
127 139
128 #ifdef DEBUG 140 #ifdef DEBUG
129 if (strlen(FLAG_stop_at) > 0 && 141 if (strlen(FLAG_stop_at) > 0 &&
130 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { 142 info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
131 __ int3(); 143 __ int3();
132 } 144 }
133 #endif 145 #endif
134 146
(...skipping 239 matching lines...) Expand 10 before | Expand all | Expand 10 after
374 } 386 }
375 } 387 }
376 388
377 389
378 void LCodeGen::CallCode(Handle<Code> code, 390 void LCodeGen::CallCode(Handle<Code> code,
379 RelocInfo::Mode mode, 391 RelocInfo::Mode mode,
380 LInstruction* instr, 392 LInstruction* instr,
381 bool adjusted) { 393 bool adjusted) {
382 ASSERT(instr != NULL); 394 ASSERT(instr != NULL);
383 LPointerMap* pointers = instr->pointer_map(); 395 LPointerMap* pointers = instr->pointer_map();
384 RecordPosition(pointers->position()); 396 RecordPosition(pointers->position());
Søren Thygesen Gjesse 2011/02/21 13:27:20 Move this comment and code to after the '__ call'?
397 // A call will only take up 1 byte in the reloc_info, but patching in the
 398 // deoptimizer will take up two since it uses RUNTIME_ENTRY as relocation
399 // mode to avoid issues with GC. Any pc-jumps will be the same for both
400 // relocation info objects since we patch at the same spot as the original
401 // call.
402 AddRelocPadding(1);
403
385 if (!adjusted) { 404 if (!adjusted) {
386 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 405 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
387 } 406 }
388 __ call(code, mode); 407 __ call(code, mode);
389 RegisterLazyDeoptimization(instr); 408 RegisterLazyDeoptimization(instr);
390 409
391 // Signal that we don't inline smi code before these stubs in the 410 // Signal that we don't inline smi code before these stubs in the
392 // optimizing code generator. 411 // optimizing code generator.
393 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || 412 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
394 code->kind() == Code::COMPARE_IC) { 413 code->kind() == Code::COMPARE_IC) {
(...skipping 1900 matching lines...) Expand 10 before | Expand all | Expand 10 after
2295 } 2314 }
2296 2315
2297 LPointerMap* pointers = instr->pointer_map(); 2316 LPointerMap* pointers = instr->pointer_map();
2298 RecordPosition(pointers->position()); 2317 RecordPosition(pointers->position());
2299 2318
2300 // Invoke function. 2319 // Invoke function.
2301 if (*function == *graph()->info()->closure()) { 2320 if (*function == *graph()->info()->closure()) {
2302 __ CallSelf(); 2321 __ CallSelf();
2303 } else { 2322 } else {
2304 // This is an indirect call and will not be recorded in the reloc info. 2323 // This is an indirect call and will not be recorded in the reloc info.
 2305 // Add a comment to the reloc info in case we need to patch this during 2324 // We can use up to RelocInfo::kMaxCallSize bytes for storing a call, if
2306 // deoptimization. 2325 // this includes a long variable length pc-jump.
2307 __ RecordComment(RelocInfo::kFillerCommentString, true); 2326 AddRelocPadding(RelocInfo::kMaxCallSize);
2308 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); 2327 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
2309 } 2328 }
2310 2329
2311 // Setup deoptimization. 2330 // Setup deoptimization.
2312 RegisterLazyDeoptimization(instr); 2331 RegisterLazyDeoptimization(instr);
2313 } 2332 }
2314 2333
2315 2334
2316 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { 2335 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2317 ASSERT(ToRegister(instr->result()).is(eax)); 2336 ASSERT(ToRegister(instr->result()).is(eax));
(...skipping 1486 matching lines...) Expand 10 before | Expand all | Expand 10 after
3804 ASSERT(osr_pc_offset_ == -1); 3823 ASSERT(osr_pc_offset_ == -1);
3805 osr_pc_offset_ = masm()->pc_offset(); 3824 osr_pc_offset_ = masm()->pc_offset();
3806 } 3825 }
3807 3826
3808 3827
3809 #undef __ 3828 #undef __
3810 3829
3811 } } // namespace v8::internal 3830 } } // namespace v8::internal
3812 3831
3813 #endif // V8_TARGET_ARCH_IA32 3832 #endif // V8_TARGET_ARCH_IA32
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698