Chromium Code Reviews

Unified Diff: src/ia32/lithium-codegen-ia32.cc

Issue 6541053: Add more generic version of reloc info padding to ensure enough space for rel... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 10 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 37 matching lines...)
       : codegen_(codegen),
         pointers_(pointers),
         deoptimization_index_(deoptimization_index),
         ensure_reloc_space_(ensure_reloc_space) { }
   virtual ~SafepointGenerator() { }
 
   virtual void Generate() {
     // Ensure that we have enough space in the reloc info to patch
     // this with calls when doing deoptimization.
     if (ensure_reloc_space_) {
-      codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true);
+      codegen_->EnsureRelocSpaceForDeoptimization();
     }
     codegen_->RecordSafepoint(pointers_, deoptimization_index_);
   }
 
  private:
   LCodeGen* codegen_;
   LPointerMap* pointers_;
   int deoptimization_index_;
   bool ensure_reloc_space_;
 };
 
 
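The replaced line above is the reservation half of this patch: instead of emitting a filler reloc comment at every safepoint, the generator now asks LCodeGen to record how much reloc space a later deoptimization patch might need, and the padding itself is emitted once at the end of code generation (see GenerateRelocPadding below). A minimal standalone sketch of that bookkeeping, with illustrative names rather than the real V8 API:

    // Sketch only; RelocBudget and ReserveForPatchSite are hypothetical names.
    struct RelocBudget {
      int min_size = 0;        // bytes the deoptimizer may need when patching
      int last_pc_offset = 0;  // pc offset of the previously reserved site
    };

    // Called for each call site that lazy deoptimization may later patch.
    void ReserveForPatchSite(RelocBudget* budget, int pc_offset, int entry_bytes) {
      budget->min_size += entry_bytes;   // account for the entry; emit nothing yet
      budget->last_pc_offset = pc_offset;
    }
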
 #define __ masm()->
 
 bool LCodeGen::GenerateCode() {
   HPhase phase("Code generation", chunk());
   ASSERT(is_unused());
   status_ = GENERATING;
   CpuFeatures::Scope scope(SSE2);
   return GeneratePrologue() &&
          GenerateBody() &&
          GenerateDeferredCode() &&
+         GenerateRelocPadding() &&
          GenerateSafepointTable();
 }
 
 
 void LCodeGen::FinishCode(Handle<Code> code) {
   ASSERT(is_done());
   code->set_stack_slots(StackSlotCount());
   code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
   PopulateDeoptimizationData(code);
 }
(...skipping 24 matching lines...)
 
   // Copy the string before recording it in the assembler to avoid
   // issues when the stack allocated buffer goes out of scope.
   size_t length = builder.position();
   Vector<char> copy = Vector<char>::New(length + 1);
   memcpy(copy.start(), builder.Finalize(), copy.length());
   masm()->RecordComment(copy.start());
 }
 
 
+bool LCodeGen::GenerateRelocPadding() {
+  int reloc_size = masm()->relocation_writer_size();
+  while (reloc_size < deoptimization_reloc_size.min_size) {
+    __ RecordComment(RelocInfo::kFillerCommentString, true);
+    reloc_size += RelocInfo::kRelocCommentSize;
+  }
+  return !is_aborted();
+}
+
+
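GenerateRelocPadding is the second half of the scheme: after all code, including deferred code, has been emitted, it emits fixed-size filler reloc comments until the reloc info already written reaches the reserved minimum; GenerateCode() wires it in between GenerateDeferredCode() and GenerateSafepointTable() as shown above. A standalone sketch of the same loop, assuming a hypothetical fixed entry size for each filler:

    // Sketch only; kFillerEntrySize stands in for RelocInfo::kRelocCommentSize.
    int FillersNeeded(int written_reloc_size, int min_size, int kFillerEntrySize) {
      int fillers = 0;
      while (written_reloc_size < min_size) {
        written_reloc_size += kFillerEntrySize;  // one filler comment per round
        ++fillers;
      }
      return fillers;
    }
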
 bool LCodeGen::GeneratePrologue() {
   ASSERT(is_generating());
 
 #ifdef DEBUG
   if (strlen(FLAG_stop_at) > 0 &&
       info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
     __ int3();
   }
 #endif
 
(...skipping 193 matching lines...)
             environment->spilled_double_registers()[value->index()],
             false);
       }
     }
 
     AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
   }
 }
 
 
+void LCodeGen::EnsureRelocSpaceForDeoptimization() {
+  // Since we patch the reloc info with RUNTIME_ENTRY calls, every patch
+  // site will take up 2 bytes plus any pc-jumps.
+  // We are conservative and always reserve 6 bytes in case a simple
+  // pc-jump is not enough.
+  uint32_t pc_delta =
+      masm()->pc_offset() - deoptimization_reloc_size.last_pc_offset;
+  if (is_uintn(pc_delta, 6)) {
+    deoptimization_reloc_size.min_size += 2;
+  } else {
+    deoptimization_reloc_size.min_size += 6;
+  }
+  deoptimization_reloc_size.last_pc_offset = masm()->pc_offset();
+}
+
+
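A concrete walk-through of the reservation rule above, assuming is_uintn(x, 6) tests whether x fits in six unsigned bits (that is, x < 64): sites close to the previously reserved one are charged the 2-byte entry, anything further away gets the conservative 6 bytes. The offsets below are made up purely for illustration.

    #include <cstdint>
    #include <cstdio>

    // Hypothetical example: three patchable call sites at pc offsets 10, 40, 200.
    int main() {
      const int offsets[] = {10, 40, 200};
      int last_pc = 0;
      int min_size = 0;
      for (int pc : offsets) {
        uint32_t delta = static_cast<uint32_t>(pc - last_pc);
        min_size += (delta < (1u << 6)) ? 2 : 6;  // short entry vs. pc-jump case
        last_pc = pc;
      }
      std::printf("reserved %d bytes\n", min_size);  // deltas 10, 30, 160 -> 2 + 2 + 6
      return 0;
    }
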
 void LCodeGen::AddToTranslation(Translation* translation,
                                 LOperand* op,
                                 bool is_tagged) {
   if (op == NULL) {
     // TODO(twuerthinger): Introduce marker operands to indicate that this value
     // is not present and must be reconstructed from the deoptimizer. Currently
     // this is only used for the arguments object.
     translation->StoreArgumentsObject();
   } else if (op->IsStackSlot()) {
     if (is_tagged) {
(...skipping 27 matching lines...)
 }
 
 
 void LCodeGen::CallCode(Handle<Code> code,
                         RelocInfo::Mode mode,
                         LInstruction* instr,
                         bool adjusted) {
   ASSERT(instr != NULL);
   LPointerMap* pointers = instr->pointer_map();
   RecordPosition(pointers->position());
+
   if (!adjusted) {
     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
   }
   __ call(code, mode);
+
+  EnsureRelocSpaceForDeoptimization();
   RegisterLazyDeoptimization(instr);
 
   // Signal that we don't inline smi code before these stubs in the
   // optimizing code generator.
   if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
       code->kind() == Code::COMPARE_IC) {
     __ nop();
   }
 }
 
(...skipping 1895 matching lines...)
     __ mov(eax, arity);
   }
 
   LPointerMap* pointers = instr->pointer_map();
   RecordPosition(pointers->position());
 
   // Invoke function.
   if (*function == *graph()->info()->closure()) {
     __ CallSelf();
   } else {
-    // This is an indirect call and will not be recorded in the reloc info.
-    // Add a comment to the reloc info in case we need to patch this during
-    // deoptimization.
-    __ RecordComment(RelocInfo::kFillerCommentString, true);
     __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
+    EnsureRelocSpaceForDeoptimization();
   }
 
   // Setup deoptimization.
   RegisterLazyDeoptimization(instr);
 }
 
 
 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
   ASSERT(ToRegister(instr->result()).is(eax));
   __ mov(edi, instr->function());
(...skipping 1485 matching lines...)
   ASSERT(osr_pc_offset_ == -1);
   osr_pc_offset_ = masm()->pc_offset();
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_IA32