Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(607)

Side by Side Diff: src/mips/lithium-codegen-mips.cc

Issue 128303002: Merged r18000, r18013, r18298, r18319 into 3.22 branch. (Closed) Base URL: https://v8.googlecode.com/svn/branches/3.22
Patch Set: Fix mips Created 6 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/mips/lithium-codegen-mips.h ('k') | src/mips/macro-assembler-mips.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after
91 info()->CommitDependencies(code); 91 info()->CommitDependencies(code);
92 } 92 }
93 93
94 94
95 void LChunkBuilder::Abort(BailoutReason reason) { 95 void LChunkBuilder::Abort(BailoutReason reason) {
96 info()->set_bailout_reason(reason); 96 info()->set_bailout_reason(reason);
97 status_ = ABORTED; 97 status_ = ABORTED;
98 } 98 }
99 99
100 100
// Spills every allocated double register into the stack slots reserved
// for them in the frame: the i-th register visited goes to
// sp + i * kDoubleSize. Must only run when the frame has been built
// (NeedsEagerFrame()) and the chunk actually saves caller doubles.
// NOTE(review): the Comment() text says "callee double registers" but
// info()->saves_caller_doubles() indicates these are the caller-saved
// set — the string is runtime-emitted assembly commentary and is kept
// as-is; confirm against the other architectures' ports.
101 void LCodeGen::SaveCallerDoubles() {
102 ASSERT(info()->saves_caller_doubles());
103 ASSERT(NeedsEagerFrame());
104 Comment(";;; Save clobbered callee double registers");
105 int count = 0;
106 BitVector* doubles = chunk()->allocated_double_registers();
107 BitVector::Iterator save_iterator(doubles);
// Walk the allocated-double bit vector; sdc1 stores one 64-bit FPU
// register per iteration, packing slots contiguously above sp.
108 while (!save_iterator.Done()) {
109 __ sdc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
110 MemOperand(sp, count * kDoubleSize));
111 save_iterator.Advance();
112 count++;
113 }
114 }
115
116
// Reloads the double registers spilled by SaveCallerDoubles(), reading
// them back from the same contiguous stack slots (sp + i * kDoubleSize)
// in the same allocation-index iteration order, so the two functions
// must stay in lockstep. Preconditions mirror SaveCallerDoubles().
// NOTE(review): as in SaveCallerDoubles(), the Comment() string says
// "callee" while these are the caller-saved doubles; string kept as-is.
117 void LCodeGen::RestoreCallerDoubles() {
118 ASSERT(info()->saves_caller_doubles());
119 ASSERT(NeedsEagerFrame());
120 Comment(";;; Restore clobbered callee double registers");
121 BitVector* doubles = chunk()->allocated_double_registers();
122 BitVector::Iterator save_iterator(doubles);
123 int count = 0;
// ldc1 is the 64-bit FPU load, the exact inverse of the sdc1 stores
// performed in SaveCallerDoubles().
124 while (!save_iterator.Done()) {
125 __ ldc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
126 MemOperand(sp, count * kDoubleSize));
127 save_iterator.Advance();
128 count++;
129 }
130 }
131
132
101 bool LCodeGen::GeneratePrologue() { 133 bool LCodeGen::GeneratePrologue() {
102 ASSERT(is_generating()); 134 ASSERT(is_generating());
103 135
104 if (info()->IsOptimizing()) { 136 if (info()->IsOptimizing()) {
105 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 137 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
106 138
107 #ifdef DEBUG 139 #ifdef DEBUG
108 if (strlen(FLAG_stop_at) > 0 && 140 if (strlen(FLAG_stop_at) > 0 &&
109 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { 141 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
110 __ stop("stop_at"); 142 __ stop("stop_at");
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after
153 __ sw(a1, MemOperand(a0, 2 * kPointerSize)); 185 __ sw(a1, MemOperand(a0, 2 * kPointerSize));
154 __ Branch(&loop, ne, a0, Operand(sp)); 186 __ Branch(&loop, ne, a0, Operand(sp));
155 __ pop(a1); 187 __ pop(a1);
156 __ pop(a0); 188 __ pop(a0);
157 } else { 189 } else {
158 __ Subu(sp, sp, Operand(slots * kPointerSize)); 190 __ Subu(sp, sp, Operand(slots * kPointerSize));
159 } 191 }
160 } 192 }
161 193
162 if (info()->saves_caller_doubles()) { 194 if (info()->saves_caller_doubles()) {
163 Comment(";;; Save clobbered callee double registers"); 195 SaveCallerDoubles();
164 int count = 0;
165 BitVector* doubles = chunk()->allocated_double_registers();
166 BitVector::Iterator save_iterator(doubles);
167 while (!save_iterator.Done()) {
168 __ sdc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
169 MemOperand(sp, count * kDoubleSize));
170 save_iterator.Advance();
171 count++;
172 }
173 } 196 }
174 197
175 // Possibly allocate a local context. 198 // Possibly allocate a local context.
176 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 199 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
177 if (heap_slots > 0) { 200 if (heap_slots > 0) {
178 Comment(";;; Allocate local context"); 201 Comment(";;; Allocate local context");
179 // Argument to NewContext is the function, which is in a1. 202 // Argument to NewContext is the function, which is in a1.
180 __ push(a1); 203 __ push(a1);
181 if (heap_slots <= FastNewContextStub::kMaximumSlots) { 204 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
182 FastNewContextStub stub(heap_slots); 205 FastNewContextStub stub(heap_slots);
(...skipping 108 matching lines...) Expand 10 before | Expand all | Expand 10 after
291 Address entry = deopt_jump_table_[i].address; 314 Address entry = deopt_jump_table_[i].address;
292 Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type; 315 Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type;
293 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); 316 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
294 if (id == Deoptimizer::kNotDeoptimizationEntry) { 317 if (id == Deoptimizer::kNotDeoptimizationEntry) {
295 Comment(";;; jump table entry %d.", i); 318 Comment(";;; jump table entry %d.", i);
296 } else { 319 } else {
297 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); 320 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
298 } 321 }
299 __ li(t9, Operand(ExternalReference::ForDeoptEntry(entry))); 322 __ li(t9, Operand(ExternalReference::ForDeoptEntry(entry)));
300 if (deopt_jump_table_[i].needs_frame) { 323 if (deopt_jump_table_[i].needs_frame) {
324 ASSERT(!info()->saves_caller_doubles());
301 if (needs_frame.is_bound()) { 325 if (needs_frame.is_bound()) {
302 __ Branch(&needs_frame); 326 __ Branch(&needs_frame);
303 } else { 327 } else {
304 __ bind(&needs_frame); 328 __ bind(&needs_frame);
305 __ MultiPush(cp.bit() | fp.bit() | ra.bit()); 329 __ MultiPush(cp.bit() | fp.bit() | ra.bit());
306 // This variant of deopt can only be used with stubs. Since we don't 330 // This variant of deopt can only be used with stubs. Since we don't
307 // have a function pointer to install in the stack frame that we're 331 // have a function pointer to install in the stack frame that we're
308 // building, install a special marker there instead. 332 // building, install a special marker there instead.
309 ASSERT(info()->IsStub()); 333 ASSERT(info()->IsStub());
310 __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); 334 __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
311 __ push(scratch0()); 335 __ push(scratch0());
312 __ Addu(fp, sp, Operand(2 * kPointerSize)); 336 __ Addu(fp, sp, Operand(2 * kPointerSize));
313 __ Call(t9); 337 __ Call(t9);
314 } 338 }
315 } else { 339 } else {
340 if (info()->saves_caller_doubles()) {
341 ASSERT(info()->IsStub());
342 RestoreCallerDoubles();
343 }
316 __ Call(t9); 344 __ Call(t9);
317 } 345 }
318 } 346 }
319 __ RecordComment("]"); 347 __ RecordComment("]");
320 348
321 // The deoptimization jump table is the last part of the instruction 349 // The deoptimization jump table is the last part of the instruction
322 // sequence. Mark the generated code as done unless we bailed out. 350 // sequence. Mark the generated code as done unless we bailed out.
323 if (!is_aborted()) status_ = DONE; 351 if (!is_aborted()) status_ = DONE;
324 return !is_aborted(); 352 return !is_aborted();
325 } 353 }
(...skipping 424 matching lines...) Expand 10 before | Expand all | Expand 10 after
750 if (info()->ShouldTrapOnDeopt()) { 778 if (info()->ShouldTrapOnDeopt()) {
751 Label skip; 779 Label skip;
752 if (condition != al) { 780 if (condition != al) {
753 __ Branch(&skip, NegateCondition(condition), src1, src2); 781 __ Branch(&skip, NegateCondition(condition), src1, src2);
754 } 782 }
755 __ stop("trap_on_deopt"); 783 __ stop("trap_on_deopt");
756 __ bind(&skip); 784 __ bind(&skip);
757 } 785 }
758 786
759 ASSERT(info()->IsStub() || frame_is_built_); 787 ASSERT(info()->IsStub() || frame_is_built_);
760 if (condition == al && frame_is_built_) { 788 // Go through jump table if we need to handle condition, build frame, or
789 // restore caller doubles.
790 if (condition == al && frame_is_built_ &&
791 !info()->saves_caller_doubles()) {
761 __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2); 792 __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
762 } else { 793 } else {
763 // We often have several deopts to the same entry, reuse the last 794 // We often have several deopts to the same entry, reuse the last
764 // jump entry if this is the case. 795 // jump entry if this is the case.
765 if (deopt_jump_table_.is_empty() || 796 if (deopt_jump_table_.is_empty() ||
766 (deopt_jump_table_.last().address != entry) || 797 (deopt_jump_table_.last().address != entry) ||
767 (deopt_jump_table_.last().bailout_type != bailout_type) || 798 (deopt_jump_table_.last().bailout_type != bailout_type) ||
768 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { 799 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
769 Deoptimizer::JumpTableEntry table_entry(entry, 800 Deoptimizer::JumpTableEntry table_entry(entry,
770 bailout_type, 801 bailout_type,
(...skipping 1928 matching lines...) Expand 10 before | Expand all | Expand 10 after
2699 if (FLAG_trace && info()->IsOptimizing()) { 2730 if (FLAG_trace && info()->IsOptimizing()) {
2700 // Push the return value on the stack as the parameter. 2731 // Push the return value on the stack as the parameter.
2701 // Runtime::TraceExit returns its parameter in v0. We're leaving the code 2732 // Runtime::TraceExit returns its parameter in v0. We're leaving the code
2702 // managed by the register allocator and tearing down the frame, it's 2733 // managed by the register allocator and tearing down the frame, it's
2703 // safe to write to the context register. 2734 // safe to write to the context register.
2704 __ push(v0); 2735 __ push(v0);
2705 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2736 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2706 __ CallRuntime(Runtime::kTraceExit, 1); 2737 __ CallRuntime(Runtime::kTraceExit, 1);
2707 } 2738 }
2708 if (info()->saves_caller_doubles()) { 2739 if (info()->saves_caller_doubles()) {
2709 ASSERT(NeedsEagerFrame()); 2740 RestoreCallerDoubles();
2710 BitVector* doubles = chunk()->allocated_double_registers();
2711 BitVector::Iterator save_iterator(doubles);
2712 int count = 0;
2713 while (!save_iterator.Done()) {
2714 __ ldc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
2715 MemOperand(sp, count * kDoubleSize));
2716 save_iterator.Advance();
2717 count++;
2718 }
2719 } 2741 }
2720 int no_frame_start = -1; 2742 int no_frame_start = -1;
2721 if (NeedsEagerFrame()) { 2743 if (NeedsEagerFrame()) {
2722 __ mov(sp, fp); 2744 __ mov(sp, fp);
2723 no_frame_start = masm_->pc_offset(); 2745 no_frame_start = masm_->pc_offset();
2724 __ Pop(ra, fp); 2746 __ Pop(ra, fp);
2725 } 2747 }
2726 if (instr->has_constant_parameter_count()) { 2748 if (instr->has_constant_parameter_count()) {
2727 int parameter_count = ToInteger32(instr->constant_parameter_count()); 2749 int parameter_count = ToInteger32(instr->constant_parameter_count());
2728 int32_t sp_delta = (parameter_count + 1) * kPointerSize; 2750 int32_t sp_delta = (parameter_count + 1) * kPointerSize;
(...skipping 3089 matching lines...) Expand 10 before | Expand all | Expand 10 after
5818 __ Subu(scratch, result, scratch); 5840 __ Subu(scratch, result, scratch);
5819 __ lw(result, FieldMemOperand(scratch, 5841 __ lw(result, FieldMemOperand(scratch,
5820 FixedArray::kHeaderSize - kPointerSize)); 5842 FixedArray::kHeaderSize - kPointerSize));
5821 __ bind(&done); 5843 __ bind(&done);
5822 } 5844 }
5823 5845
5824 5846
5825 #undef __ 5847 #undef __
5826 5848
5827 } } // namespace v8::internal 5849 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/mips/lithium-codegen-mips.h ('k') | src/mips/macro-assembler-mips.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698