Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(801)

Side by Side Diff: src/arm/lithium-codegen-arm.cc

Issue 128303002: Merged r18000, r18013, r18298, r18319 into 3.22 branch. (Closed) Base URL: https://v8.googlecode.com/svn/branches/3.22
Patch Set: Fix mips Created 6 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/arm/lithium-codegen-arm.h ('k') | src/arm/macro-assembler-arm.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after
91 info()->CommitDependencies(code); 91 info()->CommitDependencies(code);
92 } 92 }
93 93
94 94
// Marks code generation as failed: records the bailout reason on the
// CompilationInfo and flips the generator status to ABORTED so callers can
// detect that no usable code was produced. (Old and new diff columns are
// identical for this function — it is unchanged by this patch.)
95 void LCodeGen::Abort(BailoutReason reason) { 95 void LCodeGen::Abort(BailoutReason reason) {
96 info()->set_bailout_reason(reason); 96 info()->set_bailout_reason(reason);
97 status_ = ABORTED; 97 status_ = ABORTED;
98 } 98 }
99 99
100 100
// Spills every allocated double (VFP) register to the stack area reserved by
// the prologue, one kDoubleSize slot per register, starting at sp. New helper
// factored out of GeneratePrologue so the deopt jump table can share it (see
// the saves_caller_doubles() call sites later in this diff).
// Preconditions: the frame has been built and the stack space for the doubles
// has already been reserved by the caller.
// NOTE(review): the emitted comment string says "callee double registers"
// while the function name says "caller" — presumably a pre-existing wording
// slip carried over from the original inline code; confirm before changing,
// since Comment() text is emitted into generated-code listings.
101 void LCodeGen::SaveCallerDoubles() {
102 ASSERT(info()->saves_caller_doubles());
103 ASSERT(NeedsEagerFrame());
104 Comment(";;; Save clobbered callee double registers");
105 int count = 0;
// Iterate the bit vector of allocated double registers; 'count' assigns each
// live register the next consecutive kDoubleSize-aligned stack slot.
106 BitVector* doubles = chunk()->allocated_double_registers();
107 BitVector::Iterator save_iterator(doubles);
108 while (!save_iterator.Done()) {
109 __ vstr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
110 MemOperand(sp, count * kDoubleSize));
111 save_iterator.Advance();
112 count++;
113 }
114 }
115
116
// Exact inverse of SaveCallerDoubles(): reloads every allocated double (VFP)
// register from the same sp-relative slots (vldr instead of vstr), walking the
// allocated_double_registers() bit vector in the same order so each register
// gets back the value it saved. New helper shared by the return sequence and
// by deopt jump-table entries that leave a stub frame (later in this diff).
// Preconditions mirror SaveCallerDoubles(): frame built, slots still intact.
117 void LCodeGen::RestoreCallerDoubles() {
118 ASSERT(info()->saves_caller_doubles());
119 ASSERT(NeedsEagerFrame());
120 Comment(";;; Restore clobbered callee double registers");
121 BitVector* doubles = chunk()->allocated_double_registers();
122 BitVector::Iterator save_iterator(doubles);
123 int count = 0;
// Same slot-assignment scheme as the save path: consecutive kDoubleSize
// offsets from sp in bit-vector iteration order.
124 while (!save_iterator.Done()) {
125 __ vldr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
126 MemOperand(sp, count * kDoubleSize));
127 save_iterator.Advance();
128 count++;
129 }
130 }
131
132
101 bool LCodeGen::GeneratePrologue() { 133 bool LCodeGen::GeneratePrologue() {
102 ASSERT(is_generating()); 134 ASSERT(is_generating());
103 135
104 if (info()->IsOptimizing()) { 136 if (info()->IsOptimizing()) {
105 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 137 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
106 138
107 #ifdef DEBUG 139 #ifdef DEBUG
108 if (strlen(FLAG_stop_at) > 0 && 140 if (strlen(FLAG_stop_at) > 0 &&
109 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { 141 info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
110 __ stop("stop_at"); 142 __ stop("stop_at");
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
151 __ cmp(r0, sp); 183 __ cmp(r0, sp);
152 __ b(ne, &loop); 184 __ b(ne, &loop);
153 __ pop(r1); 185 __ pop(r1);
154 __ pop(r0); 186 __ pop(r0);
155 } else { 187 } else {
156 __ sub(sp, sp, Operand(slots * kPointerSize)); 188 __ sub(sp, sp, Operand(slots * kPointerSize));
157 } 189 }
158 } 190 }
159 191
160 if (info()->saves_caller_doubles()) { 192 if (info()->saves_caller_doubles()) {
161 Comment(";;; Save clobbered callee double registers"); 193 SaveCallerDoubles();
162 int count = 0;
163 BitVector* doubles = chunk()->allocated_double_registers();
164 BitVector::Iterator save_iterator(doubles);
165 while (!save_iterator.Done()) {
166 __ vstr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
167 MemOperand(sp, count * kDoubleSize));
168 save_iterator.Advance();
169 count++;
170 }
171 } 194 }
172 195
173 // Possibly allocate a local context. 196 // Possibly allocate a local context.
174 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 197 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
175 if (heap_slots > 0) { 198 if (heap_slots > 0) {
176 Comment(";;; Allocate local context"); 199 Comment(";;; Allocate local context");
177 // Argument to NewContext is the function, which is in r1. 200 // Argument to NewContext is the function, which is in r1.
178 __ push(r1); 201 __ push(r1);
179 if (heap_slots <= FastNewContextStub::kMaximumSlots) { 202 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
180 FastNewContextStub stub(heap_slots); 203 FastNewContextStub stub(heap_slots);
(...skipping 125 matching lines...) Expand 10 before | Expand all | Expand 10 after
306 __ bind(&deopt_jump_table_[i].label); 329 __ bind(&deopt_jump_table_[i].label);
307 Address entry = deopt_jump_table_[i].address; 330 Address entry = deopt_jump_table_[i].address;
308 Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type; 331 Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type;
309 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); 332 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
310 if (id == Deoptimizer::kNotDeoptimizationEntry) { 333 if (id == Deoptimizer::kNotDeoptimizationEntry) {
311 Comment(";;; jump table entry %d.", i); 334 Comment(";;; jump table entry %d.", i);
312 } else { 335 } else {
313 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); 336 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
314 } 337 }
315 if (deopt_jump_table_[i].needs_frame) { 338 if (deopt_jump_table_[i].needs_frame) {
339 ASSERT(!info()->saves_caller_doubles());
316 __ mov(ip, Operand(ExternalReference::ForDeoptEntry(entry))); 340 __ mov(ip, Operand(ExternalReference::ForDeoptEntry(entry)));
317 if (needs_frame.is_bound()) { 341 if (needs_frame.is_bound()) {
318 __ b(&needs_frame); 342 __ b(&needs_frame);
319 } else { 343 } else {
320 __ bind(&needs_frame); 344 __ bind(&needs_frame);
321 __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit()); 345 __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
322 // This variant of deopt can only be used with stubs. Since we don't 346 // This variant of deopt can only be used with stubs. Since we don't
323 // have a function pointer to install in the stack frame that we're 347 // have a function pointer to install in the stack frame that we're
324 // building, install a special marker there instead. 348 // building, install a special marker there instead.
325 ASSERT(info()->IsStub()); 349 ASSERT(info()->IsStub());
326 __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); 350 __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
327 __ push(scratch0()); 351 __ push(scratch0());
328 __ add(fp, sp, Operand(2 * kPointerSize)); 352 __ add(fp, sp, Operand(2 * kPointerSize));
329 __ mov(lr, Operand(pc), LeaveCC, al); 353 __ mov(lr, Operand(pc), LeaveCC, al);
330 __ mov(pc, ip); 354 __ mov(pc, ip);
331 } 355 }
332 } else { 356 } else {
357 if (info()->saves_caller_doubles()) {
358 ASSERT(info()->IsStub());
359 RestoreCallerDoubles();
360 }
333 __ mov(lr, Operand(pc), LeaveCC, al); 361 __ mov(lr, Operand(pc), LeaveCC, al);
334 __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry))); 362 __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry)));
335 } 363 }
336 masm()->CheckConstPool(false, false); 364 masm()->CheckConstPool(false, false);
337 } 365 }
338 366
339 // Force constant pool emission at the end of the deopt jump table to make 367 // Force constant pool emission at the end of the deopt jump table to make
340 // sure that no constant pools are emitted after. 368 // sure that no constant pools are emitted after.
341 masm()->CheckConstPool(true, false); 369 masm()->CheckConstPool(true, false);
342 370
(...skipping 433 matching lines...) Expand 10 before | Expand all | Expand 10 after
776 ASSERT(frame_is_built_); 804 ASSERT(frame_is_built_);
777 __ Call(entry, RelocInfo::RUNTIME_ENTRY); 805 __ Call(entry, RelocInfo::RUNTIME_ENTRY);
778 return; 806 return;
779 } 807 }
780 808
781 if (info()->ShouldTrapOnDeopt()) { 809 if (info()->ShouldTrapOnDeopt()) {
782 __ stop("trap_on_deopt", condition); 810 __ stop("trap_on_deopt", condition);
783 } 811 }
784 812
785 ASSERT(info()->IsStub() || frame_is_built_); 813 ASSERT(info()->IsStub() || frame_is_built_);
786 if (condition == al && frame_is_built_) { 814 // Go through jump table if we need to handle condition, build frame, or
815 // restore caller doubles.
816 if (condition == al && frame_is_built_ &&
817 !info()->saves_caller_doubles()) {
787 __ Call(entry, RelocInfo::RUNTIME_ENTRY); 818 __ Call(entry, RelocInfo::RUNTIME_ENTRY);
788 } else { 819 } else {
789 // We often have several deopts to the same entry, reuse the last 820 // We often have several deopts to the same entry, reuse the last
790 // jump entry if this is the case. 821 // jump entry if this is the case.
791 if (deopt_jump_table_.is_empty() || 822 if (deopt_jump_table_.is_empty() ||
792 (deopt_jump_table_.last().address != entry) || 823 (deopt_jump_table_.last().address != entry) ||
793 (deopt_jump_table_.last().bailout_type != bailout_type) || 824 (deopt_jump_table_.last().bailout_type != bailout_type) ||
794 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { 825 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
795 Deoptimizer::JumpTableEntry table_entry(entry, 826 Deoptimizer::JumpTableEntry table_entry(entry,
796 bailout_type, 827 bailout_type,
(...skipping 2049 matching lines...) Expand 10 before | Expand all | Expand 10 after
2846 if (FLAG_trace && info()->IsOptimizing()) { 2877 if (FLAG_trace && info()->IsOptimizing()) {
2847 // Push the return value on the stack as the parameter. 2878 // Push the return value on the stack as the parameter.
2848 // Runtime::TraceExit returns its parameter in r0. We're leaving the code 2879 // Runtime::TraceExit returns its parameter in r0. We're leaving the code
2849 // managed by the register allocator and tearing down the frame, it's 2880 // managed by the register allocator and tearing down the frame, it's
2850 // safe to write to the context register. 2881 // safe to write to the context register.
2851 __ push(r0); 2882 __ push(r0);
2852 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2883 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2853 __ CallRuntime(Runtime::kTraceExit, 1); 2884 __ CallRuntime(Runtime::kTraceExit, 1);
2854 } 2885 }
2855 if (info()->saves_caller_doubles()) { 2886 if (info()->saves_caller_doubles()) {
2856 ASSERT(NeedsEagerFrame()); 2887 RestoreCallerDoubles();
2857 BitVector* doubles = chunk()->allocated_double_registers();
2858 BitVector::Iterator save_iterator(doubles);
2859 int count = 0;
2860 while (!save_iterator.Done()) {
2861 __ vldr(DwVfpRegister::FromAllocationIndex(save_iterator.Current()),
2862 MemOperand(sp, count * kDoubleSize));
2863 save_iterator.Advance();
2864 count++;
2865 }
2866 } 2888 }
2867 int no_frame_start = -1; 2889 int no_frame_start = -1;
2868 if (NeedsEagerFrame()) { 2890 if (NeedsEagerFrame()) {
2869 __ mov(sp, fp); 2891 __ mov(sp, fp);
2870 no_frame_start = masm_->pc_offset(); 2892 no_frame_start = masm_->pc_offset();
2871 __ ldm(ia_w, sp, fp.bit() | lr.bit()); 2893 __ ldm(ia_w, sp, fp.bit() | lr.bit());
2872 } 2894 }
2873 if (instr->has_constant_parameter_count()) { 2895 if (instr->has_constant_parameter_count()) {
2874 int parameter_count = ToInteger32(instr->constant_parameter_count()); 2896 int parameter_count = ToInteger32(instr->constant_parameter_count());
2875 int32_t sp_delta = (parameter_count + 1) * kPointerSize; 2897 int32_t sp_delta = (parameter_count + 1) * kPointerSize;
(...skipping 2955 matching lines...) Expand 10 before | Expand all | Expand 10 after
5831 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); 5853 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index));
5832 __ ldr(result, FieldMemOperand(scratch, 5854 __ ldr(result, FieldMemOperand(scratch,
5833 FixedArray::kHeaderSize - kPointerSize)); 5855 FixedArray::kHeaderSize - kPointerSize));
5834 __ bind(&done); 5856 __ bind(&done);
5835 } 5857 }
5836 5858
5837 5859
5838 #undef __ 5860 #undef __
5839 5861
5840 } } // namespace v8::internal 5862 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/arm/lithium-codegen-arm.h ('k') | src/arm/macro-assembler-arm.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698