Chromium Code Reviews

Side by Side Diff: src/arm/virtual-frame-arm.h

Issue 6811012: Remove some dead code. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 8 months ago
1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #ifndef V8_ARM_VIRTUAL_FRAME_ARM_H_
29 #define V8_ARM_VIRTUAL_FRAME_ARM_H_
30
31 #include "register-allocator.h"
32
33 namespace v8 {
34 namespace internal {
35
36 // This dummy class is only used to create invalid virtual frames.
37 extern class InvalidVirtualFrameInitializer {}* kInvalidVirtualFrameInitializer;
38
39
40 // -------------------------------------------------------------------------
41 // Virtual frames
42 //
43 // The virtual frame is an abstraction of the physical stack frame. It
44 // encapsulates the parameters, frame-allocated locals, and the expression
45 // stack. It supports push/pop operations on the expression stack, as well
46 // as random access to the expression stack elements, locals, and
47 // parameters.
48
49 class VirtualFrame : public ZoneObject {
50 public:
51 class RegisterAllocationScope;
52 // A utility class to introduce a scope where the virtual frame is
53 // expected to remain spilled. The constructor spills the code
54 // generator's current frame, and keeps it spilled.
55 class SpilledScope BASE_EMBEDDED {
56 public:
57 explicit SpilledScope(VirtualFrame* frame)
58 : old_is_spilled_(
59 Isolate::Current()->is_virtual_frame_in_spilled_scope()) {
60 if (frame != NULL) {
61 if (!old_is_spilled_) {
62 frame->SpillAll();
63 } else {
64 frame->AssertIsSpilled();
65 }
66 }
67 Isolate::Current()->set_is_virtual_frame_in_spilled_scope(true);
68 }
69 ~SpilledScope() {
70 Isolate::Current()->set_is_virtual_frame_in_spilled_scope(
71 old_is_spilled_);
72 }
73 static bool is_spilled() {
74 return Isolate::Current()->is_virtual_frame_in_spilled_scope();
75 }
76
77 private:
78 int old_is_spilled_;
79
80 SpilledScope() { }
81
82 friend class RegisterAllocationScope;
83 };
84
85 class RegisterAllocationScope BASE_EMBEDDED {
86 public:
87 // A utility class to introduce a scope where the virtual frame
88 // is not spilled, i.e. where register allocation occurs. Eventually,
89 // when RegisterAllocationScope is ubiquitous, it can be removed
90 // along with the (by then unused) SpilledScope class.
91 inline explicit RegisterAllocationScope(CodeGenerator* cgen);
92 inline ~RegisterAllocationScope();
93
94 private:
95 CodeGenerator* cgen_;
96 bool old_is_spilled_;
97
98 RegisterAllocationScope() { }
99 };
100
101 // An illegal index into the virtual frame.
102 static const int kIllegalIndex = -1;
103
104 // Construct an initial virtual frame on entry to a JS function.
105 inline VirtualFrame();
106
107 // Construct an invalid virtual frame, used by JumpTargets.
108 inline VirtualFrame(InvalidVirtualFrameInitializer* dummy);
109
110 // Construct a virtual frame as a clone of an existing one.
111 explicit inline VirtualFrame(VirtualFrame* original);
112
113 inline CodeGenerator* cgen() const;
114 inline MacroAssembler* masm();
115
116 // The number of elements on the virtual frame.
117 int element_count() const { return element_count_; }
118
119 // The height of the virtual expression stack.
120 inline int height() const;
121
122 bool is_used(int num) {
123 switch (num) {
124 case 0: { // r0.
125 return kR0InUse[top_of_stack_state_];
126 }
127 case 1: { // r1.
128 return kR1InUse[top_of_stack_state_];
129 }
130 case 2:
131 case 3:
132 case 4:
133 case 5:
134 case 6: { // r2 to r6.
135 ASSERT(num - kFirstAllocatedRegister < kNumberOfAllocatedRegisters);
136 ASSERT(num >= kFirstAllocatedRegister);
137 if ((register_allocation_map_ &
138 (1 << (num - kFirstAllocatedRegister))) == 0) {
139 return false;
140 } else {
141 return true;
142 }
143 }
144 default: {
145 ASSERT(num < kFirstAllocatedRegister ||
146 num >= kFirstAllocatedRegister + kNumberOfAllocatedRegisters);
147 return false;
148 }
149 }
150 }
151
152 // Add extra in-memory elements to the top of the frame to match an actual
153 // frame (e.g., the frame after an exception handler is pushed). No code is
154 // emitted.
155 void Adjust(int count);
156
157 // Forget elements from the top of the frame to match an actual frame (e.g.,
158 // the frame after a runtime call). No code is emitted except to bring the
159 // frame to a spilled state.
160 void Forget(int count);
161
162 // Spill all values from the frame to memory.
163 void SpillAll();
164
165 void AssertIsSpilled() const {
166 ASSERT(top_of_stack_state_ == NO_TOS_REGISTERS);
167 ASSERT(register_allocation_map_ == 0);
168 }
169
170 void AssertIsNotSpilled() {
171 ASSERT(!SpilledScope::is_spilled());
172 }
173
174 // Spill all occurrences of a specific register from the frame.
175 void Spill(Register reg) {
176 UNIMPLEMENTED();
177 }
178
179 // Spill all occurrences of an arbitrary register if possible. Return the
180 // register spilled or no_reg if it was not possible to free any register
181 // (i.e., they all have frame-external references). Unimplemented.
182 Register SpillAnyRegister();
183
184 // Make this virtual frame have a state identical to an expected virtual
185 // frame. As a side effect, code may be emitted to make this frame match
186 // the expected one.
187 void MergeTo(VirtualFrame* expected, Condition cond = al);
188 void MergeTo(const VirtualFrame* expected, Condition cond = al);
189
190 // Checks whether this frame can be branched to by the other frame.
191 bool IsCompatibleWith(const VirtualFrame* other) const {
192 return (tos_known_smi_map_ & (~other->tos_known_smi_map_)) == 0;
193 }
194
195 inline void ForgetTypeInfo() {
196 tos_known_smi_map_ = 0;
197 }
198
199 // Detach a frame from its code generator, perhaps temporarily. This
200 // tells the register allocator that it is free to use frame-internal
201 // registers. Used when the code generator's frame is switched from this
202 // one to NULL by an unconditional jump.
203 void DetachFromCodeGenerator() {
204 }
205
206 // (Re)attach a frame to its code generator. This informs the register
207 // allocator that the frame-internal register references are active again.
208 // Used when a code generator's frame is switched from NULL to this one by
209 // binding a label.
210 void AttachToCodeGenerator() {
211 }
212
213 // Emit code for the physical JS entry and exit frame sequences. After
214 // calling Enter, the virtual frame is ready for use; and after calling
215 // Exit it should not be used. Note that Enter does not allocate space in
216 // the physical frame for storing frame-allocated locals.
217 void Enter();
218 void Exit();
219
220 // Prepare for returning from the frame. This avoids generating unnecessary
221 // merge code when jumping to the shared return site. No spill code is
222 // emitted. The value to return should be in r0.
223 inline void PrepareForReturn();
224
225 // Number of local variables after which we use a loop for allocating.
226 static const int kLocalVarBound = 5;
227
228 // Allocate and initialize the frame-allocated locals.
229 void AllocateStackSlots();
230
231 // The current top of the expression stack as an assembly operand.
232 MemOperand Top() {
233 AssertIsSpilled();
234 return MemOperand(sp, 0);
235 }
236
237 // An element of the expression stack as an assembly operand.
238 MemOperand ElementAt(int index) {
239 int adjusted_index = index - kVirtualElements[top_of_stack_state_];
240 ASSERT(adjusted_index >= 0);
241 return MemOperand(sp, adjusted_index * kPointerSize);
242 }
243
244 bool KnownSmiAt(int index) {
245 if (index >= kTOSKnownSmiMapSize) return false;
246 return (tos_known_smi_map_ & (1 << index)) != 0;
247 }
248
249 // A frame-allocated local as an assembly operand.
250 inline MemOperand LocalAt(int index);
251
252 // Push the address of the receiver slot on the frame.
253 void PushReceiverSlotAddress();
254
255 // The function frame slot.
256 MemOperand Function() { return MemOperand(fp, kFunctionOffset); }
257
258 // The context frame slot.
259 MemOperand Context() { return MemOperand(fp, kContextOffset); }
260
261 // A parameter as an assembly operand.
262 inline MemOperand ParameterAt(int index);
263
264 // The receiver frame slot.
265 inline MemOperand Receiver();
266
267 // Push a try-catch or try-finally handler on top of the virtual frame.
268 void PushTryHandler(HandlerType type);
269
270 // Call stub given the number of arguments it expects on (and
271 // removes from) the stack.
272 inline void CallStub(CodeStub* stub, int arg_count);
273
274 // Call JS function from top of the stack with arguments
275 // taken from the stack.
276 void CallJSFunction(int arg_count);
277
278 // Call runtime given the number of arguments expected on (and
279 // removed from) the stack.
280 void CallRuntime(const Runtime::Function* f, int arg_count);
281 void CallRuntime(Runtime::FunctionId id, int arg_count);
282
283 #ifdef ENABLE_DEBUGGER_SUPPORT
284 void DebugBreak();
285 #endif
286
287 // Invoke builtin given the number of arguments it expects on (and
288 // removes from) the stack.
289 void InvokeBuiltin(Builtins::JavaScript id,
290 InvokeJSFlags flag,
291 int arg_count);
292
293 // Call load IC. Receiver is on the stack and is consumed. Result is returned
294 // in r0.
295 void CallLoadIC(Handle<String> name, RelocInfo::Mode mode);
296
297 // Call store IC. If the store is contextual, value is found on top of the
298 // frame. If not, value and receiver are on the frame. Both are consumed.
299 // Result is returned in r0.
300 void CallStoreIC(Handle<String> name, bool is_contextual,
301 StrictModeFlag strict_mode);
302
303 // Call keyed load IC. Key and receiver are on the stack. Both are consumed.
304 // Result is returned in r0.
305 void CallKeyedLoadIC();
306
307 // Call keyed store IC. Value, key and receiver are on the stack. All three
308 // are consumed. Result is returned in r0.
309 void CallKeyedStoreIC(StrictModeFlag strict_mode);
310
311 // Call into an IC stub given the number of arguments it removes
312 // from the stack. Register arguments to the IC stub are implicit,
313 // and depend on the type of IC stub.
314 void CallCodeObject(Handle<Code> ic,
315 RelocInfo::Mode rmode,
316 int dropped_args);
317
318 // Drop a number of elements from the top of the expression stack. May
319 // emit code to affect the physical frame. Does not clobber any registers
320 // excepting possibly the stack pointer.
321 void Drop(int count);
322
323 // Drop one element.
324 void Drop() { Drop(1); }
325
326 // Pop an element from the top of the expression stack. Discards
327 // the result.
328 void Pop();
329
330 // Pop an element from the top of the expression stack. The register
331 // returned is one normally used for top-of-stack register allocation,
332 // so you can't hold on to it if you push on the stack.
333 Register PopToRegister(Register but_not_to_this_one = no_reg);
334
335 // Look at the top of the stack. The register returned is aliased and
336 // must be copied to a scratch register before modification.
337 Register Peek();
338
339 // Look at the value beneath the top of the stack. The register returned is
340 // aliased and must be copied to a scratch register before modification.
341 Register Peek2();
342
343 // Duplicate the top of stack.
344 void Dup();
345
346 // Duplicate the two elements on top of stack.
347 void Dup2();
348
349 // Flushes all registers, but it puts a copy of the top-of-stack in r0.
350 void SpillAllButCopyTOSToR0();
351
352 // Flushes all registers, but it puts a copy of the top-of-stack in r1.
353 void SpillAllButCopyTOSToR1();
354
355 // Flushes all registers, but it puts a copy of the top-of-stack in r1
356 // and the next value on the stack in r0.
357 void SpillAllButCopyTOSToR1R0();
358
359 // Pop and save an element from the top of the expression stack and
360 // emit a corresponding pop instruction.
361 void EmitPop(Register reg);
362
363 // Takes the top two elements and puts them in r0 (top element) and r1
364 // (second element).
365 void PopToR1R0();
366
367 // Takes the top element and puts it in r1.
368 void PopToR1();
369
370 // Takes the top element and puts it in r0.
371 void PopToR0();
372
373 // Push an element on top of the expression stack and emit a
374 // corresponding push instruction.
375 void EmitPush(Register reg, TypeInfo type_info = TypeInfo::Unknown());
376 void EmitPush(Operand operand, TypeInfo type_info = TypeInfo::Unknown());
377 void EmitPush(MemOperand operand, TypeInfo type_info = TypeInfo::Unknown());
378 void EmitPushRoot(Heap::RootListIndex index);
379
380 // Overwrite the nth thing on the stack. If the nth position is in a
381 // register then this turns into a mov, otherwise an str. Afterwards
382 // you can still use the register even if it is a register that can be
383 // used for TOS (r0 or r1).
384 void SetElementAt(Register reg, int this_far_down);
385
386 // Get a register which is free and which must be immediately used to
387 // push on the top of the stack.
388 Register GetTOSRegister();
389
390 // Push multiple registers on the stack and the virtual frame.
391 // Registers are selected by setting bits in src_regs and
392 // are pushed in decreasing order: r15 .. r0.
393 void EmitPushMultiple(int count, int src_regs);
394
395 static Register scratch0() { return r7; }
396 static Register scratch1() { return r9; }
397
398 private:
399 static const int kLocal0Offset = JavaScriptFrameConstants::kLocal0Offset;
400 static const int kFunctionOffset = JavaScriptFrameConstants::kFunctionOffset;
401 static const int kContextOffset = StandardFrameConstants::kContextOffset;
402
403 static const int kHandlerSize = StackHandlerConstants::kSize / kPointerSize;
404 static const int kPreallocatedElements = 5 + 8; // 8 expression stack slots.
405
406 // 5 states for the top of stack, which can be in memory or in r0 and r1.
407 enum TopOfStack {
408 NO_TOS_REGISTERS,
409 R0_TOS,
410 R1_TOS,
411 R1_R0_TOS,
412 R0_R1_TOS,
413 TOS_STATES
414 };
415
416 static const int kMaxTOSRegisters = 2;
417
418 static const bool kR0InUse[TOS_STATES];
419 static const bool kR1InUse[TOS_STATES];
420 static const int kVirtualElements[TOS_STATES];
421 static const TopOfStack kStateAfterPop[TOS_STATES];
422 static const TopOfStack kStateAfterPush[TOS_STATES];
423 static const Register kTopRegister[TOS_STATES];
424 static const Register kBottomRegister[TOS_STATES];
425
426 // We allocate up to 5 locals in registers.
427 static const int kNumberOfAllocatedRegisters = 5;
428 // r2 to r6 are allocated to locals.
429 static const int kFirstAllocatedRegister = 2;
430
431 static const Register kAllocatedRegisters[kNumberOfAllocatedRegisters];
432
433 static Register AllocatedRegister(int r) {
434 ASSERT(r >= 0 && r < kNumberOfAllocatedRegisters);
435 return kAllocatedRegisters[r];
436 }
437
438 // The number of elements on the stack frame.
439 int element_count_;
440 TopOfStack top_of_stack_state_:3;
441 int register_allocation_map_:kNumberOfAllocatedRegisters;
442 static const int kTOSKnownSmiMapSize = 4;
443 unsigned tos_known_smi_map_:kTOSKnownSmiMapSize;
444
445 // The index of the element that is at the processor's stack pointer
446 // (the sp register). For now, since everything is in memory, it is given
447 // by the number of elements on the not-very-virtual stack frame.
448 int stack_pointer() { return element_count_ - 1; }
449
450 // The number of frame-allocated locals and parameters respectively.
451 inline int parameter_count() const;
452 inline int local_count() const;
453
454 // The index of the element that is at the processor's frame pointer
455 // (the fp register). The parameters, receiver, function, and context
456 // are below the frame pointer.
457 inline int frame_pointer() const;
458
459 // The index of the first parameter. The receiver lies below the first
460 // parameter.
461 int param0_index() { return 1; }
462
463 // The index of the context slot in the frame. It is immediately
464 // below the frame pointer.
465 inline int context_index();
466
467 // The index of the function slot in the frame. It is below the frame
468 // pointer and context slot.
469 inline int function_index();
470
471 // The index of the first local. Between the frame pointer and the
472 // locals lies the return address.
473 inline int local0_index() const;
474
475 // The index of the base of the expression stack.
476 inline int expression_base_index() const;
477
478 // Convert a frame index into a frame pointer relative offset into the
479 // actual stack.
480 inline int fp_relative(int index);
481
482 // Spill all elements in registers. Spill the top spilled_args elements
483 // on the frame. Sync all other frame elements.
484 // Then drop dropped_args elements from the virtual frame, to match
485 // the effect of an upcoming call that will drop them from the stack.
486 void PrepareForCall(int spilled_args, int dropped_args);
487
488 // If all top-of-stack registers are in use then the lowest one is pushed
489 // onto the physical stack and made free.
490 void EnsureOneFreeTOSRegister();
491
492 // Emit instructions to get the top of stack state from where we are to where
493 // we want to be.
494 void MergeTOSTo(TopOfStack expected_state, Condition cond = al);
495
496 inline bool Equals(const VirtualFrame* other);
497
498 inline void LowerHeight(int count) {
499 element_count_ -= count;
500 if (count >= kTOSKnownSmiMapSize) {
501 tos_known_smi_map_ = 0;
502 } else {
503 tos_known_smi_map_ >>= count;
504 }
505 }
506
507 inline void RaiseHeight(int count, unsigned known_smi_map = 0) {
508 ASSERT(count >= 32 || known_smi_map < (1u << count));
509 element_count_ += count;
510 if (count >= kTOSKnownSmiMapSize) {
511 tos_known_smi_map_ = known_smi_map;
512 } else {
513 tos_known_smi_map_ = ((tos_known_smi_map_ << count) | known_smi_map);
514 }
515 }
516
517 friend class JumpTarget;
518 };
519
520
521 } } // namespace v8::internal
522
523 #endif // V8_ARM_VIRTUAL_FRAME_ARM_H_
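
The header above packs a lot of bookkeeping into bitfields, so a few standalone sketches of the key bits follow. None of them is V8 code; the helper names, any constant values not spelled out in the header, and the main() drivers are illustrative only. First, the bit test that is_used() applies to the allocatable registers r2 through r6, one bit per register starting at kFirstAllocatedRegister:

#include <cassert>
#include <cstdio>

// Illustrative constants mirroring the header: r2..r6 back frame elements.
static const int kFirstAllocatedRegister = 2;
static const int kNumberOfAllocatedRegisters = 5;

// Same bit test as VirtualFrame::is_used() performs for r2..r6: one bit per
// allocatable register in the map, bit 0 corresponding to r2.
static bool IsAllocatedRegisterInUse(int num, int register_allocation_map) {
  assert(num >= kFirstAllocatedRegister);
  assert(num - kFirstAllocatedRegister < kNumberOfAllocatedRegisters);
  return (register_allocation_map & (1 << (num - kFirstAllocatedRegister))) != 0;
}

int main() {
  int map = 1 << (4 - kFirstAllocatedRegister);  // only r4 marked as in use
  std::printf("r4 in use: %d\n", IsAllocatedRegisterInUse(4, map));  // prints 1
  std::printf("r5 in use: %d\n", IsAllocatedRegisterInUse(5, map));  // prints 0
  return 0;
}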
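
Next, the subset test behind IsCompatibleWith(): a frame can be branched to only if every expression-stack slot it assumes to hold a smi is also a known smi in the frame doing the branching. A minimal sketch with made-up bit patterns:

#include <cstdio>

// Subset test used by VirtualFrame::IsCompatibleWith(): every slot the target
// frame believes holds a smi must also be a known smi in the source frame.
static bool IsCompatibleWith(unsigned target_smi_map, unsigned source_smi_map) {
  return (target_smi_map & ~source_smi_map) == 0;
}

int main() {
  // Target assumes slots 0 and 1 are smis, but the source only knows slot 0.
  std::printf("%d\n", IsCompatibleWith(0x3, 0x1));  // prints 0 (incompatible)
  // Source knows at least everything the target assumes.
  std::printf("%d\n", IsCompatibleWith(0x1, 0x3));  // prints 1 (compatible)
  return 0;
}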
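
ElementAt() turns a virtual index (0 is the top of the expression stack) into an sp-relative memory operand by first subtracting the number of topmost elements currently held in r0/r1 rather than in memory. The kVirtualElements values below are assumed (they are defined in virtual-frame-arm.cc, not in this header), as is the 4-byte pointer size:

#include <cassert>
#include <cstdio>

// Illustrative mirror of the header's TOS states and the assumed per-state
// count of elements held in registers instead of memory (kVirtualElements).
enum TopOfStack { NO_TOS_REGISTERS, R0_TOS, R1_TOS, R1_R0_TOS, R0_R1_TOS, TOS_STATES };
static const int kVirtualElements[TOS_STATES] = { 0, 1, 1, 2, 2 };
static const int kPointerSize = 4;  // 32-bit ARM pointer size

// Same computation as VirtualFrame::ElementAt(): index 0 is the top element.
static int SpOffsetOfElement(int index, TopOfStack state) {
  int adjusted_index = index - kVirtualElements[state];
  assert(adjusted_index >= 0);  // register-held elements have no sp offset
  return adjusted_index * kPointerSize;
}

int main() {
  // With r1 and r0 holding the two topmost elements, element 2 is the first
  // one actually in memory, at [sp, #0]; element 3 is one word below it.
  std::printf("%d\n", SpOffsetOfElement(2, R1_R0_TOS));  // prints 0
  std::printf("%d\n", SpOffsetOfElement(3, R1_R0_TOS));  // prints 4
  return 0;
}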
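
Finally, RaiseHeight() and LowerHeight() keep the 4-bit known-smi map in step with pushes and pops by shifting it left or right, with bit 0 tracking the top of the stack. A standalone sketch of that arithmetic:

#include <cassert>
#include <cstdio>

static const int kTOSKnownSmiMapSize = 4;
static unsigned tos_known_smi_map = 0;  // bit 0: top of stack is a known smi

// Mirrors VirtualFrame::RaiseHeight(): shift in smi knowledge for new elements.
static void RaiseHeight(int count, unsigned known_smi_map) {
  assert(count >= 32 || known_smi_map < (1u << count));
  if (count >= kTOSKnownSmiMapSize) {
    tos_known_smi_map = known_smi_map;
  } else {
    tos_known_smi_map = (tos_known_smi_map << count) | known_smi_map;
  }
}

// Mirrors VirtualFrame::LowerHeight(): discard knowledge for popped elements.
static void LowerHeight(int count) {
  if (count >= kTOSKnownSmiMapSize) {
    tos_known_smi_map = 0;
  } else {
    tos_known_smi_map >>= count;
  }
}

int main() {
  RaiseHeight(1, 1);  // push a known smi
  RaiseHeight(1, 0);  // push a value of unknown type
  std::printf("map after two pushes: %u\n", tos_known_smi_map);  // prints 2
  LowerHeight(1);     // pop the unknown value
  std::printf("map after pop: %u\n", tos_known_smi_map);         // prints 1
  return 0;
}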