Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(374)

Side by Side Diff: src/ia32/virtual-frame-ia32.cc

Issue 6811012: Remove some dead code. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/ia32/virtual-frame-ia32.h ('k') | src/jump-target.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #if defined(V8_TARGET_ARCH_IA32)
31
32 #include "codegen-inl.h"
33 #include "register-allocator-inl.h"
34 #include "scopes.h"
35 #include "virtual-frame-inl.h"
36 #include "stub-cache.h"
37
38 namespace v8 {
39 namespace internal {
40
41 #define __ ACCESS_MASM(masm())
42
43 void VirtualFrame::SyncElementBelowStackPointer(int index) {
44   // Emit code to write elements below the stack pointer to their
45   // (already allocated) stack address.
46   ASSERT(index <= stack_pointer_);
47   FrameElement element = elements_[index];
48   ASSERT(!element.is_synced());
49   switch (element.type()) {
50     case FrameElement::INVALID:
   // Invalid elements have no value to write; nothing to emit.
51       break;
52
53     case FrameElement::MEMORY:
54       // This function should not be called with synced elements.
55       // (memory elements are always synced).
56       UNREACHABLE();
57       break;
58
59     case FrameElement::REGISTER:
60       __ mov(Operand(ebp, fp_relative(index)), element.reg());
61       break;
62
63     case FrameElement::CONSTANT:
64       if (cgen()->IsUnsafeSmi(element.handle())) {
65         cgen()->StoreUnsafeSmiToLocal(fp_relative(index), element.handle());
66       } else {
67         __ Set(Operand(ebp, fp_relative(index)),
68             Immediate(element.handle()));
69       }
70       break;
71
72     case FrameElement::COPY: {
73       int backing_index = element.index();
74       FrameElement backing_element = elements_[backing_index];
75       if (backing_element.is_memory()) {
   // Memory-to-memory moves need a scratch register on ia32.
76         Result temp = cgen()->allocator()->Allocate();
77         ASSERT(temp.is_valid());
78         __ mov(temp.reg(), Operand(ebp, fp_relative(backing_index)));
79         __ mov(Operand(ebp, fp_relative(index)), temp.reg());
80       } else {
81         ASSERT(backing_element.is_register());
82         __ mov(Operand(ebp, fp_relative(index)), backing_element.reg());
83       }
84       break;
85     }
86   }
   // Record that the element's stack slot now holds its value.
87   elements_[index].set_sync();
88 }
89
90
91 void VirtualFrame::SyncElementByPushing(int index) {
92   // Sync an element of the frame that is just above the stack pointer
93   // by pushing it.
94   ASSERT(index == stack_pointer_ + 1);
95   stack_pointer_++;
96   FrameElement element = elements_[index];
97
98   switch (element.type()) {
99     case FrameElement::INVALID:
   // Push an arbitrary Smi placeholder just to reserve the stack slot.
100       __ push(Immediate(Smi::FromInt(0)));
101       break;
102
103     case FrameElement::MEMORY:
104       // No memory elements exist above the stack pointer.
105       UNREACHABLE();
106       break;
107
108     case FrameElement::REGISTER:
109       __ push(element.reg());
110       break;
111
112     case FrameElement::CONSTANT:
113       if (cgen()->IsUnsafeSmi(element.handle())) {
114        cgen()->PushUnsafeSmi(element.handle());
115       } else {
116         __ push(Immediate(element.handle()));
117       }
118       break;
119
120     case FrameElement::COPY: {
121       int backing_index = element.index();
122       FrameElement backing = elements_[backing_index];
123       ASSERT(backing.is_memory() || backing.is_register());
124       if (backing.is_memory()) {
125         __ push(Operand(ebp, fp_relative(backing_index)));
126       } else {
127         __ push(backing.reg());
128       }
129       break;
130     }
131   }
   // The pushed value now lives in the element's stack slot.
132   elements_[index].set_sync();
133 }
134
135
136 // Clear the dirty bits for the range of elements in
137 // [min(stack_pointer_ + 1, begin), end].
138 void VirtualFrame::SyncRange(int begin, int end) {
139   ASSERT(begin >= 0);
140   ASSERT(end < element_count());
141   // Sync elements below the range if they have not been materialized
142   // on the stack.
143   int start = Min(begin, stack_pointer_ + 1);
144
145   // Emit normal push instructions for elements above stack pointer
146   // and use mov instructions if we are below stack pointer.
147   for (int i = start; i <= end; i++) {
148     if (!elements_[i].is_synced()) {
149       if (i <= stack_pointer_) {
150         SyncElementBelowStackPointer(i);
151       } else {
   // Each push here also advances stack_pointer_ by one.
152         SyncElementByPushing(i);
153       }
154     }
155   }
156 }
157
158
159 void VirtualFrame::MakeMergable() {
   // Rewrite the frame so it contains no constants and no copies, making
   // it a legal merge target: every element becomes memory or register.
160   for (int i = 0; i < element_count(); i++) {
161     FrameElement element = elements_[i];
162
163     // All number type information is reset to unknown for a mergable frame
164     // because of incoming back edges.
165     if (element.is_constant() || element.is_copy()) {
166       if (element.is_synced()) {
167         // Just spill.
168         elements_[i] = FrameElement::MemoryElement(TypeInfo::Unknown());
169       } else {
170         // Allocate to a register.
171         FrameElement backing_element;  // Invalid if not a copy.
172         if (element.is_copy()) {
173           backing_element = elements_[element.index()];
174         }
175         Result fresh = cgen()->allocator()->Allocate();
176         ASSERT(fresh.is_valid());  // A register was spilled if all were in use.
177         elements_[i] =
178             FrameElement::RegisterElement(fresh.reg(),
179                                           FrameElement::NOT_SYNCED,
180                                           TypeInfo::Unknown());
181         Use(fresh.reg(), i);
182
183         // Emit a move.
184         if (element.is_constant()) {
185           if (cgen()->IsUnsafeSmi(element.handle())) {
186             cgen()->MoveUnsafeSmi(fresh.reg(), element.handle());
187           } else {
188             __ Set(fresh.reg(), Immediate(element.handle()));
189           }
190         } else {
191           ASSERT(element.is_copy());
192           // Copies are only backed by register or memory locations.
193           if (backing_element.is_register()) {
194             // The backing store may have been spilled by allocating,
195             // but that's OK.  If it was, the value is right where we
196             // want it.
197             if (!fresh.reg().is(backing_element.reg())) {
198               __ mov(fresh.reg(), backing_element.reg());
199             }
200           } else {
201             ASSERT(backing_element.is_memory());
202             __ mov(fresh.reg(), Operand(ebp, fp_relative(element.index())));
203           }
204         }
205       }
206       // No need to set the copied flag --- there are no copies.
207     } else {
208       // Clear the copy flag of non-constant, non-copy elements.
209       // They cannot be copied because copies are not allowed.
210       // The copy flag is not relied on before the end of this loop,
211       // including when registers are spilled.
212       elements_[i].clear_copied();
213       elements_[i].set_type_info(TypeInfo::Unknown());
214     }
215   }
216 }
217
218
219 void VirtualFrame::MergeTo(VirtualFrame* expected) {
   // Emit code that transforms this (the current) frame's layout into the
   // expected frame's layout, moving values between registers and memory.
220   Comment cmnt(masm(), "[ Merge frame");
221   // We should always be merging the code generator's current frame to an
222   // expected frame.
223   ASSERT(cgen()->frame() == this);
224
225   // Adjust the stack pointer upward (toward the top of the virtual
226   // frame) if necessary.
227   if (stack_pointer_ < expected->stack_pointer_) {
228     int difference = expected->stack_pointer_ - stack_pointer_;
229     stack_pointer_ = expected->stack_pointer_;
230     __ sub(Operand(esp), Immediate(difference * kPointerSize));
231   }
232
   // Perform the moves in three phases so register/memory traffic is legal
   // at each step.
233   MergeMoveRegistersToMemory(expected);
234   MergeMoveRegistersToRegisters(expected);
235   MergeMoveMemoryToRegisters(expected);
236
237   // Adjust the stack pointer downward if necessary.
238   if (stack_pointer_ > expected->stack_pointer_) {
239     int difference = stack_pointer_ - expected->stack_pointer_;
240     stack_pointer_ = expected->stack_pointer_;
241     __ add(Operand(esp), Immediate(difference * kPointerSize));
242   }
243
244   // At this point, the frames should be identical.
245   ASSERT(Equals(expected));
246 }
247
248
249 void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame* expected) {
250   ASSERT(stack_pointer_ >= expected->stack_pointer_);
251
252   // Move registers, constants, and copies to memory.  Perform moves
253   // from the top downward in the frame in order to leave the backing
254   // stores of copies in registers.
255   //
256   // Moving memory-backed copies to memory requires a spare register
257   // for the memory-to-memory moves.  Since we are performing a merge,
258   // we use esi (which is already saved in the frame).  We keep track
259   // of the index of the frame element esi is caching or kIllegalIndex
260   // if esi has not been disturbed.
261   int esi_caches = kIllegalIndex;
262   for (int i = element_count() - 1; i >= 0; i--) {
263     FrameElement target = expected->elements_[i];
264     if (target.is_register()) continue;  // Handle registers later.
265     if (target.is_memory()) {
266       FrameElement source = elements_[i];
267       switch (source.type()) {
268         case FrameElement::INVALID:
269           // Not a legal merge move.
270           UNREACHABLE();
271           break;
272
273         case FrameElement::MEMORY:
274           // Already in place.
275           break;
276
277         case FrameElement::REGISTER:
278           Unuse(source.reg());
279           if (!source.is_synced()) {
280             __ mov(Operand(ebp, fp_relative(i)), source.reg());
281           }
282           break;
283
284         case FrameElement::CONSTANT:
285           if (!source.is_synced()) {
286             if (cgen()->IsUnsafeSmi(source.handle())) {
   // Unsafe smis must be materialized in a register (esi) first.
287               esi_caches = i;
288               cgen()->MoveUnsafeSmi(esi, source.handle());
289               __ mov(Operand(ebp, fp_relative(i)), esi);
290             } else {
291               __ Set(Operand(ebp, fp_relative(i)), Immediate(source.handle()));
292             }
293           }
294           break;
295
296         case FrameElement::COPY:
297           if (!source.is_synced()) {
298             int backing_index = source.index();
299             FrameElement backing_element = elements_[backing_index];
300             if (backing_element.is_memory()) {
301               // If we have to spill a register, we spill esi.
302               if (esi_caches != backing_index) {
303                 esi_caches = backing_index;
304                 __ mov(esi, Operand(ebp, fp_relative(backing_index)));
305               }
306               __ mov(Operand(ebp, fp_relative(i)), esi);
307             } else {
308               ASSERT(backing_element.is_register());
309               __ mov(Operand(ebp, fp_relative(i)), backing_element.reg());
310             }
311           }
312           break;
313       }
314     }
315     elements_[i] = target;
316   }
317
   // If esi was used as scratch, restore the context register from its
   // saved slot in the frame before returning.
318   if (esi_caches != kIllegalIndex) {
319     __ mov(esi, Operand(ebp, fp_relative(context_index())));
320   }
321 }
322
323
324 void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame* expected) {
325   // We have already done X-to-memory moves.
326   ASSERT(stack_pointer_ >= expected->stack_pointer_);
327
328   for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
329     // Move the right value into register i if it is currently in a register.
330     int index = expected->register_location(i);
331     int use_index = register_location(i);
332     // Skip if register i is unused in the target or else if source is
333     // not a register (this is not a register-to-register move).
334     if (index == kIllegalIndex || !elements_[index].is_register()) continue;
335
336     Register target = RegisterAllocator::ToRegister(i);
337     Register source = elements_[index].reg();
338     if (index != use_index) {
339       if (use_index == kIllegalIndex) {  // Target is currently unused.
340         // Copy contents of source from source to target.
341         // Set frame element register to target.
342         Use(target, index);
343         Unuse(source);
344         __ mov(target, source);
345       } else {
346         // Exchange contents of registers source and target.
347         // Nothing except the register backing use_index has changed.
348         elements_[use_index].set_reg(source);
349         set_register_location(target, index);
350         set_register_location(source, use_index);
351         __ xchg(source, target);
352       }
353     }
354
   // Sync the element to memory if the target frame expects it synced
   // but the current element is not.
355     if (!elements_[index].is_synced() &&
356         expected->elements_[index].is_synced()) {
357       __ mov(Operand(ebp, fp_relative(index)), target);
358     }
359     elements_[index] = expected->elements_[index];
360   }
361 }
362
363
364 void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame* expected) {
365   // Move memory, constants, and copies to registers.  This is the
366   // final step and since it is not done from the bottom up, but in
367   // register code order, we have special code to ensure that the backing
368   // elements of copies are in their correct locations when we
369   // encounter the copies.
370   for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
371     int index = expected->register_location(i);
372     if (index != kIllegalIndex) {
373       FrameElement source = elements_[index];
374       FrameElement target = expected->elements_[index];
375       Register target_reg = RegisterAllocator::ToRegister(i);
376       ASSERT(target.reg().is(target_reg));
377       switch (source.type()) {
378         case FrameElement::INVALID:
379           UNREACHABLE();
380           break;
381         case FrameElement::REGISTER:
382           ASSERT(source.Equals(target));
383           // Go to next iteration.  Skips Use(target_reg) and syncing
384           // below.  It is safe to skip syncing because a target
385           // register frame element would only be synced if all source
386           // elements were.
387           continue;
388           break;
389         case FrameElement::MEMORY:
390           ASSERT(index <= stack_pointer_);
391           __ mov(target_reg, Operand(ebp, fp_relative(index)));
392           break;
393
394         case FrameElement::CONSTANT:
395           if (cgen()->IsUnsafeSmi(source.handle())) {
396            cgen()->MoveUnsafeSmi(target_reg, source.handle());
397           } else {
398            __ Set(target_reg, Immediate(source.handle()));
399           }
400           break;
401
402         case FrameElement::COPY: {
403           int backing_index = source.index();
404           FrameElement backing = elements_[backing_index];
405           ASSERT(backing.is_memory() || backing.is_register());
406           if (backing.is_memory()) {
407             ASSERT(backing_index <= stack_pointer_);
408             // Code optimization if backing store should also move
409             // to a register: move backing store to its register first.
410             if (expected->elements_[backing_index].is_register()) {
411               FrameElement new_backing = expected->elements_[backing_index];
412               Register new_backing_reg = new_backing.reg();
413               ASSERT(!is_used(new_backing_reg));
414               elements_[backing_index] = new_backing;
415               Use(new_backing_reg, backing_index);
416               __ mov(new_backing_reg,
417                      Operand(ebp, fp_relative(backing_index)));
418               __ mov(target_reg, new_backing_reg);
419             } else {
420               __ mov(target_reg, Operand(ebp, fp_relative(backing_index)));
421             }
422           } else {
423             __ mov(target_reg, backing.reg());
424           }
425         }
426       }
427       // Ensure the proper sync state.
428       if (target.is_synced() && !source.is_synced()) {
429         __ mov(Operand(ebp, fp_relative(index)), target_reg);
430       }
431       Use(target_reg, index);
432       elements_[index] = target;
433     }
434   }
435 }
436
437
438 void VirtualFrame::Enter() {
   // Build the standard JS frame: push ebp, set ebp to esp, then push the
   // context (esi) and the function (edi) into their frame slots.
439   // Registers live on entry: esp, ebp, esi, edi.
440   Comment cmnt(masm(), "[ Enter JS frame");
441
442 #ifdef DEBUG
443   if (FLAG_debug_code) {
444     // Verify that edi contains a JS function.  The following code
445     // relies on eax being available for use.
446     __ test(edi, Immediate(kSmiTagMask));
447     __ Check(not_zero,
448              "VirtualFrame::Enter - edi is not a function (smi check).");
449     __ CmpObjectType(edi, JS_FUNCTION_TYPE, eax);
450     __ Check(equal,
451              "VirtualFrame::Enter - edi is not a function (map check).");
452   }
453 #endif
454
455   EmitPush(ebp);
456
457   __ mov(ebp, Operand(esp));
458
459   // Store the context in the frame.  The context is kept in esi and a
460   // copy is stored in the frame.  The external reference to esi
461   // remains.
462   EmitPush(esi);
463
464   // Store the function in the frame.  The frame owns the register
465   // reference now (ie, it can keep it in edi or spill it later).
466   Push(edi);
467   SyncElementAt(element_count() - 1);
468   cgen()->allocator()->Unuse(edi);
469 }
470
471
472 void VirtualFrame::Exit() {
473   Comment cmnt(masm(), "[ Exit JS frame");
474   // Record the location of the JS exit code for patching when setting
475   // break point.
476   __ RecordJSReturn();
477
478   // Avoid using the leave instruction here, because it is too
479   // short. We need the return sequence to be at least the size of a
480   // call instruction to support patching the exit code in the
481   // debugger. See VisitReturnStatement for the full return sequence.
482   __ mov(esp, Operand(ebp));
483   stack_pointer_ = frame_pointer();
   // Drop all frame elements above the frame pointer, releasing any
   // registers they held.
484   for (int i = element_count() - 1; i > stack_pointer_; i--) {
485     FrameElement last = elements_.RemoveLast();
486     if (last.is_register()) {
487       Unuse(last.reg());
488     }
489   }
490
491   EmitPop(ebp);
492 }
493
494
495 void VirtualFrame::AllocateStackSlots() {
496   int count = local_count();
497   if (count > 0) {
498     Comment cmnt(masm(), "[ Allocate space for locals");
499     // The locals are initialized to a constant (the undefined value), but
500     // we sync them with the actual frame to allocate space for spilling
501     // them later.  First sync everything above the stack pointer so we can
502     // use pushes to allocate and initialize the locals.
503     SyncRange(stack_pointer_ + 1, element_count() - 1);
504     Handle<Object> undefined = FACTORY->undefined_value();
505     FrameElement initial_value =
506         FrameElement::ConstantElement(undefined, FrameElement::SYNCED);
507     if (count == 1) {
508       __ push(Immediate(undefined));
509     } else if (count < kLocalVarBound) {
510       // For fewer locals the unrolled loop is more compact.
   // Materialize undefined in a register once, then push it repeatedly.
511       Result temp = cgen()->allocator()->Allocate();
512       ASSERT(temp.is_valid());
513       __ Set(temp.reg(), Immediate(undefined));
514       for (int i = 0; i < count; i++) {
515         __ push(temp.reg());
516       }
517     } else {
518       // For more locals a loop in generated code is more compact.
519       Label alloc_locals_loop;
520       Result cnt = cgen()->allocator()->Allocate();
521       Result tmp = cgen()->allocator()->Allocate();
522       ASSERT(cnt.is_valid());
523       ASSERT(tmp.is_valid());
524       __ mov(cnt.reg(), Immediate(count));
525       __ mov(tmp.reg(), Immediate(undefined));
526       __ bind(&alloc_locals_loop);
527       __ push(tmp.reg());
528       __ dec(cnt.reg());
529       __ j(not_zero, &alloc_locals_loop);
530     }
   // Mirror the pushes in the virtual frame bookkeeping.
531     for (int i = 0; i < count; i++) {
532       elements_.Add(initial_value);
533       stack_pointer_++;
534     }
535   }
536 }
537
538
539 void VirtualFrame::SaveContextRegister() {
   // Write the context register (esi) into its dedicated frame slot.
540   ASSERT(elements_[context_index()].is_memory());
541   __ mov(Operand(ebp, fp_relative(context_index())), esi);
542 }
543
544
545 void VirtualFrame::RestoreContextRegister() {
   // Reload the context register (esi) from its dedicated frame slot.
546   ASSERT(elements_[context_index()].is_memory());
547   __ mov(esi, Operand(ebp, fp_relative(context_index())));
548 }
549
550
551 void VirtualFrame::PushReceiverSlotAddress() {
   // Compute the address of the receiver slot (parameter index -1) and
   // push it on the virtual frame.
552   Result temp = cgen()->allocator()->Allocate();
553   ASSERT(temp.is_valid());
554   __ lea(temp.reg(), ParameterAt(-1));
555   Push(&temp);
556 }
557
558
559 int VirtualFrame::InvalidateFrameSlotAt(int index) {
   // Invalidate the element at index.  If it backs copies, promote the
   // first copy to be the new backing element and redirect the others to
   // it.  Returns the new backing element's index, or kIllegalIndex if
   // the slot had no copies.
560   FrameElement original = elements_[index];
561
562   // Is this element the backing store of any copies?
563   int new_backing_index = kIllegalIndex;
564   if (original.is_copied()) {
565     // Verify it is copied, and find first copy.
566     for (int i = index + 1; i < element_count(); i++) {
567       if (elements_[i].is_copy() && elements_[i].index() == index) {
568         new_backing_index = i;
569         break;
570       }
571     }
572   }
573
574   if (new_backing_index == kIllegalIndex) {
575     // No copies found, return kIllegalIndex.
576     if (original.is_register()) {
577       Unuse(original.reg());
578     }
579     elements_[index] = FrameElement::InvalidElement();
580     return kIllegalIndex;
581   }
582
583   // This is the backing store of copies.
584   Register backing_reg;
585   if (original.is_memory()) {
   // The value must move into a register so the remaining copies can
   // still share a backing location.
586     Result fresh = cgen()->allocator()->Allocate();
587     ASSERT(fresh.is_valid());
588     Use(fresh.reg(), new_backing_index);
589     backing_reg = fresh.reg();
590     __ mov(backing_reg, Operand(ebp, fp_relative(index)));
591   } else {
592     // The original was in a register.
593     backing_reg = original.reg();
594     set_register_location(backing_reg, new_backing_index);
595   }
596   // Invalidate the element at index.
597   elements_[index] = FrameElement::InvalidElement();
598   // Set the new backing element.
599   if (elements_[new_backing_index].is_synced()) {
600     elements_[new_backing_index] =
601         FrameElement::RegisterElement(backing_reg,
602                                       FrameElement::SYNCED,
603                                       original.type_info());
604   } else {
605     elements_[new_backing_index] =
606         FrameElement::RegisterElement(backing_reg,
607                                       FrameElement::NOT_SYNCED,
608                                       original.type_info());
609   }
610   // Update the other copies.
611   for (int i = new_backing_index + 1; i < element_count(); i++) {
612     if (elements_[i].is_copy() && elements_[i].index() == index) {
613       elements_[i].set_index(new_backing_index);
614       elements_[new_backing_index].set_copied();
615     }
616   }
617   return new_backing_index;
618 }
619
620
621 void VirtualFrame::TakeFrameSlotAt(int index) {
   // Move the value at the given slot to the top of the frame,
   // invalidating the original slot (a non-duplicating read).
622   ASSERT(index >= 0);
   // NOTE(review): this bound is <= element_count() while the similar
   // check in StoreToFrameSlotAt uses < — confirm whether one-past-end
   // is intentionally allowed here.
623   ASSERT(index <= element_count());
624   FrameElement original = elements_[index];
625   int new_backing_store_index = InvalidateFrameSlotAt(index);
626   if (new_backing_store_index != kIllegalIndex) {
   // The slot backed copies; the promoted backing element carries the
   // value, so just push a copy of it.
627     elements_.Add(CopyElementAt(new_backing_store_index));
628     return;
629   }
630
631   switch (original.type()) {
632     case FrameElement::MEMORY: {
633       // Emit code to load the original element's data into a register.
634       // Push that register as a FrameElement on top of the frame.
635       Result fresh = cgen()->allocator()->Allocate();
636       ASSERT(fresh.is_valid());
637       FrameElement new_element =
638           FrameElement::RegisterElement(fresh.reg(),
639                                         FrameElement::NOT_SYNCED,
640                                         original.type_info());
641       Use(fresh.reg(), element_count());
642       elements_.Add(new_element);
643       __ mov(fresh.reg(), Operand(ebp, fp_relative(index)));
644       break;
645     }
646     case FrameElement::REGISTER:
647       Use(original.reg(), element_count());
648       // Fall through.
649     case FrameElement::CONSTANT:
650     case FrameElement::COPY:
651       original.clear_sync();
652       elements_.Add(original);
653       break;
654     case FrameElement::INVALID:
655       UNREACHABLE();
656       break;
657   }
658 }
659
660
661 void VirtualFrame::StoreToFrameSlotAt(int index) {
662   // Store the value on top of the frame to the virtual frame slot at
663   // a given index.  The value on top of the frame is left in place.
664   // This is a duplicating operation, so it can create copies.
665   ASSERT(index >= 0);
666   ASSERT(index < element_count());
667
668   int top_index = element_count() - 1;
669   FrameElement top = elements_[top_index];
670   FrameElement original = elements_[index];
   // Storing a copy of a slot back into its own backing slot is a no-op.
671   if (top.is_copy() && top.index() == index) return;
672   ASSERT(top.is_valid());
673
674   InvalidateFrameSlotAt(index);
675
676   // InvalidateFrameSlotAt can potentially change any frame element, due
677   // to spilling registers to allocate temporaries in order to preserve
678   // the copy-on-write semantics of aliased elements.  Reload top from
679   // the frame.
680   top = elements_[top_index];
681
682   if (top.is_copy()) {
683     // There are two cases based on the relative positions of the
684     // stored-to slot and the backing slot of the top element.
685     int backing_index = top.index();
686     ASSERT(backing_index != index);
687     if (backing_index < index) {
688       // 1. The top element is a copy of a slot below the stored-to
689       // slot.  The stored-to slot becomes an unsynced copy of that
690       // same backing slot.
691       elements_[index] = CopyElementAt(backing_index);
692     } else {
693       // 2. The top element is a copy of a slot above the stored-to
694       // slot.  The stored-to slot becomes the new (unsynced) backing
695       // slot and both the top element and the element at the former
696       // backing slot become copies of it.  The sync state of the top
697       // and former backing elements is preserved.
698       FrameElement backing_element = elements_[backing_index];
699       ASSERT(backing_element.is_memory() || backing_element.is_register());
700       if (backing_element.is_memory()) {
701         // Because sets of copies are canonicalized to be backed by
702         // their lowest frame element, and because memory frame
703         // elements are backed by the corresponding stack address, we
704         // have to move the actual value down in the stack.
705         //
706         // TODO(209): consider allocating the stored-to slot to the
707         // temp register.  Alternatively, allow copies to appear in
708         // any order in the frame and lazily move the value down to
709         // the slot.
710         Result temp = cgen()->allocator()->Allocate();
711         ASSERT(temp.is_valid());
712         __ mov(temp.reg(), Operand(ebp, fp_relative(backing_index)));
713         __ mov(Operand(ebp, fp_relative(index)), temp.reg());
714       } else {
715         set_register_location(backing_element.reg(), index);
716         if (backing_element.is_synced()) {
717           // If the element is a register, we will not actually move
718           // anything on the stack but only update the virtual frame
719           // element.
720           backing_element.clear_sync();
721         }
722       }
723       elements_[index] = backing_element;
724
725       // The old backing element becomes a copy of the new backing
726       // element.
727       FrameElement new_element = CopyElementAt(index);
728       elements_[backing_index] = new_element;
729       if (backing_element.is_synced()) {
730         elements_[backing_index].set_sync();
731       }
732
733       // All the copies of the old backing element (including the top
734       // element) become copies of the new backing element.
735       for (int i = backing_index + 1; i < element_count(); i++) {
736       if (elements_[i].is_copy() && elements_[i].index() == backing_index) {
737           elements_[i].set_index(index);
738         }
739       }
740     }
741     return;
742   }
743
744   // Move the top element to the stored-to slot and replace it (the
745   // top element) with a copy.
746   elements_[index] = top;
747   if (top.is_memory()) {
748     // TODO(209): consider allocating the stored-to slot to the temp
749     // register.  Alternatively, allow copies to appear in any order
750     // in the frame and lazily move the value down to the slot.
751     FrameElement new_top = CopyElementAt(index);
752     new_top.set_sync();
753     elements_[top_index] = new_top;
754
755     // The sync state of the former top element is correct (synced).
756     // Emit code to move the value down in the frame.
757     Result temp = cgen()->allocator()->Allocate();
758     ASSERT(temp.is_valid());
759     __ mov(temp.reg(), Operand(esp, 0));
760     __ mov(Operand(ebp, fp_relative(index)), temp.reg());
761   } else if (top.is_register()) {
762     set_register_location(top.reg(), index);
763     // The stored-to slot has the (unsynced) register reference and
764     // the top element becomes a copy.  The sync state of the top is
765     // preserved.
766     FrameElement new_top = CopyElementAt(index);
767     if (top.is_synced()) {
768       new_top.set_sync();
769       elements_[index].clear_sync();
770     }
771     elements_[top_index] = new_top;
772   } else {
773     // The stored-to slot holds the same value as the top but
774     // unsynced.  (We do not have copies of constants yet.)
775     ASSERT(top.is_constant());
776     elements_[index].clear_sync();
777   }
778 }
779
780
781 void VirtualFrame::UntaggedPushFrameSlotAt(int index) {
   // Push the slot's value on the frame as an untagged int32, emitting a
   // smi-untag or heap-number-to-int32 conversion; bails out via
   // unsafe_bailout_ if the value is not representable as int32.
782   ASSERT(index >= 0);
783   ASSERT(index <= element_count());
784   FrameElement original = elements_[index];
   // Follow a copy to its backing element so we read the real value.
785   if (original.is_copy()) {
786     original = elements_[original.index()];
787     index = original.index();
788   }
789
790   switch (original.type()) {
791     case FrameElement::MEMORY:
792     case FrameElement::REGISTER: {
793       Label done;
794       // Emit code to load the original element's data into a register.
795       // Push that register as a FrameElement on top of the frame.
796       Result fresh = cgen()->allocator()->Allocate();
797       ASSERT(fresh.is_valid());
798       Register fresh_reg = fresh.reg();
799       FrameElement new_element =
800           FrameElement::RegisterElement(fresh_reg,
801                                         FrameElement::NOT_SYNCED,
802                                         original.type_info());
803       new_element.set_untagged_int32(true);
804       Use(fresh_reg, element_count());
805       fresh.Unuse();  // BreakTarget does not handle a live Result well.
806       elements_.Add(new_element);
807       if (original.is_register()) {
808         __ mov(fresh_reg, original.reg());
809       } else {
810         ASSERT(original.is_memory());
811         __ mov(fresh_reg, Operand(ebp, fp_relative(index)));
812       }
813       // Now convert the value to int32, or bail out.
814       if (original.type_info().IsSmi()) {
815         __ SmiUntag(fresh_reg);
816         // Pushing the element is completely done.
817       } else {
818         __ test(fresh_reg, Immediate(kSmiTagMask));
819         Label not_smi;
820         __ j(not_zero, &not_smi);
821         __ SmiUntag(fresh_reg);
822         __ jmp(&done);
823
824         __ bind(&not_smi);
825         if (!original.type_info().IsNumber()) {
826           __ cmp(FieldOperand(fresh_reg, HeapObject::kMapOffset),
827                  FACTORY->heap_number_map());
828           cgen()->unsafe_bailout_->Branch(not_equal);
829         }
830
   // The conversion below requires SSE2; non-SSE2 CPUs are not
   // supported on this path.
831         if (!CpuFeatures::IsSupported(SSE2)) {
832           UNREACHABLE();
833         } else {
834           CpuFeatures::Scope use_sse2(SSE2);
835           __ movdbl(xmm0, FieldOperand(fresh_reg, HeapNumber::kValueOffset));
   // Truncate to int32 and convert back; a mismatch means the double
   // was not an exact int32 (or was NaN).
836           __ cvttsd2si(fresh_reg, Operand(xmm0));
837           __ cvtsi2sd(xmm1, Operand(fresh_reg));
838           __ ucomisd(xmm0, xmm1);
839           cgen()->unsafe_bailout_->Branch(not_equal);
840           cgen()->unsafe_bailout_->Branch(parity_even);  // NaN.
841           // Test for negative zero.
842           __ test(fresh_reg, Operand(fresh_reg));
843           __ j(not_zero, &done);
844           __ movmskpd(fresh_reg, xmm0);
845           __ and_(fresh_reg, 0x1);
846           cgen()->unsafe_bailout_->Branch(not_equal);
847         }
848         __ bind(&done);
849       }
850       break;
851     }
852     case FrameElement::CONSTANT:
853       elements_.Add(CopyElementAt(index));
854       elements_[element_count() - 1].set_untagged_int32(true);
855       break;
856     case FrameElement::COPY:
857     case FrameElement::INVALID:
858       UNREACHABLE();
859       break;
860   }
861 }
862
863
864 void VirtualFrame::PushTryHandler(HandlerType type) {
   // Push a try handler frame on the stack and grow the virtual frame to
   // match.
865   ASSERT(cgen()->HasValidEntryRegisters());
866   // Grow the expression stack by handler size less one (the return
867   // address is already pushed by a call instruction).
868   Adjust(kHandlerSize - 1);
869   __ PushTryHandler(IN_JAVASCRIPT, type);
870 }
871
872
873 Result VirtualFrame::RawCallStub(CodeStub* stub) {
   // Call the stub and return its result, which arrives in eax by the
   // stub calling convention.
874   ASSERT(cgen()->HasValidEntryRegisters());
875   __ CallStub(stub);
876   Result result = cgen()->allocator()->Allocate(eax);
877   ASSERT(result.is_valid());
878   return result;
879 }
880
881
882 Result VirtualFrame::CallStub(CodeStub* stub, Result* arg) {
   // Call a stub that takes a single argument in eax; consumes arg.
883   PrepareForCall(0, 0);
884   arg->ToRegister(eax);
885   arg->Unuse();
886   return RawCallStub(stub);
887 }
888
889
890 Result VirtualFrame::CallStub(CodeStub* stub, Result* arg0, Result* arg1) {
   // Call a stub taking arg0 in edx and arg1 in eax; consumes both args.
   // The moves are ordered so neither argument clobbers the other.
891   PrepareForCall(0, 0);
892
893   if (arg0->is_register() && arg0->reg().is(eax)) {
894     if (arg1->is_register() && arg1->reg().is(edx)) {
895       // Wrong registers.
896       __ xchg(eax, edx);
897     } else {
898       // Register edx is free for arg0, which frees eax for arg1.
899       arg0->ToRegister(edx);
900       arg1->ToRegister(eax);
901     }
902   } else {
903     // Register eax is free for arg1, which guarantees edx is free for
904     // arg0.
905     arg1->ToRegister(eax);
906     arg0->ToRegister(edx);
907   }
908
909   arg0->Unuse();
910   arg1->Unuse();
911   return RawCallStub(stub);
912 }
913
914
915 Result VirtualFrame::CallJSFunction(int arg_count) {
   // Invoke the JS function on top of the frame with arg_count arguments
   // (plus receiver) already on the frame; returns the eax result.
916   Result function = Pop();
917
918   // InvokeFunction requires function in edi.  Move it in there.
919   function.ToRegister(edi);
920   function.Unuse();
921
922   // +1 for receiver.
923   PrepareForCall(arg_count + 1, arg_count + 1);
924   ASSERT(cgen()->HasValidEntryRegisters());
925   ParameterCount count(arg_count);
926   __ InvokeFunction(edi, count, CALL_FUNCTION);
   // The callee may have changed the context; reload esi from the frame.
927   RestoreContextRegister();
928   Result result = cgen()->allocator()->Allocate(eax);
929   ASSERT(result.is_valid());
930   return result;
931 }
932
933
// Calls the given runtime function with arg_count arguments taken from
// the frame (all spilled to the stack first).  The result arrives in
// eax and is returned as a Result.
Result VirtualFrame::CallRuntime(const Runtime::Function* f, int arg_count) {
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(f, arg_count);
  Result result = cgen()->allocator()->Allocate(eax);
  ASSERT(result.is_valid());
  return result;
}
942
943
// Same as the Runtime::Function* overload above, but takes the runtime
// function by id.
Result VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(id, arg_count);
  Result result = cgen()->allocator()->Allocate(eax);
  ASSERT(result.is_valid());
  return result;
}
952
953
#ifdef ENABLE_DEBUGGER_SUPPORT
// Emits a debug-break call after spilling the frame.  The result value
// in eax is allocated only to keep the register allocator's view of
// eax consistent across the call; it is deliberately not returned
// (presumably released when the local Result goes out of scope).
void VirtualFrame::DebugBreak() {
  PrepareForCall(0, 0);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ DebugBreak();
  Result result = cgen()->allocator()->Allocate(eax);
  ASSERT(result.is_valid());
}
#endif
963
964
// Invokes the given JS builtin with arg_count arguments from the frame
// (spilled to the stack first).  The builtin's result arrives in eax.
Result VirtualFrame::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   int arg_count) {
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ InvokeBuiltin(id, flag);
  Result result = cgen()->allocator()->Allocate(eax);
  ASSERT(result.is_valid());
  return result;
}
975
976
// Emits a call to a code object with the given relocation mode.  The
// caller must already have prepared the frame; this only emits the call
// and claims eax, where the code object leaves its result.
Result VirtualFrame::RawCallCodeObject(Handle<Code> code,
                                       RelocInfo::Mode rmode) {
  ASSERT(cgen()->HasValidEntryRegisters());
  __ call(code, rmode);
  Result result = cgen()->allocator()->Allocate(eax);
  ASSERT(result.is_valid());
  return result;
}
985
986
// This function assumes that the only results that could be in a_reg or b_reg
// are a and b.  Other results can be live, but must not be in a_reg or b_reg.
// Moves a into a_reg and b into b_reg, choosing an order (or an xchg)
// that never clobbers either value.  Both results are consumed.
void VirtualFrame::MoveResultsToRegisters(Result* a,
                                          Result* b,
                                          Register a_reg,
                                          Register b_reg) {
  if (a->is_register() && a->reg().is(a_reg)) {
    // a is already in place; only b needs moving.
    b->ToRegister(b_reg);
  } else if (!cgen()->allocator()->is_used(a_reg)) {
    // a_reg is free: park a there first, which also frees whatever
    // register a occupied for b's move.
    a->ToRegister(a_reg);
    b->ToRegister(b_reg);
  } else if (cgen()->allocator()->is_used(b_reg)) {
    // a must be in b_reg, b in a_reg (by the function's precondition
    // only a and b can occupy these registers, and a is not in a_reg).
    __ xchg(a_reg, b_reg);
    // Results a and b will be invalidated, so it is ok if they are switched.
  } else {
    // a_reg is occupied (necessarily by b) and b_reg is free: move b
    // out first to vacate a_reg for a.
    b->ToRegister(b_reg);
    a->ToRegister(a_reg);
  }
  a->Unuse();
  b->Unuse();
}
1009
1010
// Calls the load IC for a named property load.
Result VirtualFrame::CallLoadIC(RelocInfo::Mode mode) {
  // Name and receiver are on the top of the frame.  The IC expects
  // name in ecx and receiver in eax.
  Result name = Pop();
  Result receiver = Pop();
  PrepareForCall(0, 0);  // No stack arguments.
  MoveResultsToRegisters(&name, &receiver, ecx, eax);

  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      Builtins::kLoadIC_Initialize));
  return RawCallCodeObject(ic, mode);
}
1023
1024
// Calls the keyed load IC for an indexed property load.
Result VirtualFrame::CallKeyedLoadIC(RelocInfo::Mode mode) {
  // Key and receiver are on top of the frame.  The IC expects the key
  // in eax and the receiver in edx.
  Result key = Pop();
  Result receiver = Pop();
  PrepareForCall(0, 0);
  MoveResultsToRegisters(&key, &receiver, eax, edx);

  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      Builtins::kKeyedLoadIC_Initialize));
  return RawCallCodeObject(ic, mode);
}
1036
1037
// Calls a named store IC, picking the strict-mode variant when
// requested.  Contextual stores (e.g. assignments to global variables)
// use the global object as the receiver and a context-aware reloc mode.
Result VirtualFrame::CallStoreIC(Handle<String> name,
                                 bool is_contextual,
                                 StrictModeFlag strict_mode) {
  // Value and (if not contextual) receiver are on top of the frame.
  // The IC expects name in ecx, value in eax, and receiver in edx.
  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      (strict_mode == kStrictMode) ? Builtins::kStoreIC_Initialize_Strict
                                   : Builtins::kStoreIC_Initialize));

  Result value = Pop();
  RelocInfo::Mode mode;
  if (is_contextual) {
    PrepareForCall(0, 0);
    value.ToRegister(eax);
    // The receiver is the global object, loaded from the context (esi).
    __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
    value.Unuse();
    mode = RelocInfo::CODE_TARGET_CONTEXT;
  } else {
    Result receiver = Pop();
    PrepareForCall(0, 0);
    MoveResultsToRegisters(&value, &receiver, eax, edx);
    mode = RelocInfo::CODE_TARGET;
  }
  __ mov(ecx, name);
  return RawCallCodeObject(ic, mode);
}
1064
1065
// Calls the keyed store IC.  Tries to place value, key, and receiver
// into eax, ecx, and edx with as few moves as possible: first fix any
// operand that is already (or can trivially be put) in its target
// register, then position the remaining two via MoveResultsToRegisters.
Result VirtualFrame::CallKeyedStoreIC(StrictModeFlag strict_mode) {
  // Value, key, and receiver are on the top of the frame.  The IC
  // expects value in eax, key in ecx, and receiver in edx.
  Result value = Pop();
  Result key = Pop();
  Result receiver = Pop();
  PrepareForCall(0, 0);
  if (!cgen()->allocator()->is_used(eax) ||
      (value.is_register() && value.reg().is(eax))) {
    // Value can occupy eax without disturbing the others.
    if (!cgen()->allocator()->is_used(eax)) {
      value.ToRegister(eax);
    }
    MoveResultsToRegisters(&key, &receiver, ecx, edx);
    value.Unuse();
  } else if (!cgen()->allocator()->is_used(ecx) ||
             (key.is_register() && key.reg().is(ecx))) {
    // Key can occupy ecx without disturbing the others.
    if (!cgen()->allocator()->is_used(ecx)) {
      key.ToRegister(ecx);
    }
    MoveResultsToRegisters(&value, &receiver, eax, edx);
    key.Unuse();
  } else if (!cgen()->allocator()->is_used(edx) ||
             (receiver.is_register() && receiver.reg().is(edx))) {
    // Receiver can occupy edx without disturbing the others.
    if (!cgen()->allocator()->is_used(edx)) {
      receiver.ToRegister(edx);
    }
    MoveResultsToRegisters(&key, &value, ecx, eax);
    receiver.Unuse();
  } else {
    // All three registers are used, and no value is in the correct place.
    // We have one of the two circular permutations of eax, ecx, edx.
    // Two exchanges rotate the three registers into position; which
    // direction to rotate is determined by where the value sits.
    ASSERT(value.is_register());
    if (value.reg().is(ecx)) {
      __ xchg(eax, edx);
      __ xchg(eax, ecx);
    } else {
      __ xchg(eax, ecx);
      __ xchg(eax, edx);
    }
    value.Unuse();
    key.Unuse();
    receiver.Unuse();
  }

  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      (strict_mode == kStrictMode) ? Builtins::kKeyedStoreIC_Initialize_Strict
                                   : Builtins::kKeyedStoreIC_Initialize));
  return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
}
1115
1116
// Calls a call IC for a named function call.
Result VirtualFrame::CallCallIC(RelocInfo::Mode mode,
                                int arg_count,
                                int loop_nesting) {
  // Function name, arguments, and receiver are on top of the frame.
  // The IC expects the name in ecx and the rest on the stack and
  // drops them all.
  InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
  Handle<Code> ic = Isolate::Current()->stub_cache()->ComputeCallInitialize(
      arg_count, in_loop);
  // Spill args, receiver, and function.  The call will drop args and
  // receiver.
  Result name = Pop();
  PrepareForCall(arg_count + 1, arg_count + 1);  // Arguments + receiver.
  name.ToRegister(ecx);
  name.Unuse();
  return RawCallCodeObject(ic, mode);
}
1134
1135
// Calls a keyed call IC; same protocol as CallCallIC but for calls
// where the property name is computed (keyed access).
Result VirtualFrame::CallKeyedCallIC(RelocInfo::Mode mode,
                                     int arg_count,
                                     int loop_nesting) {
  // Function name, arguments, and receiver are on top of the frame.
  // The IC expects the name in ecx and the rest on the stack and
  // drops them all.
  InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
  Handle<Code> ic =
      Isolate::Current()->stub_cache()->ComputeKeyedCallInitialize(arg_count,
                                                                   in_loop);
  // Spill args, receiver, and function.  The call will drop args and
  // receiver.
  Result name = Pop();
  PrepareForCall(arg_count + 1, arg_count + 1);  // Arguments + receiver.
  name.ToRegister(ecx);
  name.Unuse();
  return RawCallCodeObject(ic, mode);
}
1154
1155
// Calls the construct-call trampoline for a `new` expression.
Result VirtualFrame::CallConstructor(int arg_count) {
  // Arguments, receiver, and function are on top of the frame.  The
  // IC expects arg count in eax, function in edi, and the arguments
  // and receiver on the stack.
  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      Builtins::kJSConstructCall));
  // Duplicate the function before preparing the frame.
  PushElementAt(arg_count);
  Result function = Pop();
  PrepareForCall(arg_count + 1, arg_count + 1);  // Spill function and args.
  function.ToRegister(edi);

  // Constructors are called with the number of arguments in register
  // eax for now. Another option would be to have separate construct
  // call trampolines per different arguments counts encountered.
  Result num_args = cgen()->allocator()->Allocate(eax);
  ASSERT(num_args.is_valid());
  __ Set(num_args.reg(), Immediate(arg_count));

  // Release both registers; the call below consumes them.
  function.Unuse();
  num_args.Unuse();
  return RawCallCodeObject(ic, RelocInfo::CONSTRUCT_CALL);
}
1179
1180
1181 void VirtualFrame::Drop(int count) {
1182 ASSERT(count >= 0);
1183 ASSERT(height() >= count);
1184 int num_virtual_elements = (element_count() - 1) - stack_pointer_;
1185
1186 // Emit code to lower the stack pointer if necessary.
1187 if (num_virtual_elements < count) {
1188 int num_dropped = count - num_virtual_elements;
1189 stack_pointer_ -= num_dropped;
1190 __ add(Operand(esp), Immediate(num_dropped * kPointerSize));
1191 }
1192
1193 // Discard elements from the virtual frame and free any registers.
1194 for (int i = 0; i < count; i++) {
1195 FrameElement dropped = elements_.RemoveLast();
1196 if (dropped.is_register()) {
1197 Unuse(dropped.reg());
1198 }
1199 }
1200 }
1201
1202
// Pops the top element off the virtual frame and returns it as a
// Result.  Adjusts esp if the element occupied a physical stack slot,
// releases any register it held, and follows copies to their backing
// store (materializing a memory-backed store into a register).
Result VirtualFrame::Pop() {
  FrameElement element = elements_.RemoveLast();
  int index = element_count();
  ASSERT(element.is_valid());
  ASSERT(element.is_untagged_int32() == cgen()->in_safe_int32_mode());

  // Get number type information of the result.  A copy carries the
  // type info of its backing element.
  TypeInfo info;
  if (!element.is_copy()) {
    info = element.type_info();
  } else {
    info = elements_[element.index()].type_info();
  }

  // If the element was the physical top of stack, the real stack must
  // shrink too.
  bool pop_needed = (stack_pointer_ == index);
  if (pop_needed) {
    stack_pointer_--;
    if (element.is_memory()) {
      // The value lives only in the stack slot: pop it directly into a
      // freshly allocated register and return.
      Result temp = cgen()->allocator()->Allocate();
      ASSERT(temp.is_valid());
      __ pop(temp.reg());
      temp.set_type_info(info);
      temp.set_untagged_int32(element.is_untagged_int32());
      return temp;
    }

    // The value is also held elsewhere (register/constant/copy), so the
    // stack slot can simply be discarded.
    __ add(Operand(esp), Immediate(kPointerSize));
  }
  ASSERT(!element.is_memory());

  // The top element is a register, constant, or a copy.  Unuse
  // registers and follow copies to their backing store.
  if (element.is_register()) {
    Unuse(element.reg());
  } else if (element.is_copy()) {
    ASSERT(!element.is_untagged_int32());
    ASSERT(element.index() < index);
    index = element.index();
    element = elements_[index];
  }
  ASSERT(!element.is_copy());

  // The element is memory, a register, or a constant.
  if (element.is_memory()) {
    // Memory elements could only be the backing store of a copy.
    // Allocate the original to a register.
    ASSERT(index <= stack_pointer_);
    ASSERT(!element.is_untagged_int32());
    Result temp = cgen()->allocator()->Allocate();
    ASSERT(temp.is_valid());
    Use(temp.reg(), index);
    FrameElement new_element =
        FrameElement::RegisterElement(temp.reg(),
                                      FrameElement::SYNCED,
                                      element.type_info());
    // Preserve the copy flag on the element.
    if (element.is_copied()) new_element.set_copied();
    elements_[index] = new_element;
    // Load the backing store's value from its frame slot.
    __ mov(temp.reg(), Operand(ebp, fp_relative(index)));
    return Result(temp.reg(), info);
  } else if (element.is_register()) {
    Result return_value(element.reg(), info);
    return_value.set_untagged_int32(element.is_untagged_int32());
    return return_value;
  } else {
    ASSERT(element.is_constant());
    Result return_value(element.handle());
    return_value.set_untagged_int32(element.is_untagged_int32());
    return return_value;
  }
}
1274
1275
// Pops the top of the frame into the given register.  Requires the
// frame to be fully synced to the physical stack at the top.
void VirtualFrame::EmitPop(Register reg) {
  ASSERT(stack_pointer_ == element_count() - 1);
  stack_pointer_--;
  elements_.RemoveLast();
  __ pop(reg);
}
1282
1283
// Pops the top of the frame into the given memory operand.  Requires
// the frame to be fully synced to the physical stack at the top.
void VirtualFrame::EmitPop(Operand operand) {
  ASSERT(stack_pointer_ == element_count() - 1);
  stack_pointer_--;
  elements_.RemoveLast();
  __ pop(operand);
}
1290
1291
// Pushes a register's value onto the physical stack and records it in
// the virtual frame as an in-memory (synced) element with the given
// type info.
void VirtualFrame::EmitPush(Register reg, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(reg);
}
1298
1299
// Pushes a memory operand's value onto the physical stack and records
// it in the virtual frame as an in-memory (synced) element.
void VirtualFrame::EmitPush(Operand operand, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(operand);
}
1306
1307
// Pushes an immediate value onto the physical stack and records it in
// the virtual frame as an in-memory (synced) element.
void VirtualFrame::EmitPush(Immediate immediate, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(immediate);
}
1314
1315
// Pushes a constant onto the virtual frame, marked as an untagged
// 32-bit integer.  Purely virtual: no code is emitted.  The caller must
// have checked that the constant pool has room.
void VirtualFrame::PushUntaggedElement(Handle<Object> value) {
  ASSERT(!ConstantPoolOverflowed());
  elements_.Add(FrameElement::ConstantElement(value, FrameElement::NOT_SYNCED));
  elements_[element_count() - 1].set_untagged_int32(true);
}
1321
1322
1323 void VirtualFrame::Push(Expression* expr) {
1324 ASSERT(expr->IsTrivial());
1325
1326 Literal* lit = expr->AsLiteral();
1327 if (lit != NULL) {
1328 Push(lit->handle());
1329 return;
1330 }
1331
1332 VariableProxy* proxy = expr->AsVariableProxy();
1333 if (proxy != NULL) {
1334 Slot* slot = proxy->var()->AsSlot();
1335 if (slot->type() == Slot::LOCAL) {
1336 PushLocalAt(slot->index());
1337 return;
1338 }
1339 if (slot->type() == Slot::PARAMETER) {
1340 PushParameterAt(slot->index());
1341 return;
1342 }
1343 }
1344 UNREACHABLE();
1345 }
1346
1347
1348 void VirtualFrame::Push(Handle<Object> value) {
1349 if (ConstantPoolOverflowed()) {
1350 Result temp = cgen()->allocator()->Allocate();
1351 ASSERT(temp.is_valid());
1352 __ Set(temp.reg(), Immediate(value));
1353 Push(&temp);
1354 } else {
1355 FrameElement element =
1356 FrameElement::ConstantElement(value, FrameElement::NOT_SYNCED);
1357 elements_.Add(element);
1358 }
1359 }
1360
1361
1362 #undef __
1363
1364 } } // namespace v8::internal
1365
1366 #endif // V8_TARGET_ARCH_IA32
OLDNEW
« no previous file with comments | « src/ia32/virtual-frame-ia32.h ('k') | src/jump-target.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698