Chromium Code Reviews

Side by Side Diff: src/x64/virtual-frame-x64.cc

Issue 6811012: Remove some dead code. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 8 months ago
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "codegen-inl.h"
#include "register-allocator-inl.h"
#include "scopes.h"
#include "stub-cache.h"
#include "virtual-frame-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

void VirtualFrame::Enter() {
  // Registers live on entry to a JS frame:
  //   rsp: stack pointer, points to return address from this function.
  //   rbp: base pointer, points to previous JS, ArgumentsAdaptor, or
  //        Trampoline frame.
  //   rsi: context of this function call.
  //   rdi: pointer to this function object.
  Comment cmnt(masm(), "[ Enter JS frame");

#ifdef DEBUG
  if (FLAG_debug_code) {
    // Verify that rdi contains a JS function. The following code
    // relies on rax being available for use.
    Condition not_smi = NegateCondition(masm()->CheckSmi(rdi));
    __ Check(not_smi,
             "VirtualFrame::Enter - rdi is not a function (smi check).");
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
    __ Check(equal,
             "VirtualFrame::Enter - rdi is not a function (map check).");
  }
#endif

  EmitPush(rbp);

  __ movq(rbp, rsp);

  // Store the context in the frame. The context is kept in rsi and a
  // copy is stored in the frame. The external reference to rsi
  // remains.
  EmitPush(rsi);

  // Store the function in the frame. The frame owns the register
  // reference now (i.e., it can keep it in rdi or spill it later).
  Push(rdi);
  SyncElementAt(element_count() - 1);
  cgen()->allocator()->Unuse(rdi);
}
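
// For illustration (not part of the original change), the prologue emitted
// above is effectively:
//     push rbp
//     movq rbp, rsp
//     push rsi              ; context
//     push rdi              ; function (synced eagerly by SyncElementAt)
// which produces the standard x64 JS frame layout (kPointerSize == 8):
//     rbp + 8  : return address
//     rbp + 0  : caller's rbp
//     rbp - 8  : context
//     rbp - 16 : function   <- rsp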


void VirtualFrame::Exit() {
  Comment cmnt(masm(), "[ Exit JS frame");
  // Record the location of the JS exit code for patching when setting
  // break point.
  __ RecordJSReturn();

  // Avoid using the leave instruction here, because it is too
  // short. We need the return sequence to be at least the size of a
  // call instruction to support patching the exit code in the
  // debugger. See GenerateReturnSequence for the full return sequence.
  // TODO(X64): A patched call will be very long now. Make sure we
  // have enough room.
  __ movq(rsp, rbp);
  stack_pointer_ = frame_pointer();
  for (int i = element_count() - 1; i > stack_pointer_; i--) {
    FrameElement last = elements_.RemoveLast();
    if (last.is_register()) {
      Unuse(last.reg());
    }
  }

  EmitPop(rbp);
}


void VirtualFrame::AllocateStackSlots() {
  int count = local_count();
  if (count > 0) {
    Comment cmnt(masm(), "[ Allocate space for locals");
    // The locals are initialized to a constant (the undefined value), but
    // we sync them with the actual frame to allocate space for spilling
    // them later. First sync everything above the stack pointer so we can
    // use pushes to allocate and initialize the locals.
    SyncRange(stack_pointer_ + 1, element_count() - 1);
    Handle<Object> undefined = FACTORY->undefined_value();
    FrameElement initial_value =
        FrameElement::ConstantElement(undefined, FrameElement::SYNCED);
    if (count < kLocalVarBound) {
      // For fewer locals the unrolled loop is more compact.

      // Hope for one of the first eight registers, where the push operation
      // takes only one byte (kScratchRegister needs the REX.W bit).
      Result tmp = cgen()->allocator()->Allocate();
      ASSERT(tmp.is_valid());
      __ movq(tmp.reg(), undefined, RelocInfo::EMBEDDED_OBJECT);
      for (int i = 0; i < count; i++) {
        __ push(tmp.reg());
      }
    } else {
      // For more locals a loop in generated code is more compact.
      Label alloc_locals_loop;
      Result cnt = cgen()->allocator()->Allocate();
      ASSERT(cnt.is_valid());
      __ movq(kScratchRegister, undefined, RelocInfo::EMBEDDED_OBJECT);
#ifdef DEBUG
      Label loop_size;
      __ bind(&loop_size);
#endif
      if (is_uint8(count)) {
        // Loading imm8 is shorter than loading imm32.
        // Loading only partial byte register, and using decb below.
        __ movb(cnt.reg(), Immediate(count));
      } else {
        __ movl(cnt.reg(), Immediate(count));
      }
      __ bind(&alloc_locals_loop);
      __ push(kScratchRegister);
      if (is_uint8(count)) {
        __ decb(cnt.reg());
      } else {
        __ decl(cnt.reg());
      }
      __ j(not_zero, &alloc_locals_loop);
#ifdef DEBUG
      CHECK(masm()->SizeOfCodeGeneratedSince(&loop_size) < kLocalVarBound);
#endif
    }
    for (int i = 0; i < count; i++) {
      elements_.Add(initial_value);
      stack_pointer_++;
    }
  }
}
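
// Rough size intuition for the threshold above (an observation, not part
// of the original change): the unrolled path costs roughly one one-byte
// push per local, so it grows linearly with count, while the loop path
// has a fixed-size body that the DEBUG check bounds by kLocalVarBound
// bytes. The crossover is therefore around kLocalVarBound locals, which
// is exactly the threshold tested.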


void VirtualFrame::SaveContextRegister() {
  ASSERT(elements_[context_index()].is_memory());
  __ movq(Operand(rbp, fp_relative(context_index())), rsi);
}


void VirtualFrame::RestoreContextRegister() {
  ASSERT(elements_[context_index()].is_memory());
  __ movq(rsi, Operand(rbp, fp_relative(context_index())));
}


void VirtualFrame::PushReceiverSlotAddress() {
  Result temp = cgen()->allocator()->Allocate();
  ASSERT(temp.is_valid());
  __ lea(temp.reg(), ParameterAt(-1));
  Push(&temp);
}


void VirtualFrame::EmitPop(Register reg) {
  ASSERT(stack_pointer_ == element_count() - 1);
  stack_pointer_--;
  elements_.RemoveLast();
  __ pop(reg);
}


void VirtualFrame::EmitPop(const Operand& operand) {
  ASSERT(stack_pointer_ == element_count() - 1);
  stack_pointer_--;
  elements_.RemoveLast();
  __ pop(operand);
}


void VirtualFrame::EmitPush(Register reg, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(reg);
}


void VirtualFrame::EmitPush(const Operand& operand, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(operand);
}


void VirtualFrame::EmitPush(Immediate immediate, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(immediate);
}


void VirtualFrame::EmitPush(Smi* smi_value) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(TypeInfo::Smi()));
  stack_pointer_++;
  __ Push(smi_value);
}


void VirtualFrame::EmitPush(Handle<Object> value) {
  ASSERT(stack_pointer_ == element_count() - 1);
  TypeInfo info = TypeInfo::TypeFromValue(value);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ Push(value);
}


void VirtualFrame::EmitPush(Heap::RootListIndex index, TypeInfo info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ PushRoot(index);
}


void VirtualFrame::Push(Expression* expr) {
  ASSERT(expr->IsTrivial());

  Literal* lit = expr->AsLiteral();
  if (lit != NULL) {
    Push(lit->handle());
    return;
  }

  VariableProxy* proxy = expr->AsVariableProxy();
  if (proxy != NULL) {
    Slot* slot = proxy->var()->AsSlot();
    if (slot->type() == Slot::LOCAL) {
      PushLocalAt(slot->index());
      return;
    }
    if (slot->type() == Slot::PARAMETER) {
      PushParameterAt(slot->index());
      return;
    }
  }
  UNREACHABLE();
}


void VirtualFrame::Push(Handle<Object> value) {
  if (ConstantPoolOverflowed()) {
    Result temp = cgen()->allocator()->Allocate();
    ASSERT(temp.is_valid());
    if (value->IsSmi()) {
      __ Move(temp.reg(), Smi::cast(*value));
    } else {
      __ movq(temp.reg(), value, RelocInfo::EMBEDDED_OBJECT);
    }
    Push(&temp);
  } else {
    FrameElement element =
        FrameElement::ConstantElement(value, FrameElement::NOT_SYNCED);
    elements_.Add(element);
  }
}


void VirtualFrame::Drop(int count) {
  ASSERT(count >= 0);
  ASSERT(height() >= count);
  int num_virtual_elements = (element_count() - 1) - stack_pointer_;

  // Emit code to lower the stack pointer if necessary.
  if (num_virtual_elements < count) {
    int num_dropped = count - num_virtual_elements;
    stack_pointer_ -= num_dropped;
    __ addq(rsp, Immediate(num_dropped * kPointerSize));
  }

  // Discard elements from the virtual frame and free any registers.
  for (int i = 0; i < count; i++) {
    FrameElement dropped = elements_.RemoveLast();
    if (dropped.is_register()) {
      Unuse(dropped.reg());
    }
  }
}
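
// A worked example (illustrative only): with five elements, stack_pointer_
// at index 2, Drop(4) finds two purely virtual elements (indices 3 and 4),
// so it discards the 4 - 2 = 2 materialized slots with a single
// 'addq rsp, Immediate(2 * kPointerSize)' and then removes all four
// frame elements, freeing any registers they held.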


int VirtualFrame::InvalidateFrameSlotAt(int index) {
  FrameElement original = elements_[index];

  // Is this element the backing store of any copies?
  int new_backing_index = kIllegalIndex;
  if (original.is_copied()) {
    // Verify it is copied, and find first copy.
    for (int i = index + 1; i < element_count(); i++) {
      if (elements_[i].is_copy() && elements_[i].index() == index) {
        new_backing_index = i;
        break;
      }
    }
  }

  if (new_backing_index == kIllegalIndex) {
    // No copies found, return kIllegalIndex.
    if (original.is_register()) {
      Unuse(original.reg());
    }
    elements_[index] = FrameElement::InvalidElement();
    return kIllegalIndex;
  }

  // This is the backing store of copies.
  Register backing_reg;
  if (original.is_memory()) {
    Result fresh = cgen()->allocator()->Allocate();
    ASSERT(fresh.is_valid());
    Use(fresh.reg(), new_backing_index);
    backing_reg = fresh.reg();
    __ movq(backing_reg, Operand(rbp, fp_relative(index)));
  } else {
    // The original was in a register.
    backing_reg = original.reg();
    set_register_location(backing_reg, new_backing_index);
  }
  // Invalidate the element at index.
  elements_[index] = FrameElement::InvalidElement();
  // Set the new backing element.
  if (elements_[new_backing_index].is_synced()) {
    elements_[new_backing_index] =
        FrameElement::RegisterElement(backing_reg,
                                      FrameElement::SYNCED,
                                      original.type_info());
  } else {
    elements_[new_backing_index] =
        FrameElement::RegisterElement(backing_reg,
                                      FrameElement::NOT_SYNCED,
                                      original.type_info());
  }
  // Update the other copies.
  for (int i = new_backing_index + 1; i < element_count(); i++) {
    if (elements_[i].is_copy() && elements_[i].index() == index) {
      elements_[i].set_index(new_backing_index);
      elements_[new_backing_index].set_copied();
    }
  }
  return new_backing_index;
}
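
// For illustration (not part of the original change): if slot 2 is a
// memory element backing copies at slots 5 and 7, InvalidateFrameSlotAt(2)
// loads slot 2 into a freshly allocated register, turns slot 5 into the
// new register-backed element, redirects the copy at slot 7 to point at
// slot 5, and returns 5.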


void VirtualFrame::TakeFrameSlotAt(int index) {
  ASSERT(index >= 0);
  ASSERT(index <= element_count());
  FrameElement original = elements_[index];
  int new_backing_store_index = InvalidateFrameSlotAt(index);
  if (new_backing_store_index != kIllegalIndex) {
    elements_.Add(CopyElementAt(new_backing_store_index));
    return;
  }

  switch (original.type()) {
    case FrameElement::MEMORY: {
      // Emit code to load the original element's data into a register.
      // Push that register as a FrameElement on top of the frame.
      Result fresh = cgen()->allocator()->Allocate();
      ASSERT(fresh.is_valid());
      FrameElement new_element =
          FrameElement::RegisterElement(fresh.reg(),
                                        FrameElement::NOT_SYNCED,
                                        original.type_info());
      Use(fresh.reg(), element_count());
      elements_.Add(new_element);
      __ movq(fresh.reg(), Operand(rbp, fp_relative(index)));
      break;
    }
    case FrameElement::REGISTER:
      Use(original.reg(), element_count());
      // Fall through.
    case FrameElement::CONSTANT:
    case FrameElement::COPY:
      original.clear_sync();
      elements_.Add(original);
      break;
    case FrameElement::INVALID:
      UNREACHABLE();
      break;
  }
}


void VirtualFrame::StoreToFrameSlotAt(int index) {
  // Store the value on top of the frame to the virtual frame slot at
  // a given index. The value on top of the frame is left in place.
  // This is a duplicating operation, so it can create copies.
  ASSERT(index >= 0);
  ASSERT(index < element_count());

  int top_index = element_count() - 1;
  FrameElement top = elements_[top_index];
  FrameElement original = elements_[index];
  if (top.is_copy() && top.index() == index) return;
  ASSERT(top.is_valid());

  InvalidateFrameSlotAt(index);

  // InvalidateFrameSlotAt can potentially change any frame element, due
  // to spilling registers to allocate temporaries in order to preserve
  // the copy-on-write semantics of aliased elements. Reload top from
  // the frame.
  top = elements_[top_index];

  if (top.is_copy()) {
    // There are two cases based on the relative positions of the
    // stored-to slot and the backing slot of the top element.
    int backing_index = top.index();
    ASSERT(backing_index != index);
    if (backing_index < index) {
      // 1. The top element is a copy of a slot below the stored-to
      // slot. The stored-to slot becomes an unsynced copy of that
      // same backing slot.
      elements_[index] = CopyElementAt(backing_index);
    } else {
      // 2. The top element is a copy of a slot above the stored-to
      // slot. The stored-to slot becomes the new (unsynced) backing
      // slot and both the top element and the element at the former
      // backing slot become copies of it. The sync state of the top
      // and former backing elements is preserved.
      FrameElement backing_element = elements_[backing_index];
      ASSERT(backing_element.is_memory() || backing_element.is_register());
      if (backing_element.is_memory()) {
        // Because sets of copies are canonicalized to be backed by
        // their lowest frame element, and because memory frame
        // elements are backed by the corresponding stack address, we
        // have to move the actual value down in the stack.
        //
        // TODO(209): consider allocating the stored-to slot to the
        // temp register. Alternatively, allow copies to appear in
        // any order in the frame and lazily move the value down to
        // the slot.
        __ movq(kScratchRegister, Operand(rbp, fp_relative(backing_index)));
        __ movq(Operand(rbp, fp_relative(index)), kScratchRegister);
      } else {
        set_register_location(backing_element.reg(), index);
        if (backing_element.is_synced()) {
          // If the element is a register, we will not actually move
          // anything on the stack but only update the virtual frame
          // element.
          backing_element.clear_sync();
        }
      }
      elements_[index] = backing_element;

      // The old backing element becomes a copy of the new backing
      // element.
      FrameElement new_element = CopyElementAt(index);
      elements_[backing_index] = new_element;
      if (backing_element.is_synced()) {
        elements_[backing_index].set_sync();
      }

      // All the copies of the old backing element (including the top
      // element) become copies of the new backing element.
      for (int i = backing_index + 1; i < element_count(); i++) {
        if (elements_[i].is_copy() && elements_[i].index() == backing_index) {
          elements_[i].set_index(index);
        }
      }
    }
    return;
  }

  // Move the top element to the stored-to slot and replace it (the
  // top element) with a copy.
  elements_[index] = top;
  if (top.is_memory()) {
    // TODO(209): consider allocating the stored-to slot to the temp
    // register. Alternatively, allow copies to appear in any order
    // in the frame and lazily move the value down to the slot.
    FrameElement new_top = CopyElementAt(index);
    new_top.set_sync();
    elements_[top_index] = new_top;

    // The sync state of the former top element is correct (synced).
    // Emit code to move the value down in the frame.
    __ movq(kScratchRegister, Operand(rsp, 0));
    __ movq(Operand(rbp, fp_relative(index)), kScratchRegister);
  } else if (top.is_register()) {
    set_register_location(top.reg(), index);
    // The stored-to slot has the (unsynced) register reference and
    // the top element becomes a copy. The sync state of the top is
    // preserved.
    FrameElement new_top = CopyElementAt(index);
    if (top.is_synced()) {
      new_top.set_sync();
      elements_[index].clear_sync();
    }
    elements_[top_index] = new_top;
  } else {
    // The stored-to slot holds the same value as the top but
    // unsynced. (We do not have copies of constants yet.)
    ASSERT(top.is_constant());
    elements_[index].clear_sync();
  }
}
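
// A worked example of case 2 above (illustrative only): if the top element
// at slot 6 is a copy of a register-backed slot 4 and we store to slot 2,
// the register's home moves to slot 2 without emitting any code, slot 4
// becomes a copy of slot 2, and the copy at slot 6 is redirected to
// slot 2, keeping each set of copies backed by its lowest slot.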


void VirtualFrame::MakeMergable() {
  for (int i = 0; i < element_count(); i++) {
    FrameElement element = elements_[i];

    // In all cases we have to reset the number type information
    // to unknown for a mergable frame because of incoming back edges.
    if (element.is_constant() || element.is_copy()) {
      if (element.is_synced()) {
        // Just spill.
        elements_[i] = FrameElement::MemoryElement(TypeInfo::Unknown());
      } else {
        // Allocate to a register.
        FrameElement backing_element;  // Invalid if not a copy.
        if (element.is_copy()) {
          backing_element = elements_[element.index()];
        }
        Result fresh = cgen()->allocator()->Allocate();
        ASSERT(fresh.is_valid());  // A register was spilled if all were in use.
        elements_[i] =
            FrameElement::RegisterElement(fresh.reg(),
                                          FrameElement::NOT_SYNCED,
                                          TypeInfo::Unknown());
        Use(fresh.reg(), i);

        // Emit a move.
        if (element.is_constant()) {
          __ Move(fresh.reg(), element.handle());
        } else {
          ASSERT(element.is_copy());
          // Copies are only backed by register or memory locations.
          if (backing_element.is_register()) {
            // The backing store may have been spilled by allocating,
            // but that's OK. If it was, the value is right where we
            // want it.
            if (!fresh.reg().is(backing_element.reg())) {
              __ movq(fresh.reg(), backing_element.reg());
            }
          } else {
            ASSERT(backing_element.is_memory());
            __ movq(fresh.reg(), Operand(rbp, fp_relative(element.index())));
          }
        }
      }
      // No need to set the copied flag --- there are no copies.
    } else {
      // Clear the copy flag of non-constant, non-copy elements.
      // They cannot be copied because copies are not allowed.
      // The copy flag is not relied on before the end of this loop,
      // including when registers are spilled.
      elements_[i].clear_copied();
      elements_[i].set_type_info(TypeInfo::Unknown());
    }
  }
}


void VirtualFrame::MergeTo(VirtualFrame* expected) {
  Comment cmnt(masm(), "[ Merge frame");
  // We should always be merging the code generator's current frame to an
  // expected frame.
  ASSERT(cgen()->frame() == this);

  // Adjust the stack pointer upward (toward the top of the virtual
  // frame) if necessary.
  if (stack_pointer_ < expected->stack_pointer_) {
    int difference = expected->stack_pointer_ - stack_pointer_;
    stack_pointer_ = expected->stack_pointer_;
    __ subq(rsp, Immediate(difference * kPointerSize));
  }

  MergeMoveRegistersToMemory(expected);
  MergeMoveRegistersToRegisters(expected);
  MergeMoveMemoryToRegisters(expected);

  // Adjust the stack pointer downward if necessary.
  if (stack_pointer_ > expected->stack_pointer_) {
    int difference = stack_pointer_ - expected->stack_pointer_;
    stack_pointer_ = expected->stack_pointer_;
    __ addq(rsp, Immediate(difference * kPointerSize));
  }

  // At this point, the frames should be identical.
  ASSERT(Equals(expected));
}
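
// A note on the phase ordering above (an observation, not part of the
// original change): registers are spilled to memory first so that every
// register the expected frame needs eventually becomes free, then
// register-to-register conflicts are resolved (with xchg for swaps), and
// only then are memory slots and constants loaded into target registers.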


void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame* expected) {
  ASSERT(stack_pointer_ >= expected->stack_pointer_);

  // Move registers, constants, and copies to memory. Perform moves
  // from the top downward in the frame in order to leave the backing
  // stores of copies in registers.
  for (int i = element_count() - 1; i >= 0; i--) {
    FrameElement target = expected->elements_[i];
    if (target.is_register()) continue;  // Handle registers later.
    if (target.is_memory()) {
      FrameElement source = elements_[i];
      switch (source.type()) {
        case FrameElement::INVALID:
          // Not a legal merge move.
          UNREACHABLE();
          break;

        case FrameElement::MEMORY:
          // Already in place.
          break;

        case FrameElement::REGISTER:
          Unuse(source.reg());
          if (!source.is_synced()) {
            __ movq(Operand(rbp, fp_relative(i)), source.reg());
          }
          break;

        case FrameElement::CONSTANT:
          if (!source.is_synced()) {
            __ Move(Operand(rbp, fp_relative(i)), source.handle());
          }
          break;

        case FrameElement::COPY:
          if (!source.is_synced()) {
            int backing_index = source.index();
            FrameElement backing_element = elements_[backing_index];
            if (backing_element.is_memory()) {
              __ movq(kScratchRegister,
                      Operand(rbp, fp_relative(backing_index)));
              __ movq(Operand(rbp, fp_relative(i)), kScratchRegister);
            } else {
              ASSERT(backing_element.is_register());
              __ movq(Operand(rbp, fp_relative(i)), backing_element.reg());
            }
          }
          break;
      }
    }
    elements_[i] = target;
  }
}


void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame* expected) {
  // We have already done X-to-memory moves.
  ASSERT(stack_pointer_ >= expected->stack_pointer_);

  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    // Move the right value into register i if it is currently in a register.
    int index = expected->register_location(i);
    int use_index = register_location(i);
    // Skip if register i is unused in the target or if the source is
    // not a register (this is not a register-to-register move).
    if (index == kIllegalIndex || !elements_[index].is_register()) continue;

    Register target = RegisterAllocator::ToRegister(i);
    Register source = elements_[index].reg();
    if (index != use_index) {
      if (use_index == kIllegalIndex) {  // Target is currently unused.
        // Copy the contents of the source register to the target and
        // update the frame element's register to the target.
        Use(target, index);
        Unuse(source);
        __ movq(target, source);
      } else {
        // Exchange contents of registers source and target.
        // Nothing except the register backing use_index has changed.
        elements_[use_index].set_reg(source);
        set_register_location(target, index);
        set_register_location(source, use_index);
        __ xchg(source, target);
      }
    }

    if (!elements_[index].is_synced() &&
        expected->elements_[index].is_synced()) {
      __ movq(Operand(rbp, fp_relative(index)), target);
    }
    elements_[index] = expected->elements_[index];
  }
}


void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame* expected) {
  // Move memory, constants, and copies to registers. This is the
  // final step and since it is not done from the bottom up, but in
  // register code order, we have special code to ensure that the backing
  // elements of copies are in their correct locations when we
  // encounter the copies.
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int index = expected->register_location(i);
    if (index != kIllegalIndex) {
      FrameElement source = elements_[index];
      FrameElement target = expected->elements_[index];
      Register target_reg = RegisterAllocator::ToRegister(i);
      ASSERT(target.reg().is(target_reg));
      switch (source.type()) {
        case FrameElement::INVALID:  // Fall through.
          UNREACHABLE();
          break;
        case FrameElement::REGISTER:
          ASSERT(source.Equals(target));
          // Go to next iteration. Skips Use(target_reg) and syncing
          // below. It is safe to skip syncing because a target
          // register frame element would only be synced if all source
          // elements were.
          continue;
          break;
        case FrameElement::MEMORY:
          ASSERT(index <= stack_pointer_);
          __ movq(target_reg, Operand(rbp, fp_relative(index)));
          break;

        case FrameElement::CONSTANT:
          __ Move(target_reg, source.handle());
          break;

        case FrameElement::COPY: {
          int backing_index = source.index();
          FrameElement backing = elements_[backing_index];
          ASSERT(backing.is_memory() || backing.is_register());
          if (backing.is_memory()) {
            ASSERT(backing_index <= stack_pointer_);
            // Code optimization if backing store should also move
            // to a register: move backing store to its register first.
            if (expected->elements_[backing_index].is_register()) {
              FrameElement new_backing = expected->elements_[backing_index];
              Register new_backing_reg = new_backing.reg();
              ASSERT(!is_used(new_backing_reg));
              elements_[backing_index] = new_backing;
              Use(new_backing_reg, backing_index);
              __ movq(new_backing_reg,
                      Operand(rbp, fp_relative(backing_index)));
              __ movq(target_reg, new_backing_reg);
            } else {
              __ movq(target_reg, Operand(rbp, fp_relative(backing_index)));
            }
          } else {
            __ movq(target_reg, backing.reg());
          }
        }
      }
      // Ensure the proper sync state.
      if (target.is_synced() && !source.is_synced()) {
        __ movq(Operand(rbp, fp_relative(index)), target_reg);
      }
      Use(target_reg, index);
      elements_[index] = target;
    }
  }
}


Result VirtualFrame::Pop() {
  FrameElement element = elements_.RemoveLast();
  int index = element_count();
  ASSERT(element.is_valid());

  // Get number type information of the result.
  TypeInfo info;
  if (!element.is_copy()) {
    info = element.type_info();
  } else {
    info = elements_[element.index()].type_info();
  }

  bool pop_needed = (stack_pointer_ == index);
  if (pop_needed) {
    stack_pointer_--;
    if (element.is_memory()) {
      Result temp = cgen()->allocator()->Allocate();
      ASSERT(temp.is_valid());
      __ pop(temp.reg());
      temp.set_type_info(info);
      return temp;
    }

    __ addq(rsp, Immediate(kPointerSize));
  }
  ASSERT(!element.is_memory());

  // The top element is a register, constant, or a copy. Unuse
  // registers and follow copies to their backing store.
  if (element.is_register()) {
    Unuse(element.reg());
  } else if (element.is_copy()) {
    ASSERT(element.index() < index);
    index = element.index();
    element = elements_[index];
  }
  ASSERT(!element.is_copy());

  // The element is memory, a register, or a constant.
  if (element.is_memory()) {
    // Memory elements could only be the backing store of a copy.
    // Allocate the original to a register.
    ASSERT(index <= stack_pointer_);
    Result temp = cgen()->allocator()->Allocate();
    ASSERT(temp.is_valid());
    Use(temp.reg(), index);
    FrameElement new_element =
        FrameElement::RegisterElement(temp.reg(),
                                      FrameElement::SYNCED,
                                      element.type_info());
    // Preserve the copy flag on the element.
    if (element.is_copied()) new_element.set_copied();
    elements_[index] = new_element;
    __ movq(temp.reg(), Operand(rbp, fp_relative(index)));
    return Result(temp.reg(), info);
  } else if (element.is_register()) {
    return Result(element.reg(), info);
  } else {
    ASSERT(element.is_constant());
    return Result(element.handle());
  }
}
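
// For illustration (not part of the original change): Pop() emits a
// hardware 'pop' only when the top element was materialized on the stack
// and lives in memory. A virtual (unmaterialized) register or constant
// element produces no code at all, a materialized one merely costs an
// 'addq rsp, Immediate(kPointerSize)' to discard its slot, and popping a
// copy hands back its backing value instead.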


Result VirtualFrame::RawCallStub(CodeStub* stub) {
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallStub(stub);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


Result VirtualFrame::CallStub(CodeStub* stub, Result* arg) {
  PrepareForCall(0, 0);
  arg->ToRegister(rax);
  arg->Unuse();
  return RawCallStub(stub);
}


Result VirtualFrame::CallStub(CodeStub* stub, Result* arg0, Result* arg1) {
  PrepareForCall(0, 0);

  if (arg0->is_register() && arg0->reg().is(rax)) {
    if (arg1->is_register() && arg1->reg().is(rdx)) {
      // Wrong registers.
      __ xchg(rax, rdx);
    } else {
      // Register rdx is free for arg0, which frees rax for arg1.
      arg0->ToRegister(rdx);
      arg1->ToRegister(rax);
    }
  } else {
    // Register rax is free for arg1, which guarantees rdx is free for
    // arg0.
    arg1->ToRegister(rax);
    arg0->ToRegister(rdx);
  }

  arg0->Unuse();
  arg1->Unuse();
  return RawCallStub(stub);
}


Result VirtualFrame::CallJSFunction(int arg_count) {
  Result function = Pop();

  // InvokeFunction requires function in rdi. Move it in there.
  function.ToRegister(rdi);
  function.Unuse();

  // +1 for receiver.
  PrepareForCall(arg_count + 1, arg_count + 1);
  ASSERT(cgen()->HasValidEntryRegisters());
  ParameterCount count(arg_count);
  __ InvokeFunction(rdi, count, CALL_FUNCTION);
  RestoreContextRegister();
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


void VirtualFrame::SyncElementBelowStackPointer(int index) {
  // Emit code to write elements below the stack pointer to their
  // (already allocated) stack address.
  ASSERT(index <= stack_pointer_);
  FrameElement element = elements_[index];
  ASSERT(!element.is_synced());
  switch (element.type()) {
    case FrameElement::INVALID:
      break;

    case FrameElement::MEMORY:
      // This function should not be called with synced elements.
      // (Memory elements are always synced.)
      UNREACHABLE();
      break;

    case FrameElement::REGISTER:
      __ movq(Operand(rbp, fp_relative(index)), element.reg());
      break;

    case FrameElement::CONSTANT:
      __ Move(Operand(rbp, fp_relative(index)), element.handle());
      break;

    case FrameElement::COPY: {
      int backing_index = element.index();
      FrameElement backing_element = elements_[backing_index];
      if (backing_element.is_memory()) {
        __ movq(kScratchRegister, Operand(rbp, fp_relative(backing_index)));
        __ movq(Operand(rbp, fp_relative(index)), kScratchRegister);
      } else {
        ASSERT(backing_element.is_register());
        __ movq(Operand(rbp, fp_relative(index)), backing_element.reg());
      }
      break;
    }
  }
  elements_[index].set_sync();
}


void VirtualFrame::SyncElementByPushing(int index) {
  // Sync an element of the frame that is just above the stack pointer
  // by pushing it.
  ASSERT(index == stack_pointer_ + 1);
  stack_pointer_++;
  FrameElement element = elements_[index];

  switch (element.type()) {
    case FrameElement::INVALID:
      __ Push(Smi::FromInt(0));
      break;

    case FrameElement::MEMORY:
      // No memory elements exist above the stack pointer.
      UNREACHABLE();
      break;

    case FrameElement::REGISTER:
      __ push(element.reg());
      break;

    case FrameElement::CONSTANT:
      __ Move(kScratchRegister, element.handle());
      __ push(kScratchRegister);
      break;

    case FrameElement::COPY: {
      int backing_index = element.index();
      FrameElement backing = elements_[backing_index];
      ASSERT(backing.is_memory() || backing.is_register());
      if (backing.is_memory()) {
        __ push(Operand(rbp, fp_relative(backing_index)));
      } else {
        __ push(backing.reg());
      }
      break;
    }
  }
  elements_[index].set_sync();
}


// Clear the dirty bits for the range of elements in
// [min(stack_pointer_ + 1, begin), end].
void VirtualFrame::SyncRange(int begin, int end) {
  ASSERT(begin >= 0);
  ASSERT(end < element_count());
  // Sync elements below the range if they have not been materialized
  // on the stack.
  int start = Min(begin, stack_pointer_ + 1);
  int end_or_stack_pointer = Min(stack_pointer_, end);
  // Emit normal push instructions for elements above the stack pointer
  // and use mov instructions if we are below the stack pointer.
  int i = start;

  while (i <= end_or_stack_pointer) {
    if (!elements_[i].is_synced()) SyncElementBelowStackPointer(i);
    i++;
  }
  while (i <= end) {
    SyncElementByPushing(i);
    i++;
  }
}
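
// A worked example (illustrative only): with stack_pointer_ at index 3,
// SyncRange(1, 6) first writes any unsynced elements at indices 1..3 to
// their already-allocated stack slots with movs, then materializes
// indices 4..6 with pushes, leaving stack_pointer_ at 6 and every element
// in the range marked as synced.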


//------------------------------------------------------------------------------
// Virtual frame stub and IC calling functions.

Result VirtualFrame::CallRuntime(const Runtime::Function* f, int arg_count) {
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(f, arg_count);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


Result VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(id, arg_count);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void VirtualFrame::DebugBreak() {
  PrepareForCall(0, 0);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ DebugBreak();
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
}
#endif


Result VirtualFrame::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   int arg_count) {
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ InvokeBuiltin(id, flag);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


Result VirtualFrame::RawCallCodeObject(Handle<Code> code,
                                       RelocInfo::Mode rmode) {
  ASSERT(cgen()->HasValidEntryRegisters());
  __ Call(code, rmode);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


// This function assumes that the only results that could be in a_reg or b_reg
// are a and b. Other results can be live, but must not be in a_reg or b_reg.
void VirtualFrame::MoveResultsToRegisters(Result* a,
                                          Result* b,
                                          Register a_reg,
                                          Register b_reg) {
  ASSERT(!a_reg.is(b_reg));
  // Assert that cgen()->allocator()->count(a_reg) is accounted for by a and b.
  ASSERT(cgen()->allocator()->count(a_reg) <= 2);
  ASSERT(cgen()->allocator()->count(a_reg) != 2 || a->reg().is(a_reg));
  ASSERT(cgen()->allocator()->count(a_reg) != 2 || b->reg().is(a_reg));
  ASSERT(cgen()->allocator()->count(a_reg) != 1 ||
         (a->is_register() && a->reg().is(a_reg)) ||
         (b->is_register() && b->reg().is(a_reg)));
  // Assert that cgen()->allocator()->count(b_reg) is accounted for by a and b.
  ASSERT(cgen()->allocator()->count(b_reg) <= 2);
  ASSERT(cgen()->allocator()->count(b_reg) != 2 || a->reg().is(b_reg));
  ASSERT(cgen()->allocator()->count(b_reg) != 2 || b->reg().is(b_reg));
  ASSERT(cgen()->allocator()->count(b_reg) != 1 ||
         (a->is_register() && a->reg().is(b_reg)) ||
         (b->is_register() && b->reg().is(b_reg)));

  if (a->is_register() && a->reg().is(a_reg)) {
    b->ToRegister(b_reg);
  } else if (!cgen()->allocator()->is_used(a_reg)) {
    a->ToRegister(a_reg);
    b->ToRegister(b_reg);
  } else if (cgen()->allocator()->is_used(b_reg)) {
    // a must be in b_reg, b in a_reg.
    __ xchg(a_reg, b_reg);
    // Results a and b will be invalidated, so it is ok if they are switched.
  } else {
    b->ToRegister(b_reg);
    a->ToRegister(a_reg);
  }
  a->Unuse();
  b->Unuse();
}
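
// A worked trace (illustrative only): suppose a sits in b_reg and b sits
// in a_reg. The first test fails (a is not in a_reg), a_reg is in use
// (b holds it), and b_reg is in use (a holds it), so a single
// 'xchg a_reg, b_reg' puts both values in place. The stale register
// assignments in a and b do not leak because both results are Unuse()d
// immediately afterwards.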


Result VirtualFrame::CallLoadIC(RelocInfo::Mode mode) {
  // Name and receiver are on the top of the frame. Both are dropped.
  // The IC expects name in rcx and receiver in rax.
  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      Builtins::kLoadIC_Initialize));
  Result name = Pop();
  Result receiver = Pop();
  PrepareForCall(0, 0);
  MoveResultsToRegisters(&name, &receiver, rcx, rax);

  return RawCallCodeObject(ic, mode);
}


Result VirtualFrame::CallKeyedLoadIC(RelocInfo::Mode mode) {
  // Key and receiver are on top of the frame. Put them in rax and rdx.
  Result key = Pop();
  Result receiver = Pop();
  PrepareForCall(0, 0);
  MoveResultsToRegisters(&key, &receiver, rax, rdx);

  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      Builtins::kKeyedLoadIC_Initialize));
  return RawCallCodeObject(ic, mode);
}


Result VirtualFrame::CallStoreIC(Handle<String> name,
                                 bool is_contextual,
                                 StrictModeFlag strict_mode) {
  // Value and (if not contextual) receiver are on top of the frame.
  // The IC expects name in rcx, value in rax, and receiver in rdx.
  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      (strict_mode == kStrictMode) ? Builtins::kStoreIC_Initialize_Strict
                                   : Builtins::kStoreIC_Initialize));
  Result value = Pop();
  RelocInfo::Mode mode;
  if (is_contextual) {
    PrepareForCall(0, 0);
    value.ToRegister(rax);
    __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
    value.Unuse();
    mode = RelocInfo::CODE_TARGET_CONTEXT;
  } else {
    Result receiver = Pop();
    PrepareForCall(0, 0);
    MoveResultsToRegisters(&value, &receiver, rax, rdx);
    mode = RelocInfo::CODE_TARGET;
  }
  __ Move(rcx, name);
  return RawCallCodeObject(ic, mode);
}


Result VirtualFrame::CallKeyedStoreIC(StrictModeFlag strict_mode) {
  // Value, key, and receiver are on the top of the frame. The IC
  // expects value in rax, key in rcx, and receiver in rdx.
  Result value = Pop();
  Result key = Pop();
  Result receiver = Pop();
  PrepareForCall(0, 0);
  if (!cgen()->allocator()->is_used(rax) ||
      (value.is_register() && value.reg().is(rax))) {
    if (!cgen()->allocator()->is_used(rax)) {
      value.ToRegister(rax);
    }
    MoveResultsToRegisters(&key, &receiver, rcx, rdx);
    value.Unuse();
  } else if (!cgen()->allocator()->is_used(rcx) ||
             (key.is_register() && key.reg().is(rcx))) {
    if (!cgen()->allocator()->is_used(rcx)) {
      key.ToRegister(rcx);
    }
    MoveResultsToRegisters(&value, &receiver, rax, rdx);
    key.Unuse();
  } else if (!cgen()->allocator()->is_used(rdx) ||
             (receiver.is_register() && receiver.reg().is(rdx))) {
    if (!cgen()->allocator()->is_used(rdx)) {
      receiver.ToRegister(rdx);
    }
    MoveResultsToRegisters(&key, &value, rcx, rax);
    receiver.Unuse();
  } else {
    // All three registers are used, and no value is in the correct place.
    // We have one of the two circular permutations of rax, rcx, rdx.
    ASSERT(value.is_register());
    if (value.reg().is(rcx)) {
      __ xchg(rax, rdx);
      __ xchg(rax, rcx);
    } else {
      __ xchg(rax, rcx);
      __ xchg(rax, rdx);
    }
    value.Unuse();
    key.Unuse();
    receiver.Unuse();
  }

  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      (strict_mode == kStrictMode) ? Builtins::kKeyedStoreIC_Initialize_Strict
                                   : Builtins::kKeyedStoreIC_Initialize));
  return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
}
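
// A worked trace of the circular-permutation case (illustrative only):
// if value is in rcx, key in rdx, and receiver in rax, then
// 'xchg rax, rdx' moves the receiver into rdx (rax now holds the key)
// and 'xchg rax, rcx' finishes by putting the value in rax and the key
// in rcx. Two exchanges resolve either three-cycle.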


Result VirtualFrame::CallCallIC(RelocInfo::Mode mode,
                                int arg_count,
                                int loop_nesting) {
  // Function name, arguments, and receiver are found on top of the frame
  // and dropped by the call. The IC expects the name in rcx and the rest
  // on the stack, and drops them all.
  InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
  Handle<Code> ic =
      ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
  Result name = Pop();
  // Spill args, receiver, and function. The call will drop args and
  // receiver.
  PrepareForCall(arg_count + 1, arg_count + 1);
  name.ToRegister(rcx);
  name.Unuse();
  return RawCallCodeObject(ic, mode);
}


Result VirtualFrame::CallKeyedCallIC(RelocInfo::Mode mode,
                                     int arg_count,
                                     int loop_nesting) {
  // Function name, arguments, and receiver are found on top of the frame
  // and dropped by the call. The IC expects the name in rcx and the rest
  // on the stack, and drops them all.
  InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
  Handle<Code> ic =
      ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
  Result name = Pop();
  // Spill args, receiver, and function. The call will drop args and
  // receiver.
  PrepareForCall(arg_count + 1, arg_count + 1);
  name.ToRegister(rcx);
  name.Unuse();
  return RawCallCodeObject(ic, mode);
}


Result VirtualFrame::CallConstructor(int arg_count) {
  // Arguments, receiver, and function are on top of the frame. The
  // IC expects arg count in rax, function in rdi, and the arguments
  // and receiver on the stack.
  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      Builtins::kJSConstructCall));
  // Duplicate the function before preparing the frame.
  PushElementAt(arg_count);
  Result function = Pop();
  PrepareForCall(arg_count + 1, arg_count + 1);  // Spill function and args.
  function.ToRegister(rdi);

  // Constructors are called with the number of arguments in register
  // rax for now. Another option would be to have separate construct
  // call trampolines per argument count encountered.
  Result num_args = cgen()->allocator()->Allocate(rax);
  ASSERT(num_args.is_valid());
  __ Set(num_args.reg(), arg_count);

  function.Unuse();
  num_args.Unuse();
  return RawCallCodeObject(ic, RelocInfo::CONSTRUCT_CALL);
}


void VirtualFrame::PushTryHandler(HandlerType type) {
  ASSERT(cgen()->HasValidEntryRegisters());
  // Grow the expression stack by handler size less one (the return
  // address is already pushed by a call instruction).
  Adjust(kHandlerSize - 1);
  __ PushTryHandler(IN_JAVASCRIPT, type);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64