OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 34 matching lines...)
45 : Assembler(buffer, size), | 45 : Assembler(buffer, size), |
46 generating_stub_(false), | 46 generating_stub_(false), |
47 allow_stub_calls_(true), | 47 allow_stub_calls_(true), |
48 code_object_(Heap::undefined_value()) { | 48 code_object_(Heap::undefined_value()) { |
49 } | 49 } |
50 | 50 |
51 | 51 |
52 void MacroAssembler::RecordWriteHelper(Register object, | 52 void MacroAssembler::RecordWriteHelper(Register object, |
53 Register addr, | 53 Register addr, |
54 Register scratch) { | 54 Register scratch) { |
55 if (FLAG_debug_code) { | 55 if (emit_debug_code()) { |
56 // Check that the object is not in new space. | 56 // Check that the object is not in new space. |
57 Label not_in_new_space; | 57 Label not_in_new_space; |
58 InNewSpace(object, scratch, not_equal, &not_in_new_space); | 58 InNewSpace(object, scratch, not_equal, &not_in_new_space); |
59 Abort("new-space object passed to RecordWriteHelper"); | 59 Abort("new-space object passed to RecordWriteHelper"); |
60 bind(&not_in_new_space); | 60 bind(&not_in_new_space); |
61 } | 61 } |
62 | 62 |
63 // Compute the page start address from the heap object pointer, and reuse | 63 // Compute the page start address from the heap object pointer, and reuse |
64 // the 'object' register for it. | 64 // the 'object' register for it. |
65 and_(object, ~Page::kPageAlignmentMask); | 65 and_(object, ~Page::kPageAlignmentMask); |
(...skipping 40 matching lines...)
106 ASSERT_EQ(0, kSmiTag); | 106 ASSERT_EQ(0, kSmiTag); |
107 lea(dst, Operand(object, dst, times_half_pointer_size, | 107 lea(dst, Operand(object, dst, times_half_pointer_size, |
108 FixedArray::kHeaderSize - kHeapObjectTag)); | 108 FixedArray::kHeaderSize - kHeapObjectTag)); |
109 } | 109 } |
110 RecordWriteHelper(object, dst, value); | 110 RecordWriteHelper(object, dst, value); |
111 | 111 |
112 bind(&done); | 112 bind(&done); |
113 | 113 |
114 // Clobber all input registers when running with the debug-code flag | 114 // Clobber all input registers when running with the debug-code flag |
115 // turned on to provoke errors. | 115 // turned on to provoke errors. |
116 if (FLAG_debug_code) { | 116 if (emit_debug_code()) { |
117 mov(object, Immediate(BitCast<int32_t>(kZapValue))); | 117 mov(object, Immediate(BitCast<int32_t>(kZapValue))); |
118 mov(value, Immediate(BitCast<int32_t>(kZapValue))); | 118 mov(value, Immediate(BitCast<int32_t>(kZapValue))); |
119 mov(scratch, Immediate(BitCast<int32_t>(kZapValue))); | 119 mov(scratch, Immediate(BitCast<int32_t>(kZapValue))); |
120 } | 120 } |
121 } | 121 } |
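
The core of RecordWriteHelper is the masking step at old line 65: clearing the low bits of the heap-object pointer yields the start of the page that owns it, which is where the write-barrier marks are kept. A minimal C++ sketch of that arithmetic, assuming 8 KB pages purely for illustration (the real constant is Page::kPageAlignmentMask):

    #include <cstdint>
    #include <cstdio>

    int main() {
      // Assumption for the example only: 13 page-size bits, i.e. 8 KB pages.
      const uint32_t kPageAlignmentMask = (1u << 13) - 1;

      uint32_t object = 0x2a5f31c9;                        // some heap pointer
      uint32_t page_start = object & ~kPageAlignmentMask;  // and_(object, ~mask)
      std::printf("%#x -> page %#x\n", object, page_start);  // 0x2a5f2000
      return 0;
    }
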
122 | 122 |
123 | 123 |
124 void MacroAssembler::RecordWrite(Register object, | 124 void MacroAssembler::RecordWrite(Register object, |
125 Register address, | 125 Register address, |
126 Register value) { | 126 Register value) { |
127 // First, check if a write barrier is even needed. The tests below | 127 // First, check if a write barrier is even needed. The tests below |
128 // catch stores of Smis and stores into young gen. | 128 // catch stores of Smis and stores into young gen. |
129 Label done; | 129 Label done; |
130 | 130 |
131 // Skip barrier if writing a smi. | 131 // Skip barrier if writing a smi. |
132 ASSERT_EQ(0, kSmiTag); | 132 ASSERT_EQ(0, kSmiTag); |
133 test(value, Immediate(kSmiTagMask)); | 133 test(value, Immediate(kSmiTagMask)); |
134 j(zero, &done); | 134 j(zero, &done); |
135 | 135 |
136 InNewSpace(object, value, equal, &done); | 136 InNewSpace(object, value, equal, &done); |
137 | 137 |
138 RecordWriteHelper(object, address, value); | 138 RecordWriteHelper(object, address, value); |
139 | 139 |
140 bind(&done); | 140 bind(&done); |
141 | 141 |
142 // Clobber all input registers when running with the debug-code flag | 142 // Clobber all input registers when running with the debug-code flag |
143 // turned on to provoke errors. | 143 // turned on to provoke errors. |
144 if (FLAG_debug_code) { | 144 if (emit_debug_code()) { |
145 mov(object, Immediate(BitCast<int32_t>(kZapValue))); | 145 mov(object, Immediate(BitCast<int32_t>(kZapValue))); |
146 mov(address, Immediate(BitCast<int32_t>(kZapValue))); | 146 mov(address, Immediate(BitCast<int32_t>(kZapValue))); |
147 mov(value, Immediate(BitCast<int32_t>(kZapValue))); | 147 mov(value, Immediate(BitCast<int32_t>(kZapValue))); |
148 } | 148 } |
149 } | 149 } |
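
RecordWrite's fast path filters out the two cases that need no barrier before falling through to RecordWriteHelper. Because kSmiTag is 0 (asserted at old line 132) and kSmiTagMask is 1 on ia32, test(value, Immediate(kSmiTagMask)) sets the zero flag exactly when the stored value is a smi, i.e. not a heap pointer at all; the second filter skips stores into objects that already live in new space. A sketch of the same decision in plain C++, with in_new_space standing in for the InNewSpace page check:

    #include <cstdint>

    // Sketch only: mirrors the control flow, not the generated code.
    bool NeedsWriteBarrier(uint32_t object, uint32_t value,
                           bool (*in_new_space)(uint32_t)) {
      if ((value & 1) == 0) return false;      // smi stored: no pointer recorded
      if (in_new_space(object)) return false;  // young-gen target: always scanned
      return true;                             // record the store in the page
    }
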
150 | 150 |
151 | 151 |
152 #ifdef ENABLE_DEBUGGER_SUPPORT | 152 #ifdef ENABLE_DEBUGGER_SUPPORT |
153 void MacroAssembler::DebugBreak() { | 153 void MacroAssembler::DebugBreak() { |
154 Set(eax, Immediate(0)); | 154 Set(eax, Immediate(0)); |
(...skipping 123 matching lines...)
278 Assert(not_equal, "Operand is a smi"); | 278 Assert(not_equal, "Operand is a smi"); |
279 } | 279 } |
280 | 280 |
281 | 281 |
282 void MacroAssembler::EnterFrame(StackFrame::Type type) { | 282 void MacroAssembler::EnterFrame(StackFrame::Type type) { |
283 push(ebp); | 283 push(ebp); |
284 mov(ebp, Operand(esp)); | 284 mov(ebp, Operand(esp)); |
285 push(esi); | 285 push(esi); |
286 push(Immediate(Smi::FromInt(type))); | 286 push(Immediate(Smi::FromInt(type))); |
287 push(Immediate(CodeObject())); | 287 push(Immediate(CodeObject())); |
288 if (FLAG_debug_code) { | 288 if (emit_debug_code()) { |
289 cmp(Operand(esp, 0), Immediate(Factory::undefined_value())); | 289 cmp(Operand(esp, 0), Immediate(Factory::undefined_value())); |
290 Check(not_equal, "code object not properly patched"); | 290 Check(not_equal, "code object not properly patched"); |
291 } | 291 } |
292 } | 292 } |
293 | 293 |
294 | 294 |
295 void MacroAssembler::LeaveFrame(StackFrame::Type type) { | 295 void MacroAssembler::LeaveFrame(StackFrame::Type type) { |
296 if (FLAG_debug_code) { | 296 if (emit_debug_code()) { |
297 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset), | 297 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset), |
298 Immediate(Smi::FromInt(type))); | 298 Immediate(Smi::FromInt(type))); |
299 Check(equal, "stack frame types must match"); | 299 Check(equal, "stack frame types must match"); |
300 } | 300 } |
301 leave(); | 301 leave(); |
302 } | 302 } |
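
Taken together, EnterFrame and LeaveFrame maintain this frame shape; the marker slot LeaveFrame checks is the Smi pushed right after the context. As a sketch (offsets per the StandardFrameConstants names used above, stack growing downward):

    ebp + 4 : return address
    ebp + 0 : saved ebp               <- ebp after mov(ebp, esp)
    ebp - 4 : esi (context)
    ebp - 8 : Smi::FromInt(type)      <- kMarkerOffset, checked in LeaveFrame
    ebp - 12: code object             <- esp after EnterFrame
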
303 | 303 |
304 | 304 |
305 void MacroAssembler::EnterExitFramePrologue() { | 305 void MacroAssembler::EnterExitFramePrologue() { |
306 // Setup the frame structure on the stack. | 306 // Setup the frame structure on the stack. |
(...skipping 236 matching lines...)
543 Register scratch, | 543 Register scratch, |
544 Label* miss) { | 544 Label* miss) { |
545 Label same_contexts; | 545 Label same_contexts; |
546 | 546 |
547 ASSERT(!holder_reg.is(scratch)); | 547 ASSERT(!holder_reg.is(scratch)); |
548 | 548 |
549 // Load current lexical context from the stack frame. | 549 // Load current lexical context from the stack frame. |
550 mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset)); | 550 mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset)); |
551 | 551 |
552 // When generating debug code, make sure the lexical context is set. | 552 // When generating debug code, make sure the lexical context is set. |
553 if (FLAG_debug_code) { | 553 if (emit_debug_code()) { |
554 cmp(Operand(scratch), Immediate(0)); | 554 cmp(Operand(scratch), Immediate(0)); |
555 Check(not_equal, "we should not have an empty lexical context"); | 555 Check(not_equal, "we should not have an empty lexical context"); |
556 } | 556 } |
557 // Load the global context of the current context. | 557 // Load the global context of the current context. |
558 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 558 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
559 mov(scratch, FieldOperand(scratch, offset)); | 559 mov(scratch, FieldOperand(scratch, offset)); |
560 mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset)); | 560 mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset)); |
561 | 561 |
562 // Check the context is a global context. | 562 // Check the context is a global context. |
563 if (FLAG_debug_code) { | 563 if (emit_debug_code()) { |
564 push(scratch); | 564 push(scratch); |
565 // Read the first word and compare to global_context_map. | 565 // Read the first word and compare to global_context_map. |
566 mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 566 mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
567 cmp(scratch, Factory::global_context_map()); | 567 cmp(scratch, Factory::global_context_map()); |
568 Check(equal, "JSGlobalObject::global_context should be a global context."); | 568 Check(equal, "JSGlobalObject::global_context should be a global context."); |
569 pop(scratch); | 569 pop(scratch); |
570 } | 570 } |
571 | 571 |
572 // Check if both contexts are the same. | 572 // Check if both contexts are the same. |
573 cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); | 573 cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); |
574 j(equal, &same_contexts, taken); | 574 j(equal, &same_contexts, taken); |
575 | 575 |
576 // Compare security tokens, save holder_reg on the stack so we can use it | 576 // Compare security tokens, save holder_reg on the stack so we can use it |
577 // as a temporary register. | 577 // as a temporary register. |
578 // | 578 // |
579 // TODO(119): avoid push(holder_reg)/pop(holder_reg) | 579 // TODO(119): avoid push(holder_reg)/pop(holder_reg) |
580 push(holder_reg); | 580 push(holder_reg); |
581 // Check that the security token in the calling global object is | 581 // Check that the security token in the calling global object is |
582 // compatible with the security token in the receiving global | 582 // compatible with the security token in the receiving global |
583 // object. | 583 // object. |
584 mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); | 584 mov(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); |
585 | 585 |
586 // Check the context is a global context. | 586 // Check the context is a global context. |
587 if (FLAG_debug_code) { | 587 if (emit_debug_code()) { |
588 cmp(holder_reg, Factory::null_value()); | 588 cmp(holder_reg, Factory::null_value()); |
589 Check(not_equal, "JSGlobalProxy::context() should not be null."); | 589 Check(not_equal, "JSGlobalProxy::context() should not be null."); |
590 | 590 |
591 push(holder_reg); | 591 push(holder_reg); |
592 // Read the first word and compare to global_context_map(), | 592 // Read the first word and compare to global_context_map(), |
593 mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); | 593 mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); |
594 cmp(holder_reg, Factory::global_context_map()); | 594 cmp(holder_reg, Factory::global_context_map()); |
595 Check(equal, "JSGlobalObject::global_context should be a global context."); | 595 Check(equal, "JSGlobalObject::global_context should be a global context."); |
596 pop(holder_reg); | 596 pop(holder_reg); |
597 } | 597 } |
(...skipping 32 matching lines...)
630 mov(result, Operand::StaticVariable(new_space_allocation_top)); | 630 mov(result, Operand::StaticVariable(new_space_allocation_top)); |
631 } else { | 631 } else { |
632 mov(Operand(scratch), Immediate(new_space_allocation_top)); | 632 mov(Operand(scratch), Immediate(new_space_allocation_top)); |
633 mov(result, Operand(scratch, 0)); | 633 mov(result, Operand(scratch, 0)); |
634 } | 634 } |
635 } | 635 } |
636 | 636 |
637 | 637 |
638 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, | 638 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, |
639 Register scratch) { | 639 Register scratch) { |
640 if (FLAG_debug_code) { | 640 if (emit_debug_code()) { |
641 test(result_end, Immediate(kObjectAlignmentMask)); | 641 test(result_end, Immediate(kObjectAlignmentMask)); |
642 Check(zero, "Unaligned allocation in new space"); | 642 Check(zero, "Unaligned allocation in new space"); |
643 } | 643 } |
644 | 644 |
645 ExternalReference new_space_allocation_top = | 645 ExternalReference new_space_allocation_top = |
646 ExternalReference::new_space_allocation_top_address(); | 646 ExternalReference::new_space_allocation_top_address(); |
647 | 647 |
648 // Update new top. Use scratch if available. | 648 // Update new top. Use scratch if available. |
649 if (scratch.is(no_reg)) { | 649 if (scratch.is(no_reg)) { |
650 mov(Operand::StaticVariable(new_space_allocation_top), result_end); | 650 mov(Operand::StaticVariable(new_space_allocation_top), result_end); |
651 } else { | 651 } else { |
652 mov(Operand(scratch, 0), result_end); | 652 mov(Operand(scratch, 0), result_end); |
653 } | 653 } |
654 } | 654 } |
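
LoadAllocationTopHelper and UpdateAllocationTopHelper are the two halves of a bump-pointer allocator: load the current top, advance it by the object size (the limit check sits in the elided AllocateInNewSpace bodies), then write the new top back. The same logic in plain C++, as a sketch with stand-ins for the new-space external references:

    #include <cstddef>
    #include <cstdint>

    static uintptr_t allocation_top;    // stand-in for new_space_allocation_top
    static uintptr_t allocation_limit;  // stand-in for new_space_allocation_limit

    // Returns 0 when the caller should take the gc_required path.
    uintptr_t BumpAllocate(size_t size_in_bytes) {
      const uintptr_t kObjectAlignmentMask = 3;   // 4-byte objects on ia32
      uintptr_t result = allocation_top;          // LoadAllocationTopHelper
      uintptr_t new_top = result + size_in_bytes;
      if (new_top > allocation_limit) return 0;   // jump to gc_required
      // UpdateAllocationTopHelper: debug alignment check, then store.
      if ((new_top & kObjectAlignmentMask) != 0) return 0;
      allocation_top = new_top;
      return result;
    }
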
655 | 655 |
656 | 656 |
657 void MacroAssembler::AllocateInNewSpace(int object_size, | 657 void MacroAssembler::AllocateInNewSpace(int object_size, |
658 Register result, | 658 Register result, |
659 Register result_end, | 659 Register result_end, |
660 Register scratch, | 660 Register scratch, |
661 Label* gc_required, | 661 Label* gc_required, |
662 AllocationFlags flags) { | 662 AllocationFlags flags) { |
663 if (!FLAG_inline_new) { | 663 if (!FLAG_inline_new) { |
664 if (FLAG_debug_code) { | 664 if (emit_debug_code()) { |
665 // Trash the registers to simulate an allocation failure. | 665 // Trash the registers to simulate an allocation failure. |
666 mov(result, Immediate(0x7091)); | 666 mov(result, Immediate(0x7091)); |
667 if (result_end.is_valid()) { | 667 if (result_end.is_valid()) { |
668 mov(result_end, Immediate(0x7191)); | 668 mov(result_end, Immediate(0x7191)); |
669 } | 669 } |
670 if (scratch.is_valid()) { | 670 if (scratch.is_valid()) { |
671 mov(scratch, Immediate(0x7291)); | 671 mov(scratch, Immediate(0x7291)); |
672 } | 672 } |
673 } | 673 } |
674 jmp(gc_required); | 674 jmp(gc_required); |
(...skipping 36 matching lines...)
711 | 711 |
712 void MacroAssembler::AllocateInNewSpace(int header_size, | 712 void MacroAssembler::AllocateInNewSpace(int header_size, |
713 ScaleFactor element_size, | 713 ScaleFactor element_size, |
714 Register element_count, | 714 Register element_count, |
715 Register result, | 715 Register result, |
716 Register result_end, | 716 Register result_end, |
717 Register scratch, | 717 Register scratch, |
718 Label* gc_required, | 718 Label* gc_required, |
719 AllocationFlags flags) { | 719 AllocationFlags flags) { |
720 if (!FLAG_inline_new) { | 720 if (!FLAG_inline_new) { |
721 if (FLAG_debug_code) { | 721 if (emit_debug_code()) { |
722 // Trash the registers to simulate an allocation failure. | 722 // Trash the registers to simulate an allocation failure. |
723 mov(result, Immediate(0x7091)); | 723 mov(result, Immediate(0x7091)); |
724 mov(result_end, Immediate(0x7191)); | 724 mov(result_end, Immediate(0x7191)); |
725 if (scratch.is_valid()) { | 725 if (scratch.is_valid()) { |
726 mov(scratch, Immediate(0x7291)); | 726 mov(scratch, Immediate(0x7291)); |
727 } | 727 } |
728 // Register element_count is not modified by the function. | 728 // Register element_count is not modified by the function. |
729 } | 729 } |
730 jmp(gc_required); | 730 jmp(gc_required); |
731 return; | 731 return; |
(...skipping 25 matching lines...)
757 } | 757 } |
758 | 758 |
759 | 759 |
760 void MacroAssembler::AllocateInNewSpace(Register object_size, | 760 void MacroAssembler::AllocateInNewSpace(Register object_size, |
761 Register result, | 761 Register result, |
762 Register result_end, | 762 Register result_end, |
763 Register scratch, | 763 Register scratch, |
764 Label* gc_required, | 764 Label* gc_required, |
765 AllocationFlags flags) { | 765 AllocationFlags flags) { |
766 if (!FLAG_inline_new) { | 766 if (!FLAG_inline_new) { |
767 if (FLAG_debug_code) { | 767 if (emit_debug_code()) { |
768 // Trash the registers to simulate an allocation failure. | 768 // Trash the registers to simulate an allocation failure. |
769 mov(result, Immediate(0x7091)); | 769 mov(result, Immediate(0x7091)); |
770 mov(result_end, Immediate(0x7191)); | 770 mov(result_end, Immediate(0x7191)); |
771 if (scratch.is_valid()) { | 771 if (scratch.is_valid()) { |
772 mov(scratch, Immediate(0x7291)); | 772 mov(scratch, Immediate(0x7291)); |
773 } | 773 } |
774 // object_size is left unchanged by this function. | 774 // object_size is left unchanged by this function. |
775 } | 775 } |
776 jmp(gc_required); | 776 jmp(gc_required); |
777 return; | 777 return; |
(...skipping 535 matching lines...)
1313 // 1: arg1 | 1313 // 1: arg1 |
1314 // 0: pointer to the output cell | 1314 // 0: pointer to the output cell |
1315 // | 1315 // |
1316 // Note that this is one more "argument" than the function expects | 1316 // Note that this is one more "argument" than the function expects |
1317 // so the out cell will have to be popped explicitly after returning | 1317 // so the out cell will have to be popped explicitly after returning |
1318 // from the function. The out cell contains Handle. | 1318 // from the function. The out cell contains Handle. |
1319 | 1319 |
1320 // pointer to out cell. | 1320 // pointer to out cell. |
1321 lea(scratch, Operand(esp, (argc + 1) * kPointerSize)); | 1321 lea(scratch, Operand(esp, (argc + 1) * kPointerSize)); |
1322 mov(Operand(esp, 0 * kPointerSize), scratch); // output. | 1322 mov(Operand(esp, 0 * kPointerSize), scratch); // output. |
1323 if (FLAG_debug_code) { | 1323 if (emit_debug_code()) { |
1324 mov(Operand(esp, (argc + 1) * kPointerSize), Immediate(0)); // out cell. | 1324 mov(Operand(esp, (argc + 1) * kPointerSize), Immediate(0)); // out cell. |
1325 } | 1325 } |
1326 } | 1326 } |
1327 } | 1327 } |
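
The layout this code builds, sketched for argc = 2 (each slot is kPointerSize = 4 bytes):

    esp + 12 : out cell (zeroed under emit_debug_code())  <- scratch points here
    esp +  8 : arg2
    esp +  4 : arg1
    esp +  0 : pointer to the out cell, i.e. esp + 12

The extra slot is why the caller has to pop one more word than the callee's declared argument count after the call returns.
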
1328 | 1328 |
1329 | 1329 |
1330 MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function, | 1330 MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function, |
1331 int stack_space) { | 1331 int stack_space) { |
1332 ExternalReference next_address = | 1332 ExternalReference next_address = |
1333 ExternalReference::handle_scope_next_address(); | 1333 ExternalReference::handle_scope_next_address(); |
(...skipping 280 matching lines...)
1614 // Slot is in the current function context. Move it into the | 1614 // Slot is in the current function context. Move it into the |
1615 // destination register in case we store into it (the write barrier | 1615 // destination register in case we store into it (the write barrier |
1616 // cannot be allowed to destroy the context in esi). | 1616 // cannot be allowed to destroy the context in esi). |
1617 mov(dst, esi); | 1617 mov(dst, esi); |
1618 } | 1618 } |
1619 | 1619 |
1620 // We should not have found a 'with' context by walking the context chain | 1620 // We should not have found a 'with' context by walking the context chain |
1621 // (i.e., the static scope chain and runtime context chain do not agree). | 1621 // (i.e., the static scope chain and runtime context chain do not agree). |
1622 // A variable occurring in such a scope should have slot type LOOKUP and | 1622 // A variable occurring in such a scope should have slot type LOOKUP and |
1623 // not CONTEXT. | 1623 // not CONTEXT. |
1624 if (FLAG_debug_code) { | 1624 if (emit_debug_code()) { |
1625 cmp(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX))); | 1625 cmp(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX))); |
1626 Check(equal, "Yo dawg, I heard you liked function contexts " | 1626 Check(equal, "Yo dawg, I heard you liked function contexts " |
1627 "so I put function contexts in all your contexts"); | 1627 "so I put function contexts in all your contexts"); |
1628 } | 1628 } |
1629 } | 1629 } |
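
The check (and its memorable message) leans on a context invariant: in a function context the FCONTEXT slot points back at the context itself, while in a 'with' context it points elsewhere. A sketch of the predicate, ignoring heap-object tagging (Context::SlotOffset folds the tag adjustment into the operand in the real code):

    #include <cstdint>

    // fcontext_index is a stand-in for Context::FCONTEXT_INDEX.
    bool IsFunctionContext(uintptr_t* ctx, int fcontext_index) {
      // A function context's FCONTEXT slot refers to the context itself.
      return ctx[fcontext_index] == reinterpret_cast<uintptr_t>(ctx);
    }
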
1630 | 1630 |
1631 | 1631 |
1632 void MacroAssembler::LoadGlobalFunction(int index, Register function) { | 1632 void MacroAssembler::LoadGlobalFunction(int index, Register function) { |
1633 // Load the global or builtins object from the current context. | 1633 // Load the global or builtins object from the current context. |
1634 mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 1634 mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
1635 // Load the global context from the global or builtins object. | 1635 // Load the global context from the global or builtins object. |
1636 mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset)); | 1636 mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset)); |
1637 // Load the function from the global context. | 1637 // Load the function from the global context. |
1638 mov(function, Operand(function, Context::SlotOffset(index))); | 1638 mov(function, Operand(function, Context::SlotOffset(index))); |
1639 } | 1639 } |
1640 | 1640 |
1641 | 1641 |
1642 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, | 1642 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, |
1643 Register map) { | 1643 Register map) { |
1644 // Load the initial map. The global functions all have initial maps. | 1644 // Load the initial map. The global functions all have initial maps. |
1645 mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 1645 mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
1646 if (FLAG_debug_code) { | 1646 if (emit_debug_code()) { |
1647 Label ok, fail; | 1647 Label ok, fail; |
1648 CheckMap(map, Factory::meta_map(), &fail, false); | 1648 CheckMap(map, Factory::meta_map(), &fail, false); |
1649 jmp(&ok); | 1649 jmp(&ok); |
1650 bind(&fail); | 1650 bind(&fail); |
1651 Abort("Global functions must have initial map"); | 1651 Abort("Global functions must have initial map"); |
1652 bind(&ok); | 1652 bind(&ok); |
1653 } | 1653 } |
1654 } | 1654 } |
1655 | 1655 |
1656 | 1656 |
(...skipping 123 matching lines...)
1780 j(NegateCondition(cc), &skip); | 1780 j(NegateCondition(cc), &skip); |
1781 pushfd(); | 1781 pushfd(); |
1782 DecrementCounter(counter, value); | 1782 DecrementCounter(counter, value); |
1783 popfd(); | 1783 popfd(); |
1784 bind(&skip); | 1784 bind(&skip); |
1785 } | 1785 } |
1786 } | 1786 } |
1787 | 1787 |
1788 | 1788 |
1789 void MacroAssembler::Assert(Condition cc, const char* msg) { | 1789 void MacroAssembler::Assert(Condition cc, const char* msg) { |
1790 if (FLAG_debug_code) Check(cc, msg); | 1790 if (emit_debug_code()) Check(cc, msg); |
1791 } | 1791 } |
1792 | 1792 |
1793 | 1793 |
1794 void MacroAssembler::AssertFastElements(Register elements) { | 1794 void MacroAssembler::AssertFastElements(Register elements) { |
1795 if (FLAG_debug_code) { | 1795 if (emit_debug_code()) { |
1796 Label ok; | 1796 Label ok; |
1797 cmp(FieldOperand(elements, HeapObject::kMapOffset), | 1797 cmp(FieldOperand(elements, HeapObject::kMapOffset), |
1798 Immediate(Factory::fixed_array_map())); | 1798 Immediate(Factory::fixed_array_map())); |
1799 j(equal, &ok); | 1799 j(equal, &ok); |
1800 cmp(FieldOperand(elements, HeapObject::kMapOffset), | 1800 cmp(FieldOperand(elements, HeapObject::kMapOffset), |
1801 Immediate(Factory::fixed_cow_array_map())); | 1801 Immediate(Factory::fixed_cow_array_map())); |
1802 j(equal, &ok); | 1802 j(equal, &ok); |
1803 Abort("JSObject with fast elements map has slow elements"); | 1803 Abort("JSObject with fast elements map has slow elements"); |
1804 bind(&ok); | 1804 bind(&ok); |
1805 } | 1805 } |
(...skipping 47 matching lines...)
1853 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)))); | 1853 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)))); |
1854 CallRuntime(Runtime::kAbort, 2); | 1854 CallRuntime(Runtime::kAbort, 2); |
1855 // will not return here | 1855 // will not return here |
1856 int3(); | 1856 int3(); |
1857 } | 1857 } |
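
The odd-looking push of Smi::FromInt(p1 - p0) at old line 1853 exists because Runtime::kAbort can trigger a GC, so both runtime arguments must look like smis rather than raw pointers. The message pointer p1 is split into a smi-shaped base p0 (the matching push of p0 is presumably in the lines elided just above) plus a small delta; the runtime re-adds them. A sketch of the encoding, assuming kSmiTag = 0 and kSmiTagMask = 1 as elsewhere in this file:

    #include <cstdint>

    // Encoding sketch: both values pushed for kAbort look like smis to the GC.
    void EncodeAbortMessage(const char* msg, intptr_t* p0, intptr_t* delta) {
      intptr_t p1 = reinterpret_cast<intptr_t>(msg);  // low bit may be set
      *p0 = p1 & ~static_cast<intptr_t>(1);           // smi-shaped base (tag 0)
      *delta = p1 - *p0;                              // 0 or 1, pushed as a smi
    }
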
1858 | 1858 |
1859 | 1859 |
1860 void MacroAssembler::JumpIfNotNumber(Register reg, | 1860 void MacroAssembler::JumpIfNotNumber(Register reg, |
1861 TypeInfo info, | 1861 TypeInfo info, |
1862 Label* on_not_number) { | 1862 Label* on_not_number) { |
1863 if (FLAG_debug_code) AbortIfSmi(reg); | 1863 if (emit_debug_code()) AbortIfSmi(reg); |
1864 if (!info.IsNumber()) { | 1864 if (!info.IsNumber()) { |
1865 cmp(FieldOperand(reg, HeapObject::kMapOffset), | 1865 cmp(FieldOperand(reg, HeapObject::kMapOffset), |
1866 Factory::heap_number_map()); | 1866 Factory::heap_number_map()); |
1867 j(not_equal, on_not_number); | 1867 j(not_equal, on_not_number); |
1868 } | 1868 } |
1869 } | 1869 } |
1870 | 1870 |
1871 | 1871 |
1872 void MacroAssembler::ConvertToInt32(Register dst, | 1872 void MacroAssembler::ConvertToInt32(Register dst, |
1873 Register source, | 1873 Register source, |
1874 Register scratch, | 1874 Register scratch, |
1875 TypeInfo info, | 1875 TypeInfo info, |
1876 Label* on_not_int32) { | 1876 Label* on_not_int32) { |
1877 if (FLAG_debug_code) { | 1877 if (emit_debug_code()) { |
1878 AbortIfSmi(source); | 1878 AbortIfSmi(source); |
1879 AbortIfNotNumber(source); | 1879 AbortIfNotNumber(source); |
1880 } | 1880 } |
1881 if (info.IsInteger32()) { | 1881 if (info.IsInteger32()) { |
1882 cvttsd2si(dst, FieldOperand(source, HeapNumber::kValueOffset)); | 1882 cvttsd2si(dst, FieldOperand(source, HeapNumber::kValueOffset)); |
1883 } else { | 1883 } else { |
1884 Label done; | 1884 Label done; |
1885 bool push_pop = (scratch.is(no_reg) && dst.is(source)); | 1885 bool push_pop = (scratch.is(no_reg) && dst.is(source)); |
1886 ASSERT(!scratch.is(source)); | 1886 ASSERT(!scratch.is(source)); |
1887 if (push_pop) { | 1887 if (push_pop) { |
(...skipping 99 matching lines...)
1987 int num_arguments) { | 1987 int num_arguments) { |
1988 // Trashing eax is ok as it will be the return value. | 1988 // Trashing eax is ok as it will be the return value. |
1989 mov(Operand(eax), Immediate(function)); | 1989 mov(Operand(eax), Immediate(function)); |
1990 CallCFunction(eax, num_arguments); | 1990 CallCFunction(eax, num_arguments); |
1991 } | 1991 } |
1992 | 1992 |
1993 | 1993 |
1994 void MacroAssembler::CallCFunction(Register function, | 1994 void MacroAssembler::CallCFunction(Register function, |
1995 int num_arguments) { | 1995 int num_arguments) { |
1996 // Check stack alignment. | 1996 // Check stack alignment. |
1997 if (FLAG_debug_code) { | 1997 if (emit_debug_code()) { |
1998 CheckStackAlignment(); | 1998 CheckStackAlignment(); |
1999 } | 1999 } |
2000 | 2000 |
2001 call(Operand(function)); | 2001 call(Operand(function)); |
2002 if (OS::ActivationFrameAlignment() != 0) { | 2002 if (OS::ActivationFrameAlignment() != 0) { |
2003 mov(esp, Operand(esp, num_arguments * kPointerSize)); | 2003 mov(esp, Operand(esp, num_arguments * kPointerSize)); |
2004 } else { | 2004 } else { |
2005 add(Operand(esp), Immediate(num_arguments * sizeof(int32_t))); | 2005 add(Operand(esp), Immediate(num_arguments * sizeof(int32_t))); |
2006 } | 2006 } |
2007 } | 2007 } |
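
The two cleanup paths differ because of how the frame was prepared: on platforms with a nonzero activation-frame alignment, the original esp is assumed (per the matching PrepareCallCFunction, not shown in this excerpt) to sit in the slot just past the outgoing arguments, so one mov reloads it; otherwise the arguments are simply popped. A sketch of the alignment math that makes the restore work, assuming 16-byte alignment and 4-byte slots as on ia32 Mac OS X:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uintptr_t kAlignment = 16;   // e.g. OS::ActivationFrameAlignment()
      const int num_arguments = 3;

      uintptr_t esp = 0xbffff7ac;        // pre-call stack pointer (example)
      uintptr_t saved = esp;
      esp -= (num_arguments + 1) * 4;    // argument slots + saved-esp slot
      esp &= ~(kAlignment - 1);          // round down to 16-byte alignment
      // `saved` would be stored at esp + num_arguments * 4, exactly where
      // mov(esp, Operand(esp, num_arguments * kPointerSize)) reloads it.
      std::printf("esp=%#lx, saved-esp slot at %#lx\n",
                  static_cast<unsigned long>(esp),
                  static_cast<unsigned long>(esp + num_arguments * 4));
      return 0;
    }
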
(...skipping 14 matching lines...)
2022 | 2022 |
2023 // Check that the code was patched as expected. | 2023 // Check that the code was patched as expected. |
2024 ASSERT(masm_.pc_ == address_ + size_); | 2024 ASSERT(masm_.pc_ == address_ + size_); |
2025 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2025 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2026 } | 2026 } |
2027 | 2027 |
2028 | 2028 |
2029 } } // namespace v8::internal | 2029 } } // namespace v8::internal |
2030 | 2030 |
2031 #endif // V8_TARGET_ARCH_IA32 | 2031 #endif // V8_TARGET_ARCH_IA32 |