OLD | NEW |
---|---|
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4782 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
4793 __ Push(node_->pattern()); | 4793 __ Push(node_->pattern()); |
4794 // RegExp flags (3). | 4794 // RegExp flags (3). |
4795 __ Push(node_->flags()); | 4795 __ Push(node_->flags()); |
4796 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); | 4796 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); |
4797 if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax); | 4797 if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax); |
4798 } | 4798 } |
4799 | 4799 |
4800 | 4800 |
4801 class DeferredAllocateInNewSpace: public DeferredCode { | 4801 class DeferredAllocateInNewSpace: public DeferredCode { |
4802 public: | 4802 public: |
4803 DeferredAllocateInNewSpace(int size, Register target) | 4803 DeferredAllocateInNewSpace(int size, |
4804 : size_(size), target_(target) { | 4804 Register target, |
4805 int registers_to_save = 0) | |
4806 : size_(size), target_(target), registers_to_save_(registers_to_save) { | |
4805 ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace()); | 4807 ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace()); |
4806 set_comment("[ DeferredAllocateInNewSpace"); | 4808 set_comment("[ DeferredAllocateInNewSpace"); |
4807 } | 4809 } |
4808 void Generate(); | 4810 void Generate(); |
4809 | 4811 |
4810 private: | 4812 private: |
4811 int size_; | 4813 int size_; |
4812 Register target_; | 4814 Register target_; |
4815 int registers_to_save_; | |
4813 }; | 4816 }; |
4814 | 4817 |
4815 | 4818 |
4816 void DeferredAllocateInNewSpace::Generate() { | 4819 void DeferredAllocateInNewSpace::Generate() { |
4820 for (int i = 0; i < kNumRegs; i++) { | |
4821 if (registers_to_save_ & (1 << i)) { | |
4822 Register save_register = { i }; | |
4823 __ push(save_register); | |
4824 } | |
4825 } | |
4817 __ Push(Smi::FromInt(size_)); | 4826 __ Push(Smi::FromInt(size_)); |
4818 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | 4827 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); |
4819 if (!target_.is(rax)) { | 4828 if (!target_.is(rax)) { |
4820 __ movq(target_, rax); | 4829 __ movq(target_, rax); |
4821 } | 4830 } |
4831 for (int i = kNumRegs - 1; i >= 0; i--) { | |
4832 if (registers_to_save_ & (1 << i)) { | |
4833 Register save_register = { i }; | |
 4834 __ pop(save_register); | |
Vitaly Repeshko
2010/08/20 13:29:49
push -> pop.
Lasse Reichstein
2010/08/23 12:52:11
Double-doh!
Fixed, for real.
| |
4835 } | |
4836 } | |
4822 } | 4837 } |
4823 | 4838 |
4824 | 4839 |
4825 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) { | 4840 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) { |
4826 Comment cmnt(masm_, "[ RegExp Literal"); | 4841 Comment cmnt(masm_, "[ RegExp Literal"); |
4827 | 4842 |
4828 // Retrieve the literals array and check the allocated entry. Begin | 4843 // Retrieve the literals array and check the allocated entry. Begin |
4829 // with a writable copy of the function of this activation in a | 4844 // with a writable copy of the function of this activation in a |
4830 // register. | 4845 // register. |
4831 frame_->PushFunction(); | 4846 frame_->PushFunction(); |
(...skipping 1767 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
6599 __ bind(&slowcase); | 6614 __ bind(&slowcase); |
6600 __ CallRuntime(Runtime::kRegExpConstructResult, 3); | 6615 __ CallRuntime(Runtime::kRegExpConstructResult, 3); |
6601 | 6616 |
6602 __ bind(&done); | 6617 __ bind(&done); |
6603 } | 6618 } |
6604 frame_->Forget(3); | 6619 frame_->Forget(3); |
6605 frame_->Push(rax); | 6620 frame_->Push(rax); |
6606 } | 6621 } |
6607 | 6622 |
6608 | 6623 |
6624 void CodeGenerator::GenerateRegExpCloneResult(ZoneList<Expression*>* args) { | |
6625 ASSERT_EQ(1, args->length()); | |
6626 | |
6627 Load(args->at(0)); | |
6628 Result object_result = frame_->Pop(); | |
6629 object_result.ToRegister(rax); | |
6630 object_result.Unuse(); | |
6631 { | |
6632 VirtualFrame::SpilledScope spilled_scope; | |
6633 | |
6634 Label done; | |
6635 __ JumpIfSmi(rax, &done); | |
6636 | |
6637 // Load JSRegExpResult map into rdx. | |
6638 // Arguments to this function should be results of calling RegExp exec, | |
6639 // which is either an unmodified JSRegExpResult or null. Anything not having | |
6640 // the unmodified JSRegExpResult map is returned unmodified. | |
6641 // This also ensures that elements are fast. | |
6642 | |
6643 __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX)); | |
6644 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset)); | |
6645 __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX)); | |
6646 __ cmpq(rdx, FieldOperand(rax, HeapObject::kMapOffset)); | |
6647 __ j(not_equal, &done); | |
6648 | |
6649 DeferredAllocateInNewSpace* allocate_fallback = | |
6650 new DeferredAllocateInNewSpace(JSRegExpResult::kSize, | |
6651 rbx, | |
6652 rdx.bit() | rax.bit()); | |
6653 | |
6654 // All set, copy the contents to a new object. | |
6655 __ AllocateInNewSpace(JSRegExpResult::kSize, | |
6656 rbx, | |
6657 no_reg, | |
6658 no_reg, | |
6659 allocate_fallback->entry_label(), | |
6660 TAG_OBJECT); | |
6661 __ bind(allocate_fallback->exit_label()); | |
6662 | |
6663 STATIC_ASSERT(JSRegExpResult::kSize % (2 * kPointerSize) == 0); | |
6664 // There is an even number of fields, so unroll the loop once | |
6665 // for efficiency. | |
6666 for (int i = 0; i < JSRegExpResult::kSize; i += 2 * kPointerSize) { | |
6667 STATIC_ASSERT(JSObject::kMapOffset % (2 * kPointerSize) == 0); | |
6668 if (i != JSObject::kMapOffset) { | |
 6669 // The map was already loaded into rdx. | |
6670 __ movq(rdx, FieldOperand(rax, i)); | |
6671 } | |
6672 __ movq(rcx, FieldOperand(rax, i + kPointerSize)); | |
6673 | |
6674 STATIC_ASSERT(JSObject::kElementsOffset % (2 * kPointerSize) == 0); | |
6675 if (i == JSObject::kElementsOffset) { | |
6676 // If the elements array isn't empty, make it copy-on-write | |
6677 // before copying it. | |
6678 Label empty; | |
6679 __ CompareRoot(rdx, Heap::kEmptyFixedArrayRootIndex); | |
6680 __ j(equal, &empty); | |
6681 ASSERT(!Heap::InNewSpace(Heap::fixed_cow_array_map())); | |
6682 __ LoadRoot(kScratchRegister, Heap::kFixedCOWArrayMapRootIndex); | |
6683 __ movq(FieldOperand(rdx, HeapObject::kMapOffset), kScratchRegister); | |
6684 __ bind(&empty); | |
6685 } | |
6686 __ movq(FieldOperand(rbx, i), rdx); | |
6687 __ movq(FieldOperand(rbx, i + kPointerSize), rcx); | |
6688 } | |
6689 __ movq(rax, rbx); | |
6690 | |
6691 __ bind(&done); | |
6692 } | |
6693 frame_->Push(rax); | |
6694 } | |
6695 | |
6696 | |
6609 class DeferredSearchCache: public DeferredCode { | 6697 class DeferredSearchCache: public DeferredCode { |
6610 public: | 6698 public: |
6611 DeferredSearchCache(Register dst, | 6699 DeferredSearchCache(Register dst, |
6612 Register cache, | 6700 Register cache, |
6613 Register key, | 6701 Register key, |
6614 Register scratch) | 6702 Register scratch) |
6615 : dst_(dst), cache_(cache), key_(key), scratch_(scratch) { | 6703 : dst_(dst), cache_(cache), key_(key), scratch_(scratch) { |
6616 set_comment("[ DeferredSearchCache"); | 6704 set_comment("[ DeferredSearchCache"); |
6617 } | 6705 } |
6618 | 6706 |
(...skipping 6142 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
12761 #undef __ | 12849 #undef __ |
12762 | 12850 |
12763 void RecordWriteStub::Generate(MacroAssembler* masm) { | 12851 void RecordWriteStub::Generate(MacroAssembler* masm) { |
12764 masm->RecordWriteHelper(object_, addr_, scratch_); | 12852 masm->RecordWriteHelper(object_, addr_, scratch_); |
12765 masm->ret(0); | 12853 masm->ret(0); |
12766 } | 12854 } |
12767 | 12855 |
12768 } } // namespace v8::internal | 12856 } } // namespace v8::internal |
12769 | 12857 |
12770 #endif // V8_TARGET_ARCH_X64 | 12858 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |