Chromium Code Reviews

| Index: src/x64/codegen-x64.cc |
| diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc |
| index 1885182287f70b34dd4a16a7fa73193b4bf8ae1b..5c84d7e736c0c859b7df5d0e5b80e29f4ce4840d 100644 |
| --- a/src/x64/codegen-x64.cc |
| +++ b/src/x64/codegen-x64.cc |
| @@ -4800,8 +4800,10 @@ void DeferredRegExpLiteral::Generate() { |
| class DeferredAllocateInNewSpace: public DeferredCode { |
| public: |
| - DeferredAllocateInNewSpace(int size, Register target) |
| - : size_(size), target_(target) { |
| + DeferredAllocateInNewSpace(int size, |
| + Register target, |
| + int registers_to_save = 0) |
| + : size_(size), target_(target), registers_to_save_(registers_to_save) { |
| ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace()); |
| set_comment("[ DeferredAllocateInNewSpace"); |
| } |
| @@ -4810,15 +4812,28 @@ class DeferredAllocateInNewSpace: public DeferredCode { |
| private: |
| int size_; |
| Register target_; |
| + int registers_to_save_; |
| }; |
| void DeferredAllocateInNewSpace::Generate() { |
| + for (int i = 0; i < kNumRegs; i++) { |
| + if (registers_to_save_ & (1 << i)) { |
| + Register save_register = { i }; |
| + __ push(save_register); |
| + } |
| + } |
| __ Push(Smi::FromInt(size_)); |
| __ CallRuntime(Runtime::kAllocateInNewSpace, 1); |
| if (!target_.is(rax)) { |
| __ movq(target_, rax); |
| } |
| + for (int i = kNumRegs - 1; i >= 0; i--) { |
| + if (registers_to_save_ & (1 << i)) { |
| + Register save_register = { i }; |
| + __ push(save_register); |
Vitaly Repeshko (2010/08/20 13:29:49):
push -> pop.

Lasse Reichstein (2010/08/23 12:52:11):
Double-doh!
Fixed, for real.
| + } |
| + } |
| } |
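
The save/restore pair above is driven by a plain bitmask: bit i of registers_to_save_ selects the register whose code is i, the first loop pushes the selected registers in ascending code order, and the second loop must walk the mask in descending order and pop so the stack order matches (the bug the review comment above points out). Below is a minimal standalone C++ sketch of that pairing; the register file, machine stack, and kNumRegs value are illustrative stand-ins, not V8's macro assembler.

#include <cassert>
#include <cstdint>
#include <vector>

// Illustrative stand-ins only -- not V8 types.
constexpr int kNumRegs = 16;                  // x64 general-purpose registers
static uint64_t register_file[kNumRegs];      // models register contents
static std::vector<uint64_t> machine_stack;   // models the hardware stack

// Push every register whose mask bit is set, lowest register code first.
static void SaveRegisters(int registers_to_save) {
  for (int i = 0; i < kNumRegs; i++) {
    if (registers_to_save & (1 << i)) machine_stack.push_back(register_file[i]);
  }
}

// Restore in the reverse order, popping -- the second loop in the patch
// originally pushed again, which is what the review comment corrects.
static void RestoreRegisters(int registers_to_save) {
  for (int i = kNumRegs - 1; i >= 0; i--) {
    if (registers_to_save & (1 << i)) {
      register_file[i] = machine_stack.back();
      machine_stack.pop_back();
    }
  }
}

int main() {
  register_file[0] = 42;                    // register code 0 (rax)
  register_file[2] = 7;                     // register code 2 (rdx)
  const int mask = (1 << 0) | (1 << 2);     // analogous to rax.bit() | rdx.bit()
  SaveRegisters(mask);
  register_file[0] = register_file[2] = 0;  // clobbered by the runtime call
  RestoreRegisters(mask);
  assert(register_file[0] == 42 && register_file[2] == 7);
  return 0;
}

The defaulted registers_to_save = 0 argument keeps existing DeferredAllocateInNewSpace call sites unchanged: with an empty mask both loops do nothing.
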
| @@ -6606,6 +6621,79 @@ void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) { |
| } |
| +void CodeGenerator::GenerateRegExpCloneResult(ZoneList<Expression*>* args) { |
| + ASSERT_EQ(1, args->length()); |
| + |
| + Load(args->at(0)); |
| + Result object_result = frame_->Pop(); |
| + object_result.ToRegister(rax); |
| + object_result.Unuse(); |
| + { |
| + VirtualFrame::SpilledScope spilled_scope; |
| + |
| + Label done; |
| + __ JumpIfSmi(rax, &done); |
| + |
| + // Load JSRegExpResult map into rdx. |
| + // Arguments to this function should be results of calling RegExp exec, |
| + // which is either an unmodified JSRegExpResult or null. Anything not having |
| + // the unmodified JSRegExpResult map is returned unmodified. |
| + // This also ensures that elements are fast. |
| + |
| + __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX)); |
| + __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset)); |
| + __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX)); |
| + __ cmpq(rdx, FieldOperand(rax, HeapObject::kMapOffset)); |
| + __ j(not_equal, &done); |
| + |
| + DeferredAllocateInNewSpace* allocate_fallback = |
| + new DeferredAllocateInNewSpace(JSRegExpResult::kSize, |
| + rbx, |
| + rdx.bit() | rax.bit()); |
| + |
| + // All set, copy the contents to a new object. |
| + __ AllocateInNewSpace(JSRegExpResult::kSize, |
| + rbx, |
| + no_reg, |
| + no_reg, |
| + allocate_fallback->entry_label(), |
| + TAG_OBJECT); |
| + __ bind(allocate_fallback->exit_label()); |
| + |
| + STATIC_ASSERT(JSRegExpResult::kSize % (2 * kPointerSize) == 0); |
| + // There is an even number of fields, so unroll the loop once |
| + // for efficiency. |
| + for (int i = 0; i < JSRegExpResult::kSize; i += 2 * kPointerSize) { |
| + STATIC_ASSERT(JSObject::kMapOffset % (2 * kPointerSize) == 0); |
| + if (i != JSObject::kMapOffset) { |
| + // The map was already loaded into rdx. |
| + __ movq(rdx, FieldOperand(rax, i)); |
| + } |
| + __ movq(rcx, FieldOperand(rax, i + kPointerSize)); |
| + |
| + STATIC_ASSERT(JSObject::kElementsOffset % (2 * kPointerSize) == 0); |
| + if (i == JSObject::kElementsOffset) { |
| + // If the elements array isn't empty, make it copy-on-write |
| + // before copying it. |
| + Label empty; |
| + __ CompareRoot(rdx, Heap::kEmptyFixedArrayRootIndex); |
| + __ j(equal, &empty); |
| + ASSERT(!Heap::InNewSpace(Heap::fixed_cow_array_map())); |
| + __ LoadRoot(kScratchRegister, Heap::kFixedCOWArrayMapRootIndex); |
| + __ movq(FieldOperand(rdx, HeapObject::kMapOffset), kScratchRegister); |
| + __ bind(&empty); |
| + } |
| + __ movq(FieldOperand(rbx, i), rdx); |
| + __ movq(FieldOperand(rbx, i + kPointerSize), rcx); |
| + } |
| + __ movq(rax, rbx); |
| + |
| + __ bind(&done); |
| + } |
| + frame_->Push(rax); |
| +} |
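
At the object level, GenerateRegExpCloneResult reduces to: return smis, null, and anything that no longer carries the unmodified JSRegExpResult map untouched; otherwise mark a non-empty elements array as copy-on-write and take a field-by-field shallow copy, so the original and the clone share the elements until either side writes to them. The standalone C++ sketch below mirrors that flow; the struct layout, the sentinel maps, and the empty-elements singleton are simplified assumptions for illustration, not V8's real heap layout.

#include <array>
#include <cassert>
#include <cstdint>

// Simplified stand-ins -- not V8's heap layout.
struct Map {};
struct FixedArray { const Map* map; };
struct RegExpResultLike {
  const Map* map;
  FixedArray* elements;
  std::array<uint64_t, 4> other_fields;  // index, input, etc., copied blindly
};

// Assumed singletons playing the role of the root-list entries used above.
static const Map kJSRegExpResultMap = {};   // the unmodified JSRegExpResult map
static const Map kFixedCOWArrayMap = {};    // marks an elements array copy-on-write
static FixedArray kEmptyFixedArray = { nullptr };

// Returns the argument unchanged unless it is an unmodified JSRegExpResult,
// in which case a shallow copy sharing a COW elements array is returned.
RegExpResultLike* CloneRegExpResult(RegExpResultLike* obj, bool is_smi) {
  if (is_smi || obj->map != &kJSRegExpResultMap) return obj;  // leave as-is
  if (obj->elements != &kEmptyFixedArray) {
    // Flip the elements array to copy-on-write *before* copying, so the
    // original and the clone both treat it as shared.
    obj->elements->map = &kFixedCOWArrayMap;
  }
  return new RegExpResultLike(*obj);  // field-by-field copy, map included
}

int main() {
  FixedArray shared = { nullptr };
  RegExpResultLike original = { &kJSRegExpResultMap, &shared, {} };
  RegExpResultLike* copy = CloneRegExpResult(&original, /*is_smi=*/false);
  assert(copy != &original);                    // a fresh object came back
  assert(copy->elements == original.elements);  // elements array is shared...
  assert(shared.map == &kFixedCOWArrayMap);     // ...and marked copy-on-write
  delete copy;
  return 0;
}

The generated code above does the same copy two pointer-size words per iteration (hence the STATIC_ASSERT that JSRegExpResult::kSize is a multiple of 2 * kPointerSize) and skips re-reading the map word, which is already held in rdx from the earlier comparison.
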
| + |
| + |
| class DeferredSearchCache: public DeferredCode { |
| public: |
| DeferredSearchCache(Register dst, |