Index: src/x64/codegen-x64.cc
===================================================================
--- src/x64/codegen-x64.cc (revision 5316)
+++ src/x64/codegen-x64.cc (working copy)
@@ -2616,8 +2616,10 @@
   __ j(is_smi, &build_args);
   __ CmpObjectType(rax, JS_FUNCTION_TYPE, rcx);
   __ j(not_equal, &build_args);
+  __ movq(rcx, FieldOperand(rax, JSFunction::kCodeEntryOffset));
+  __ subq(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
   Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
-  __ Cmp(FieldOperand(rax, JSFunction::kCodeOffset), apply_code);
+  __ Cmp(rcx, apply_code);
   __ j(not_equal, &build_args);
 
   // Check that applicand is a function.
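
Note: this hunk follows the layout change that makes JSFunction cache the raw
entry address of its code (kCodeEntryOffset) instead of a tagged Code pointer
(the removed kCodeOffset). To compare against the FunctionApply builtin's Code
handle, the entry address is first converted back into a tagged pointer by
subtracting Code::kHeaderSize - kHeapObjectTag. A minimal standalone sketch of
that arithmetic, using V8's kHeapObjectTag of 1 but a hypothetical stand-in
value for Code::kHeaderSize:

    #include <assert.h>
    #include <stdint.h>

    static const intptr_t kHeapObjectTag = 1;    // Matches V8.
    static const intptr_t kCodeHeaderSize = 32;  // Hypothetical stand-in.

    // What subq(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag)) does:
    // turn the cached entry address back into a tagged Code pointer.
    intptr_t TaggedCodeFromEntry(intptr_t entry) {
      return entry - (kCodeHeaderSize - kHeapObjectTag);
    }

    // The inverse, as computed in the last hunk by
    // lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); FieldOperand folds
    // the -kHeapObjectTag into the lea displacement.
    intptr_t EntryFromTaggedCode(intptr_t tagged_code) {
      return tagged_code + kCodeHeaderSize - kHeapObjectTag;
    }

    int main() {
      intptr_t tagged_code = 0x100001;  // Any tagged (odd) address.
      assert(TaggedCodeFromEntry(EntryFromTaggedCode(tagged_code))
             == tagged_code);
      return 0;
    }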
@@ -4800,8 +4802,10 @@
 
 class DeferredAllocateInNewSpace: public DeferredCode {
  public:
-  DeferredAllocateInNewSpace(int size, Register target)
-      : size_(size), target_(target) {
+  DeferredAllocateInNewSpace(int size,
+                             Register target,
+                             int registers_to_save = 0)
+      : size_(size), target_(target), registers_to_save_(registers_to_save) {
     ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
     set_comment("[ DeferredAllocateInNewSpace");
   }
@@ -4810,15 +4814,28 @@
  private:
   int size_;
   Register target_;
+  int registers_to_save_;
 };
 
 
 void DeferredAllocateInNewSpace::Generate() {
+  for (int i = 0; i < kNumRegs; i++) {
+    if (registers_to_save_ & (1 << i)) {
+      Register save_register = { i };
+      __ push(save_register);
+    }
+  }
   __ Push(Smi::FromInt(size_));
   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
   if (!target_.is(rax)) {
     __ movq(target_, rax);
   }
+  for (int i = kNumRegs - 1; i >= 0; i--) {
+    if (registers_to_save_ & (1 << i)) {
+      Register save_register = { i };
+      __ pop(save_register);
+    }
+  }
 }
 
 
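Note: registers_to_save is a bit mask over general-purpose registers; bit i
marks register i as live across the runtime call. Generate() pushes marked
registers in ascending index order before the call and pops them back in
descending order afterwards, so each value lands in the register it came from.
A rough C++ model of that discipline (kNumRegs is 16 on x64; the plain array
stands in for the register file):

    #include <assert.h>
    #include <stack>

    const int kNumRegs = 16;  // x64 general-purpose registers.

    // Emulate push-ascending / pop-descending around a clobbering call.
    void SaveCallRestore(int registers_to_save, int regs[]) {
      std::stack<int> stack;
      for (int i = 0; i < kNumRegs; i++) {       // Save phase.
        if (registers_to_save & (1 << i)) stack.push(regs[i]);
      }
      for (int i = 0; i < kNumRegs; i++) regs[i] = -1;  // "Runtime call".
      for (int i = kNumRegs - 1; i >= 0; i--) {  // Restore phase.
        if (registers_to_save & (1 << i)) {
          regs[i] = stack.top();
          stack.pop();
        }
      }
    }

    int main() {
      int regs[kNumRegs] = { 10, 11, 12, 13 };
      SaveCallRestore((1 << 0) | (1 << 2), regs);  // Preserve regs 0 and 2.
      assert(regs[0] == 10 && regs[2] == 12);      // Restored.
      assert(regs[1] == -1);                       // Not preserved.
      return 0;
    }
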
@@ -6606,6 +6623,79 @@
 }
 
 
+void CodeGenerator::GenerateRegExpCloneResult(ZoneList<Expression*>* args) {
+  ASSERT_EQ(1, args->length());
+
+  Load(args->at(0));
+  Result object_result = frame_->Pop();
+  object_result.ToRegister(rax);
+  object_result.Unuse();
+  {
+    VirtualFrame::SpilledScope spilled_scope;
+
+    Label done;
+    __ JumpIfSmi(rax, &done);
+
+    // Load JSRegExpResult map into rdx.
+    // Arguments to this function should be results of calling RegExp exec,
+    // which is either an unmodified JSRegExpResult or null. Anything not having
+    // the unmodified JSRegExpResult map is returned unmodified.
+    // This also ensures that elements are fast.
+
+    __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX));
+    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
+    __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
+    __ cmpq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
+    __ j(not_equal, &done);
+
+    DeferredAllocateInNewSpace* allocate_fallback =
+        new DeferredAllocateInNewSpace(JSRegExpResult::kSize,
+                                       rbx,
+                                       rdx.bit() | rax.bit());
+
+    // All set, copy the contents to a new object.
+    __ AllocateInNewSpace(JSRegExpResult::kSize,
+                          rbx,
+                          no_reg,
+                          no_reg,
+                          allocate_fallback->entry_label(),
+                          TAG_OBJECT);
+    __ bind(allocate_fallback->exit_label());
+
+    STATIC_ASSERT(JSRegExpResult::kSize % (2 * kPointerSize) == 0);
+    // There is an even number of fields, so unroll the loop once
+    // for efficiency.
+    for (int i = 0; i < JSRegExpResult::kSize; i += 2 * kPointerSize) {
+      STATIC_ASSERT(JSObject::kMapOffset % (2 * kPointerSize) == 0);
+      if (i != JSObject::kMapOffset) {
+        // The map was already loaded into rdx.
+        __ movq(rdx, FieldOperand(rax, i));
+      }
+      __ movq(rcx, FieldOperand(rax, i + kPointerSize));
+
+      STATIC_ASSERT(JSObject::kElementsOffset % (2 * kPointerSize) == 0);
+      if (i == JSObject::kElementsOffset) {
+        // If the elements array isn't empty, make it copy-on-write
+        // before copying it.
+        Label empty;
+        __ CompareRoot(rdx, Heap::kEmptyFixedArrayRootIndex);
+        __ j(equal, &empty);
+        ASSERT(!Heap::InNewSpace(Heap::fixed_cow_array_map()));
+        __ LoadRoot(kScratchRegister, Heap::kFixedCOWArrayMapRootIndex);
+        __ movq(FieldOperand(rdx, HeapObject::kMapOffset), kScratchRegister);
+        __ bind(&empty);
+      }
+      __ movq(FieldOperand(rbx, i), rdx);
+      __ movq(FieldOperand(rbx, i + kPointerSize), rcx);
+    }
+    __ movq(rax, rbx);
+
+    __ bind(&done);
+  }
+  frame_->Push(rax);
+}
+
+
 class DeferredSearchCache: public DeferredCode {
  public:
   DeferredSearchCache(Register dst,
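
Note: the new GenerateRegExpCloneResult copies the fixed-size JSRegExpResult
two words per loop iteration. The one non-obvious field is elements: instead
of deep-copying the match array, its map is swapped to the copy-on-write fixed
array map, after which both the original and the clone may safely alias it;
the empty fixed array is a shared immutable singleton and is skipped. A rough
model of that sharing idea, with hypothetical types standing in for V8's
FixedArray and JSRegExpResult:

    #include <stddef.h>

    // Hypothetical stand-ins for V8's heap objects.
    struct FixedArray {
      bool copy_on_write;  // Models the COW map swap.
    };

    struct RegExpResult {
      FixedArray* elements;
    };

    // Clone by sharing: mark the backing array copy-on-write first, so any
    // later write through either result must replace the array rather than
    // mutate the shared one. The null check stands in for the
    // kEmptyFixedArrayRootIndex compare (the empty array is never written).
    void CloneSharingElements(RegExpResult* src, RegExpResult* dst) {
      if (src->elements != NULL) {
        src->elements->copy_on_write = true;
      }
      dst->elements = src->elements;
    }
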
@@ -8758,7 +8848,8 @@
   // Initialize the code pointer in the function to be the one
   // found in the shared function info object.
   __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
-  __ movq(FieldOperand(rax, JSFunction::kCodeOffset), rdx);
+  __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
+  __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);
 
 
   // Return and remove the on-stack parameter.
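
Note: this last hunk is the closure-creation counterpart of the first. When a
function object is built from its SharedFunctionInfo, the stub now stores the
code's entry address into JSFunction::kCodeEntryOffset; the lea folds both the
header skip and the tag removal into one instruction, so later calls can jump
through the cached slot directly. In terms of the hypothetical sketch after
the first hunk, the store amounts to the following (layouts and names here are
illustrative only, not V8's):

    #include <stdint.h>

    struct SharedFunctionInfoLike { intptr_t tagged_code; };
    struct JSFunctionLike { intptr_t code_entry; };

    static const intptr_t kHeapObjectTag = 1;    // Matches V8.
    static const intptr_t kCodeHeaderSize = 32;  // Hypothetical stand-in.

    // What the two added instructions compute: the entry address derived
    // from the shared info's tagged Code pointer, stored in the function.
    void InitializeCodeEntry(const SharedFunctionInfoLike& shared,
                             JSFunctionLike* function) {
      function->code_entry =
          shared.tagged_code + kCodeHeaderSize - kHeapObjectTag;
    }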
|