Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(618)

Side by Side Diff: src/x64/codegen-x64.cc

Issue 3197010: Version 2.3.10... (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 10 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/codegen-x64.h ('k') | src/x64/full-codegen-x64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 2598 matching lines...) Expand 10 before | Expand all | Expand 10 after
2609 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); 2609 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
2610 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); 2610 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
2611 __ j(below, &build_args); 2611 __ j(below, &build_args);
2612 2612
2613 // Check that applicand.apply is Function.prototype.apply. 2613 // Check that applicand.apply is Function.prototype.apply.
2614 __ movq(rax, Operand(rsp, kPointerSize)); 2614 __ movq(rax, Operand(rsp, kPointerSize));
2615 is_smi = masm_->CheckSmi(rax); 2615 is_smi = masm_->CheckSmi(rax);
2616 __ j(is_smi, &build_args); 2616 __ j(is_smi, &build_args);
2617 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rcx); 2617 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rcx);
2618 __ j(not_equal, &build_args); 2618 __ j(not_equal, &build_args);
2619 __ movq(rcx, FieldOperand(rax, JSFunction::kCodeEntryOffset));
2620 __ subq(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2619 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply)); 2621 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
2620 __ Cmp(FieldOperand(rax, JSFunction::kCodeOffset), apply_code); 2622 __ Cmp(FieldOperand(rcx, SharedFunctionInfo::kCodeOffset), apply_code);
2621 __ j(not_equal, &build_args); 2623 __ j(not_equal, &build_args);
2622 2624
2623 // Check that applicand is a function. 2625 // Check that applicand is a function.
2624 __ movq(rdi, Operand(rsp, 2 * kPointerSize)); 2626 __ movq(rdi, Operand(rsp, 2 * kPointerSize));
2625 is_smi = masm_->CheckSmi(rdi); 2627 is_smi = masm_->CheckSmi(rdi);
2626 __ j(is_smi, &build_args); 2628 __ j(is_smi, &build_args);
2627 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 2629 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2628 __ j(not_equal, &build_args); 2630 __ j(not_equal, &build_args);
2629 2631
2630 // Copy the arguments to this function possibly from the 2632 // Copy the arguments to this function possibly from the
(...skipping 2162 matching lines...) Expand 10 before | Expand all | Expand 10 after
4793 __ Push(node_->pattern()); 4795 __ Push(node_->pattern());
4794 // RegExp flags (3). 4796 // RegExp flags (3).
4795 __ Push(node_->flags()); 4797 __ Push(node_->flags());
4796 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); 4798 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
4797 if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax); 4799 if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax);
4798 } 4800 }
4799 4801
4800 4802
// Deferred code that calls the runtime to allocate |size| bytes in new
// space, leaving the tagged result in |target|.  |registers_to_save| is a
// bit mask, indexed by register code, of registers that are live across
// the allocation and must be preserved around the runtime call.
class DeferredAllocateInNewSpace: public DeferredCode {
 public:
  DeferredAllocateInNewSpace(int size,
                             Register target,
                             int registers_to_save = 0)
      : size_(size), target_(target), registers_to_save_(registers_to_save) {
    // The runtime call only supports allocations that fit in new space.
    ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
    set_comment("[ DeferredAllocateInNewSpace");
  }
  void Generate();

 private:
  int size_;                // Allocation size in bytes.
  Register target_;         // Receives the allocation result.
  int registers_to_save_;   // Bit mask of registers to preserve.
};
4814 4819
4815 4820
4816 void DeferredAllocateInNewSpace::Generate() { 4821 void DeferredAllocateInNewSpace::Generate() {
4822 for (int i = 0; i < kNumRegs; i++) {
4823 if (registers_to_save_ & (1 << i)) {
4824 Register save_register = { i };
4825 __ push(save_register);
4826 }
4827 }
4817 __ Push(Smi::FromInt(size_)); 4828 __ Push(Smi::FromInt(size_));
4818 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 4829 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
4819 if (!target_.is(rax)) { 4830 if (!target_.is(rax)) {
4820 __ movq(target_, rax); 4831 __ movq(target_, rax);
4821 } 4832 }
4833 for (int i = kNumRegs - 1; i >= 0; i--) {
4834 if (registers_to_save_ & (1 << i)) {
4835 Register save_register = { i };
4836 __ push(save_register);
4837 }
4838 }
4822 } 4839 }
4823 4840
4824 4841
4825 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) { 4842 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
4826 Comment cmnt(masm_, "[ RegExp Literal"); 4843 Comment cmnt(masm_, "[ RegExp Literal");
4827 4844
4828 // Retrieve the literals array and check the allocated entry. Begin 4845 // Retrieve the literals array and check the allocated entry. Begin
4829 // with a writable copy of the function of this activation in a 4846 // with a writable copy of the function of this activation in a
4830 // register. 4847 // register.
4831 frame_->PushFunction(); 4848 frame_->PushFunction();
(...skipping 1767 matching lines...) Expand 10 before | Expand all | Expand 10 after
6599 __ bind(&slowcase); 6616 __ bind(&slowcase);
6600 __ CallRuntime(Runtime::kRegExpConstructResult, 3); 6617 __ CallRuntime(Runtime::kRegExpConstructResult, 3);
6601 6618
6602 __ bind(&done); 6619 __ bind(&done);
6603 } 6620 }
6604 frame_->Forget(3); 6621 frame_->Forget(3);
6605 frame_->Push(rax); 6622 frame_->Push(rax);
6606 } 6623 }
6607 6624
6608 6625
// Clones the result object of a RegExp exec call so the original can be
// cached and reused.  The argument must be the (possibly null or
// non-object) result of RegExp exec; anything that does not carry the
// unmodified JSRegExpResult map is returned unchanged.
void CodeGenerator::GenerateRegExpCloneResult(ZoneList<Expression*>* args) {
  ASSERT_EQ(1, args->length());

  Load(args->at(0));
  Result object_result = frame_->Pop();
  object_result.ToRegister(rax);
  object_result.Unuse();
  {
    VirtualFrame::SpilledScope spilled_scope;

    Label done;
    // Smis (e.g. null-ish sentinels) are returned unmodified.
    __ JumpIfSmi(rax, &done);

    // Load JSRegExpResult map into rdx.
    // Arguments to this function should be results of calling RegExp exec,
    // which is either an unmodified JSRegExpResult or null. Anything not having
    // the unmodified JSRegExpResult map is returned unmodified.
    // This also ensures that elements are fast.

    __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX));
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
    __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
    __ cmpq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ j(not_equal, &done);

    // rdx (the map) and rax (the source object) are still needed after
    // the runtime fallback, so ask the deferred code to preserve them.
    DeferredAllocateInNewSpace* allocate_fallback =
        new DeferredAllocateInNewSpace(JSRegExpResult::kSize,
                                       rbx,
                                       rdx.bit() | rax.bit());

    // All set, copy the contents to a new object.
    __ AllocateInNewSpace(JSRegExpResult::kSize,
                          rbx,
                          no_reg,
                          no_reg,
                          allocate_fallback->entry_label(),
                          TAG_OBJECT);
    __ bind(allocate_fallback->exit_label());

    STATIC_ASSERT(JSRegExpResult::kSize % (2 * kPointerSize) == 0);
    // There is an even number of fields, so unroll the loop once
    // for efficiency: copy two pointer-sized fields per iteration,
    // via rdx and rcx, from rax to the new object in rbx.
    for (int i = 0; i < JSRegExpResult::kSize; i += 2 * kPointerSize) {
      STATIC_ASSERT(JSObject::kMapOffset % (2 * kPointerSize) == 0);
      if (i != JSObject::kMapOffset) {
        // The map was already loaded into rdx by the map check above,
        // so skip reloading it from the source object.
        __ movq(rdx, FieldOperand(rax, i));
      }
      __ movq(rcx, FieldOperand(rax, i + kPointerSize));

      STATIC_ASSERT(JSObject::kElementsOffset % (2 * kPointerSize) == 0);
      if (i == JSObject::kElementsOffset) {
        // If the elements array isn't empty, make it copy-on-write
        // before copying it.
        Label empty;
        __ CompareRoot(rdx, Heap::kEmptyFixedArrayRootIndex);
        __ j(equal, &empty);
        // The COW map is in old space, so no write barrier is needed
        // when storing it into the (possibly new-space) elements array.
        ASSERT(!Heap::InNewSpace(Heap::fixed_cow_array_map()));
        __ LoadRoot(kScratchRegister, Heap::kFixedCOWArrayMapRootIndex);
        __ movq(FieldOperand(rdx, HeapObject::kMapOffset), kScratchRegister);
        __ bind(&empty);
      }
      __ movq(FieldOperand(rbx, i), rdx);
      __ movq(FieldOperand(rbx, i + kPointerSize), rcx);
    }
    __ movq(rax, rbx);

    __ bind(&done);
  }
  frame_->Push(rax);
}
6697
6698
6609 class DeferredSearchCache: public DeferredCode { 6699 class DeferredSearchCache: public DeferredCode {
6610 public: 6700 public:
6611 DeferredSearchCache(Register dst, 6701 DeferredSearchCache(Register dst,
6612 Register cache, 6702 Register cache,
6613 Register key, 6703 Register key,
6614 Register scratch) 6704 Register scratch)
6615 : dst_(dst), cache_(cache), key_(key), scratch_(scratch) { 6705 : dst_(dst), cache_(cache), key_(key), scratch_(scratch) {
6616 set_comment("[ DeferredSearchCache"); 6706 set_comment("[ DeferredSearchCache");
6617 } 6707 }
6618 6708
(...skipping 2132 matching lines...) Expand 10 before | Expand all | Expand 10 after
8751 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx); 8841 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
8752 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx); 8842 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx);
8753 __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), rcx); 8843 __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), rcx);
8754 __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx); 8844 __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx);
8755 __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi); 8845 __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi);
8756 __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx); 8846 __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx);
8757 8847
8758 // Initialize the code pointer in the function to be the one 8848 // Initialize the code pointer in the function to be the one
8759 // found in the shared function info object. 8849 // found in the shared function info object.
8760 __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset)); 8850 __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
8761 __ movq(FieldOperand(rax, JSFunction::kCodeOffset), rdx); 8851 __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
8852 __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx);
8762 8853
8763 8854
8764 // Return and remove the on-stack parameter. 8855 // Return and remove the on-stack parameter.
8765 __ ret(1 * kPointerSize); 8856 __ ret(1 * kPointerSize);
8766 8857
8767 // Create a new closure through the slower runtime call. 8858 // Create a new closure through the slower runtime call.
8768 __ bind(&gc); 8859 __ bind(&gc);
8769 __ pop(rcx); // Temporarily remove return address. 8860 __ pop(rcx); // Temporarily remove return address.
8770 __ pop(rdx); 8861 __ pop(rdx);
8771 __ push(rsi); 8862 __ push(rsi);
(...skipping 3989 matching lines...) Expand 10 before | Expand all | Expand 10 after
12761 #undef __ 12852 #undef __
12762 12853
// Stub body: perform the record-write (remembered set update) for the
// object/address pair captured in the stub's fields, then return.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  masm->RecordWriteHelper(object_, addr_, scratch_);
  masm->ret(0);
}
12767 12858
12768 } } // namespace v8::internal 12859 } } // namespace v8::internal
12769 12860
12770 #endif // V8_TARGET_ARCH_X64 12861 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/codegen-x64.h ('k') | src/x64/full-codegen-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698