Chromium Code Reviews

Side by Side Diff: src/x64/builtins-x64.cc

Issue 196139: X64: Convert smis to holding 32 bits of payload. (Closed)
Patch Set: Addressed review comments. Forwarded to head. Created 11 years, 2 months ago
1 // Copyright 2009 the V8 project authors. All rights reserved. 1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 35 matching lines...)
46 __ incq(rax); 46 __ incq(rax);
47 __ JumpToRuntime(ExternalReference(id), 1); 47 __ JumpToRuntime(ExternalReference(id), 1);
48 } 48 }
49 49
50 50
51 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { 51 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
52 __ push(rbp); 52 __ push(rbp);
53 __ movq(rbp, rsp); 53 __ movq(rbp, rsp);
54 54
55 // Store the arguments adaptor context sentinel. 55 // Store the arguments adaptor context sentinel.
56 __ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 56 __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
57 57
58 // Push the function on the stack. 58 // Push the function on the stack.
59 __ push(rdi); 59 __ push(rdi);
60 60
61 // Preserve the number of arguments on the stack. Must preserve both 61 // Preserve the number of arguments on the stack. Must preserve both
62 // rax and rbx because these registers are used when copying the 62 // rax and rbx because these registers are used when copying the
63 // arguments and the receiver. 63 // arguments and the receiver.
64 __ Integer32ToSmi(rcx, rax); 64 __ Integer32ToSmi(rcx, rax);
65 __ push(rcx); 65 __ push(rcx);
66 } 66 }
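
Note on the chunk above: the sentinel is now pushed with __ Push(Smi::FromInt(...)) instead of push(Immediate(Smi::FromInt(...))), and the argument count is tagged with Integer32ToSmi before being pushed. A minimal standalone sketch of the encoding this CL moves to, assuming the 32-bit payload lands in the upper half of the word (kSmiShift, SmiFromInt and SmiToInt below are illustrative stand-ins, not V8's definitions):

#include <cassert>
#include <cstdint>

// Assumed layout for this CL: a smi keeps its 32-bit payload in bits 32..63
// and zeros (the tag) in the low 32 bits.
constexpr int kSmiShift = 32;

int64_t SmiFromInt(int32_t value) {   // stand-in for Smi::FromInt
  // Shift via uint64_t so that negative payloads stay well defined.
  return static_cast<int64_t>(static_cast<uint64_t>(value) << kSmiShift);
}

int32_t SmiToInt(int64_t smi) {       // stand-in for SmiToInteger32
  return static_cast<int32_t>(smi >> kSmiShift);  // arithmetic shift assumed
}

int main() {
  // The round trip covers the full 32-bit range.
  assert(SmiToInt(SmiFromInt(-1)) == -1);
  assert(SmiToInt(SmiFromInt(0x7fffffff)) == 0x7fffffff);

  // Any non-zero smi is now a genuine 64-bit constant, so it no longer fits
  // in the sign-extended 32-bit immediate accepted by push/addq; hence the
  // macro-assembler helpers such as Push(Smi*) used throughout this patch.
  assert(SmiFromInt(1) > INT32_MAX);
  return 0;
}
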
67 67
68 68
69 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { 69 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
70 // Retrieve the number of arguments from the stack. Number is a Smi. 70 // Retrieve the number of arguments from the stack. Number is a Smi.
71 __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset)); 71 __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
72 72
73 // Leave the frame. 73 // Leave the frame.
74 __ movq(rsp, rbp); 74 __ movq(rsp, rbp);
75 __ pop(rbp); 75 __ pop(rbp);
76 76
77 // Remove caller arguments from the stack. 77 // Remove caller arguments from the stack.
78 // rbx holds a Smi, so we convert to dword offset by multiplying by 4.
79 // TODO(smi): Find a way to abstract indexing by a smi.
80 ASSERT_EQ(kSmiTagSize, 1 && kSmiTag == 0);
81 ASSERT_EQ(kPointerSize, (1 << kSmiTagSize) * 4);
82 // TODO(smi): Find way to abstract indexing by a smi.
83 __ pop(rcx); 78 __ pop(rcx);
84 // 1 * kPointerSize is offset of receiver. 79 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
85 __ lea(rsp, Operand(rsp, rbx, times_half_pointer_size, 1 * kPointerSize)); 80 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
86 __ push(rcx); 81 __ push(rcx);
87 } 82 }
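
The switch above from the manual times_half_pointer_size addressing to SmiToIndex is the same layout change seen from the indexing side. A rough before/after sketch of the offset arithmetic, under the assumption that the old encoding was value << 1 and the new one value << 32 (the helper names are paraphrases, not V8's code):

#include <cassert>
#include <cstdint>

constexpr int kPointerSizeLog2 = 3;   // 8-byte stack slots on x64

// Old layout: the tagged word is already value * 2, so scaling it by 4
// (times_half_pointer_size) yields a byte offset of value * 8.
int64_t OldSmiToByteOffset(int64_t old_smi) {
  return old_smi * 4;
}

// New layout: the payload sits in bits 32..63, so it has to be shifted down
// before it can act as a scaled index; a helper like SmiToIndex presumably
// hands back a (register, scale) pair that achieves this.
int64_t NewSmiToByteOffset(int64_t new_smi) {
  return (new_smi >> 32) << kPointerSizeLog2;
}

int main() {
  const int64_t args = 5;   // e.g. five caller arguments to pop
  assert(OldSmiToByteOffset(args << 1) == args * 8);
  assert(NewSmiToByteOffset(args << 32) == args * 8);
  return 0;
}
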
88 83
89 84
90 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { 85 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
91 // ----------- S t a t e ------------- 86 // ----------- S t a t e -------------
92 // -- rax : actual number of arguments 87 // -- rax : actual number of arguments
93 // -- rbx : expected number of arguments 88 // -- rbx : expected number of arguments
94 // -- rdx : code entry to call 89 // -- rdx : code entry to call
95 // ----------------------------------- 90 // -----------------------------------
(...skipping 239 matching lines...)
335 __ movq(rcx, rsp); 330 __ movq(rcx, rsp);
336 __ subq(rcx, Operand(kScratchRegister, 0)); 331 __ subq(rcx, Operand(kScratchRegister, 0));
337 // rcx contains the difference between the stack limit and the stack top. 332 // rcx contains the difference between the stack limit and the stack top.
338 // We use it below to check that there is enough room for the arguments. 333 // We use it below to check that there is enough room for the arguments.
339 __ j(above, &no_preemption); 334 __ j(above, &no_preemption);
340 335
341 // Preemption! 336 // Preemption!
342 // Because runtime functions always remove the receiver from the stack, we 337 // Because runtime functions always remove the receiver from the stack, we
343 // have to fake one to avoid underflowing the stack. 338 // have to fake one to avoid underflowing the stack.
344 __ push(rax); 339 __ push(rax);
345 __ push(Immediate(Smi::FromInt(0))); 340 __ Push(Smi::FromInt(0));
346 341
347 // Do call to runtime routine. 342 // Do call to runtime routine.
348 __ CallRuntime(Runtime::kStackGuard, 1); 343 __ CallRuntime(Runtime::kStackGuard, 1);
349 __ pop(rax); 344 __ pop(rax);
350 __ jmp(&retry_preemption); 345 __ jmp(&retry_preemption);
351 346
352 __ bind(&no_preemption); 347 __ bind(&no_preemption);
353 348
354 Label okay; 349 Label okay;
355 // Make rdx the space we need for the array when it is unrolled onto the 350 // Make rdx the space we need for the array when it is unrolled onto the
(...skipping 71 matching lines...)
427 // call. A test instruction after the call is used to indicate that 422 // call. A test instruction after the call is used to indicate that
428 // we have generated an inline version of the keyed load. In this 423 // we have generated an inline version of the keyed load. In this
429 // case, we know that we are not generating a test instruction next. 424 // case, we know that we are not generating a test instruction next.
430 425
431 // Remove IC arguments from the stack and push the nth argument. 426 // Remove IC arguments from the stack and push the nth argument.
432 __ addq(rsp, Immediate(2 * kPointerSize)); 427 __ addq(rsp, Immediate(2 * kPointerSize));
433 __ push(rax); 428 __ push(rax);
434 429
435 // Update the index on the stack and in register rax. 430 // Update the index on the stack and in register rax.
436 __ movq(rax, Operand(rbp, kIndexOffset)); 431 __ movq(rax, Operand(rbp, kIndexOffset));
437 __ addq(rax, Immediate(Smi::FromInt(1))); 432 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
438 __ movq(Operand(rbp, kIndexOffset), rax); 433 __ movq(Operand(rbp, kIndexOffset), rax);
439 434
440 __ bind(&entry); 435 __ bind(&entry);
441 __ cmpq(rax, Operand(rbp, kLimitOffset)); 436 __ cmpq(rax, Operand(rbp, kLimitOffset));
442 __ j(not_equal, &loop); 437 __ j(not_equal, &loop);
443 438
444 // Invoke the function. 439 // Invoke the function.
445 ParameterCount actual(rax); 440 ParameterCount actual(rax);
446 __ SmiToInteger32(rax, rax); 441 __ SmiToInteger32(rax, rax);
447 __ movq(rdi, Operand(rbp, kFunctionOffset)); 442 __ movq(rdi, Operand(rbp, kFunctionOffset));
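
The index bump above also changes: with the payload in the upper 32 bits, Smi::FromInt(1) is 1 << 32 and no longer fits in the sign-extended 32-bit immediate that addq accepts, so a helper such as SmiAddConstant is needed. The addition itself stays a plain 64-bit add of tagged words, as this small sketch (same hypothetical encoding as above, overflow handling omitted) illustrates:

#include <cassert>
#include <cstdint>

constexpr int kSmiShift = 32;

int64_t SmiFromInt(int32_t value) {
  return static_cast<int64_t>(static_cast<uint64_t>(value) << kSmiShift);
}

// Adding a constant to a smi is ordinary 64-bit addition of two tagged
// values; the x64 wrinkle is only that the tagged constant cannot be an
// imm32 operand, so it has to be materialized some other way.
int64_t SmiAddConstantSketch(int64_t smi, int32_t constant) {
  return smi + SmiFromInt(constant);
}

int main() {
  int64_t index = SmiFromInt(7);
  index = SmiAddConstantSketch(index, 1);   // analogous to index += Smi(1)
  assert(index == SmiFromInt(8));
  return 0;
}
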
(...skipping 52 matching lines...)
500 495
501 // Allocated the JSArray. Now initialize the fields except for the elements 496 // Allocated the JSArray. Now initialize the fields except for the elements
502 // array. 497 // array.
503 // result: JSObject 498 // result: JSObject
504 // scratch1: initial map 499 // scratch1: initial map
505 // scratch2: start of next object 500 // scratch2: start of next object
506 __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1); 501 __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
507 __ Move(FieldOperand(result, JSArray::kPropertiesOffset), 502 __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
508 Factory::empty_fixed_array()); 503 Factory::empty_fixed_array());
509 // Field JSArray::kElementsOffset is initialized later. 504 // Field JSArray::kElementsOffset is initialized later.
510 __ movq(FieldOperand(result, JSArray::kLengthOffset), Immediate(0)); 505 __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));
511 506
512 // If no storage is requested for the elements array just set the empty 507 // If no storage is requested for the elements array just set the empty
513 // fixed array. 508 // fixed array.
514 if (initial_capacity == 0) { 509 if (initial_capacity == 0) {
515 __ Move(FieldOperand(result, JSArray::kElementsOffset), 510 __ Move(FieldOperand(result, JSArray::kElementsOffset),
516 Factory::empty_fixed_array()); 511 Factory::empty_fixed_array());
517 return; 512 return;
518 } 513 }
519 514
520 // Calculate the location of the elements array and set elements array member 515 // Calculate the location of the elements array and set elements array member
(...skipping 190 matching lines...)
711 __ IncrementCounter(&Counters::array_function_native, 1); 706 __ IncrementCounter(&Counters::array_function_native, 1);
712 __ movq(rax, rbx); 707 __ movq(rax, rbx);
713 __ ret(kPointerSize); 708 __ ret(kPointerSize);
714 709
715 // Check for one argument. Bail out if argument is not smi or if it is 710 // Check for one argument. Bail out if argument is not smi or if it is
716 // negative. 711 // negative.
717 __ bind(&argc_one_or_more); 712 __ bind(&argc_one_or_more);
718 __ cmpq(rax, Immediate(1)); 713 __ cmpq(rax, Immediate(1));
719 __ j(not_equal, &argc_two_or_more); 714 __ j(not_equal, &argc_two_or_more);
720 __ movq(rdx, Operand(rsp, kPointerSize)); // Get the argument from the stack. 715 __ movq(rdx, Operand(rsp, kPointerSize)); // Get the argument from the stack.
721 Condition not_positive_smi = __ CheckNotPositiveSmi(rdx); 716 __ JumpIfNotPositiveSmi(rdx, call_generic_code);
722 __ j(not_positive_smi, call_generic_code);
723 717
724 // Handle construction of an empty array of a certain size. Bail out if size 718 // Handle construction of an empty array of a certain size. Bail out if size
725 // is too large to actually allocate an elements array. 719 // is too large to actually allocate an elements array.
726 __ JumpIfSmiGreaterEqualsConstant(rdx, 720 __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
727 JSObject::kInitialMaxFastElementArray, 721 __ j(greater_equal, call_generic_code);
728 call_generic_code);
729 722
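
The bound check above now uses SmiCompare against Smi::FromInt(JSObject::kInitialMaxFastElementArray) followed by an ordinary conditional jump. This can operate on the tagged words directly because tagging by a left shift preserves signed order, so comparing two tagged values yields the same flags as comparing their payloads; a tiny sketch under the same assumed encoding:

#include <cassert>
#include <cstdint>

constexpr int kSmiShift = 32;

int64_t SmiFromInt(int32_t value) {
  return static_cast<int64_t>(static_cast<uint64_t>(value) << kSmiShift);
}

int main() {
  // Left-shift tagging is monotone, so the comparison of tagged words and
  // the comparison of raw payloads always agree.
  assert((SmiFromInt(3) < SmiFromInt(10000)) == (3 < 10000));
  assert((SmiFromInt(-5) < SmiFromInt(2)) == (-5 < 2));
  assert((SmiFromInt(10000) >= SmiFromInt(10000)) == true);
  return 0;
}
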
730 // rax: argc 723 // rax: argc
731 // rdx: array_size (smi) 724 // rdx: array_size (smi)
732 // rdi: constructor 725 // rdi: constructor
733 // esp[0]: return address 726 // esp[0]: return address
734 // esp[8]: argument 727 // esp[8]: argument
735 AllocateJSArray(masm, 728 AllocateJSArray(masm,
736 rdi, 729 rdi,
737 rdx, 730 rdx,
738 rbx, 731 rbx,
(...skipping 79 matching lines...)
818 Label generic_array_code; 811 Label generic_array_code;
819 812
820 // Get the Array function. 813 // Get the Array function.
821 GenerateLoadArrayFunction(masm, rdi); 814 GenerateLoadArrayFunction(masm, rdi);
822 815
823 if (FLAG_debug_code) { 816 if (FLAG_debug_code) {
824 // Initial map for the builtin Array function should be a map. 817 // Initial map for the builtin Array function should be a map.
825 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 818 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
826 // Will both indicate a NULL and a Smi. 819 // Will both indicate a NULL and a Smi.
827 ASSERT(kSmiTag == 0); 820 ASSERT(kSmiTag == 0);
828 Condition not_smi = __ CheckNotSmi(rbx); 821 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
829 __ Assert(not_smi, "Unexpected initial map for Array function"); 822 __ Check(not_smi, "Unexpected initial map for Array function");
830 __ CmpObjectType(rbx, MAP_TYPE, rcx); 823 __ CmpObjectType(rbx, MAP_TYPE, rcx);
831 __ Assert(equal, "Unexpected initial map for Array function"); 824 __ Check(equal, "Unexpected initial map for Array function");
832 } 825 }
833 826
834 // Run the native code for the Array function called as a normal function. 827 // Run the native code for the Array function called as a normal function.
835 ArrayNativeCode(masm, &generic_array_code); 828 ArrayNativeCode(masm, &generic_array_code);
836 829
837 // Jump to the generic array code in case the specialized code cannot handle 830 // Jump to the generic array code in case the specialized code cannot handle
838 // the construction. 831 // the construction.
839 __ bind(&generic_array_code); 832 __ bind(&generic_array_code);
840 Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric); 833 Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
841 Handle<Code> array_code(code); 834 Handle<Code> array_code(code);
842 __ Jump(array_code, RelocInfo::CODE_TARGET); 835 __ Jump(array_code, RelocInfo::CODE_TARGET);
843 } 836 }
844 837
845 838
846 void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) { 839 void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
847 // ----------- S t a t e ------------- 840 // ----------- S t a t e -------------
848 // -- rax : argc 841 // -- rax : argc
849 // -- rdi : constructor 842 // -- rdi : constructor
850 // -- rsp[0] : return address 843 // -- rsp[0] : return address
851 // -- rsp[8] : last argument 844 // -- rsp[8] : last argument
852 // ----------------------------------- 845 // -----------------------------------
853 Label generic_constructor; 846 Label generic_constructor;
854 847
855 if (FLAG_debug_code) { 848 if (FLAG_debug_code) {
856 // The array construct code is only set for the builtin Array function which 849 // The array construct code is only set for the builtin Array function which
857 // does always have a map. 850 // does always have a map.
858 GenerateLoadArrayFunction(masm, rbx); 851 GenerateLoadArrayFunction(masm, rbx);
859 __ cmpq(rdi, rbx); 852 __ cmpq(rdi, rbx);
860 __ Assert(equal, "Unexpected Array function"); 853 __ Check(equal, "Unexpected Array function");
861 // Initial map for the builtin Array function should be a map. 854 // Initial map for the builtin Array function should be a map.
862 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 855 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
863 // Will both indicate a NULL and a Smi. 856 // Will both indicate a NULL and a Smi.
864 ASSERT(kSmiTag == 0); 857 ASSERT(kSmiTag == 0);
865 Condition not_smi = __ CheckNotSmi(rbx); 858 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
866 __ Assert(not_smi, "Unexpected initial map for Array function"); 859 __ Check(not_smi, "Unexpected initial map for Array function");
867 __ CmpObjectType(rbx, MAP_TYPE, rcx); 860 __ CmpObjectType(rbx, MAP_TYPE, rcx);
868 __ Assert(equal, "Unexpected initial map for Array function"); 861 __ Check(equal, "Unexpected initial map for Array function");
869 } 862 }
870 863
871 // Run the native code for the Array function called as constructor. 864 // Run the native code for the Array function called as constructor.
872 ArrayNativeCode(masm, &generic_constructor); 865 ArrayNativeCode(masm, &generic_constructor);
873 866
874 // Jump to the generic construct code in case the specialized code cannot 867 // Jump to the generic construct code in case the specialized code cannot
875 // handle the construction. 868 // handle the construction.
876 __ bind(&generic_constructor); 869 __ bind(&generic_constructor);
877 Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric); 870 Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
878 Handle<Code> generic_construct_stub(code); 871 Handle<Code> generic_construct_stub(code);
(...skipping 16 matching lines...)
895 888
896 // Jump to the function-specific construct stub. 889 // Jump to the function-specific construct stub.
897 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 890 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
898 __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset)); 891 __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
899 __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize)); 892 __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
900 __ jmp(rbx); 893 __ jmp(rbx);
901 894
902 // edi: called object 895 // edi: called object
903 // eax: number of arguments 896 // eax: number of arguments
904 __ bind(&non_function_call); 897 __ bind(&non_function_call);
905
906 // Set expected number of arguments to zero (not changing eax). 898 // Set expected number of arguments to zero (not changing eax).
907 __ movq(rbx, Immediate(0)); 899 __ movq(rbx, Immediate(0));
908 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); 900 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
909 __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)), 901 __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
910 RelocInfo::CODE_TARGET); 902 RelocInfo::CODE_TARGET);
911 } 903 }
912 904
913 905
914 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { 906 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
915 // Enter a construct frame. 907 // Enter a construct frame.
(...skipping 220 matching lines...)
1136 // on-stack receiver as the result. 1128 // on-stack receiver as the result.
1137 __ bind(&use_receiver); 1129 __ bind(&use_receiver);
1138 __ movq(rax, Operand(rsp, 0)); 1130 __ movq(rax, Operand(rsp, 0));
1139 1131
1140 // Restore the arguments count and leave the construct frame. 1132 // Restore the arguments count and leave the construct frame.
1141 __ bind(&exit); 1133 __ bind(&exit);
1142 __ movq(rbx, Operand(rsp, kPointerSize)); // get arguments count 1134 __ movq(rbx, Operand(rsp, kPointerSize)); // get arguments count
1143 __ LeaveConstructFrame(); 1135 __ LeaveConstructFrame();
1144 1136
1145 // Remove caller arguments from the stack and return. 1137 // Remove caller arguments from the stack and return.
1146 ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
1147 // TODO(smi): Find a way to abstract indexing by a smi.
1148 __ pop(rcx); 1138 __ pop(rcx);
1149 // 1 * kPointerSize is offset of receiver. 1139 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
1150 __ lea(rsp, Operand(rsp, rbx, times_half_pointer_size, 1 * kPointerSize)); 1140 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
1151 __ push(rcx); 1141 __ push(rcx);
1152 __ IncrementCounter(&Counters::constructed_objects, 1); 1142 __ IncrementCounter(&Counters::constructed_objects, 1);
1153 __ ret(0); 1143 __ ret(0);
1154 } 1144 }
1155 1145
1156 1146
1157 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, 1147 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
1158 bool is_construct) { 1148 bool is_construct) {
1159 // Expects five C++ function parameters. 1149 // Expects five C++ function parameters.
1160 // - Address entry (ignored) 1150 // - Address entry (ignored)
(...skipping 110 matching lines...)
1271 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { 1261 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
1272 Generate_JSEntryTrampolineHelper(masm, false); 1262 Generate_JSEntryTrampolineHelper(masm, false);
1273 } 1263 }
1274 1264
1275 1265
1276 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { 1266 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
1277 Generate_JSEntryTrampolineHelper(masm, true); 1267 Generate_JSEntryTrampolineHelper(masm, true);
1278 } 1268 }
1279 1269
1280 } } // namespace v8::internal 1270 } } // namespace v8::internal