Chromium Code Reviews

Side by Side Diff: src/x64/builtins-x64.cc

Issue 22715004: Version 3.20.15 (Closed) Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Add TypedArray API and correctness patches r16033 and r16084 (created 7 years, 4 months ago)
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 41 matching lines...)
52 // -- rsp[8] : last argument 52 // -- rsp[8] : last argument
53 // -- ... 53 // -- ...
54 // -- rsp[8 * argc] : first argument (argc == rax) 54 // -- rsp[8 * argc] : first argument (argc == rax)
55 // -- rsp[8 * (argc + 1)] : receiver 55 // -- rsp[8 * (argc + 1)] : receiver
56 // ----------------------------------- 56 // -----------------------------------
57 57
58 // Insert extra arguments. 58 // Insert extra arguments.
59 int num_extra_args = 0; 59 int num_extra_args = 0;
60 if (extra_args == NEEDS_CALLED_FUNCTION) { 60 if (extra_args == NEEDS_CALLED_FUNCTION) {
61 num_extra_args = 1; 61 num_extra_args = 1;
62 __ PopReturnAddressTo(kScratchRegister); 62 __ pop(kScratchRegister); // Save return address.
63 __ push(rdi); 63 __ push(rdi);
64 __ PushReturnAddressFrom(kScratchRegister); 64 __ push(kScratchRegister); // Restore return address.
65 } else { 65 } else {
66 ASSERT(extra_args == NO_EXTRA_ARGUMENTS); 66 ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
67 } 67 }
68 68
69 // JumpToExternalReference expects rax to contain the number of arguments 69 // JumpToExternalReference expects rax to contain the number of arguments
70 // including the receiver and the extra arguments. 70 // including the receiver and the extra arguments.
71 __ addq(rax, Immediate(num_extra_args + 1)); 71 __ addq(rax, Immediate(num_extra_args + 1));
72 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1); 72 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
73 } 73 }
74 74
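Note: the only substantive change in the hunk above is how the return address is juggled while the called function in rdi is pushed as an extra argument. One column uses the MacroAssembler helpers PopReturnAddressTo / PushReturnAddressFrom, the other the raw pop/push pair annotated with "Save return address" / "Restore return address". A minimal sketch of what such helpers would reduce to on x64 follows; the wrapper names are hypothetical stand-ins, and the real MacroAssembler members may carry extra bookkeeping:

// Sketch only: assumes the V8-internal MacroAssembler and Register types
// (src/macro-assembler.h) are available in the including translation unit.
static void PopReturnAddressToSketch(MacroAssembler* masm, Register dst) {
  masm->pop(dst);    // Return address now lives in dst, off the stack.
}
static void PushReturnAddressFromSketch(MacroAssembler* masm, Register src) {
  masm->push(src);   // Return address back on top of the stack, ready for ret.
}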
(...skipping 167 matching lines...)
242 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); 242 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
243 if (count_constructions) { 243 if (count_constructions) {
244 __ movzxbq(rsi, 244 __ movzxbq(rsi,
245 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset)); 245 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
246 __ lea(rsi, 246 __ lea(rsi,
247 Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize)); 247 Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
248 // rsi: offset of first field after pre-allocated fields 248 // rsi: offset of first field after pre-allocated fields
249 if (FLAG_debug_code) { 249 if (FLAG_debug_code) {
250 __ cmpq(rsi, rdi); 250 __ cmpq(rsi, rdi);
251 __ Assert(less_equal, 251 __ Assert(less_equal,
252 kUnexpectedNumberOfPreAllocatedPropertyFields); 252 "Unexpected number of pre-allocated property fields.");
253 } 253 }
254 __ InitializeFieldsWithFiller(rcx, rsi, rdx); 254 __ InitializeFieldsWithFiller(rcx, rsi, rdx);
255 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex); 255 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
256 } 256 }
257 __ InitializeFieldsWithFiller(rcx, rdi, rdx); 257 __ InitializeFieldsWithFiller(rcx, rdi, rdx);
258 258
259 // Add the object tag to make the JSObject real, so that we can continue 259 // Add the object tag to make the JSObject real, so that we can continue
260 // and jump into the continuation code at any time from now on. Any 260 // and jump into the continuation code at any time from now on. Any
261 // failures need to undo the allocation, so that the heap is in a 261 // failures need to undo the allocation, so that the heap is in a
262 // consistent state and verifiable. 262 // consistent state and verifiable.
(...skipping 10 matching lines...)
273 // Calculate total properties described map. 273 // Calculate total properties described map.
274 __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset)); 274 __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
275 __ movzxbq(rcx, 275 __ movzxbq(rcx,
276 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset)); 276 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
277 __ addq(rdx, rcx); 277 __ addq(rdx, rcx);
278 // Calculate unused properties past the end of the in-object properties. 278 // Calculate unused properties past the end of the in-object properties.
279 __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset)); 279 __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
280 __ subq(rdx, rcx); 280 __ subq(rdx, rcx);
281 // Done if no extra properties are to be allocated. 281 // Done if no extra properties are to be allocated.
282 __ j(zero, &allocated); 282 __ j(zero, &allocated);
283 __ Assert(positive, kPropertyAllocationCountFailed); 283 __ Assert(positive, "Property allocation count failed.");
284 284
285 // Scale the number of elements by pointer size and add the header for 285 // Scale the number of elements by pointer size and add the header for
286 // FixedArrays to the start of the next object calculation from above. 286 // FixedArrays to the start of the next object calculation from above.
287 // rbx: JSObject 287 // rbx: JSObject
288 // rdi: start of next object (will be start of FixedArray) 288 // rdi: start of next object (will be start of FixedArray)
289 // rdx: number of elements in properties array 289 // rdx: number of elements in properties array
290 __ Allocate(FixedArray::kHeaderSize, 290 __ Allocate(FixedArray::kHeaderSize,
291 times_pointer_size, 291 times_pointer_size,
292 rdx, 292 rdx,
293 rdi, 293 rdi,
(...skipping 128 matching lines...)
422 __ movq(rax, Operand(rsp, 0)); 422 __ movq(rax, Operand(rsp, 0));
423 423
424 // Restore the arguments count and leave the construct frame. 424 // Restore the arguments count and leave the construct frame.
425 __ bind(&exit); 425 __ bind(&exit);
426 __ movq(rbx, Operand(rsp, kPointerSize)); // Get arguments count. 426 __ movq(rbx, Operand(rsp, kPointerSize)); // Get arguments count.
427 427
428 // Leave construct frame. 428 // Leave construct frame.
429 } 429 }
430 430
431 // Remove caller arguments from the stack and return. 431 // Remove caller arguments from the stack and return.
432 __ PopReturnAddressTo(rcx); 432 __ pop(rcx);
433 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); 433 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
434 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); 434 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
435 __ PushReturnAddressFrom(rcx); 435 __ push(rcx);
436 Counters* counters = masm->isolate()->counters(); 436 Counters* counters = masm->isolate()->counters();
437 __ IncrementCounter(counters->constructed_objects(), 1); 437 __ IncrementCounter(counters->constructed_objects(), 1);
438 __ ret(0); 438 __ ret(0);
439 } 439 }
440 440
441 441
442 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { 442 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
443 Generate_JSConstructStubHelper(masm, false, true); 443 Generate_JSConstructStubHelper(masm, false, true);
444 } 444 }
445 445
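Note: the construct stub above and the arguments adaptor later in the file drop the caller's arguments with the same idiom: pop the return address into a scratch register, advance rsp past argc arguments plus one receiver slot using the Smi-tagged count in rbx (SmiToIndex turns the Smi into a scaled index for the lea), push the return address back, and ret(0). A small self-contained model of the pointer arithmetic, assuming 64-bit pointers and that the extra slot is indeed the receiver:

#include <cstdint>

// Hedged model of lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)):
// given the current stack pointer and the argument count, compute where rsp lands.
constexpr uint64_t kPointerSize = 8;  // x64
constexpr uint64_t NewStackPointer(uint64_t rsp, uint64_t argc) {
  return rsp + argc * kPointerSize + 1 * kPointerSize;  // argc arguments + receiver slot
}
static_assert(NewStackPointer(0x1000, 3) == 0x1000 + 4 * kPointerSize,
              "three arguments plus the receiver are popped");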
(...skipping 270 matching lines...)
716 __ j(not_equal, &not_no_registers, Label::kNear); 716 __ j(not_equal, &not_no_registers, Label::kNear);
717 __ ret(1 * kPointerSize); // Remove state. 717 __ ret(1 * kPointerSize); // Remove state.
718 718
719 __ bind(&not_no_registers); 719 __ bind(&not_no_registers);
720 __ movq(rax, Operand(rsp, 2 * kPointerSize)); 720 __ movq(rax, Operand(rsp, 2 * kPointerSize));
721 __ cmpq(r10, Immediate(FullCodeGenerator::TOS_REG)); 721 __ cmpq(r10, Immediate(FullCodeGenerator::TOS_REG));
722 __ j(not_equal, &not_tos_rax, Label::kNear); 722 __ j(not_equal, &not_tos_rax, Label::kNear);
723 __ ret(2 * kPointerSize); // Remove state, rax. 723 __ ret(2 * kPointerSize); // Remove state, rax.
724 724
725 __ bind(&not_tos_rax); 725 __ bind(&not_tos_rax);
726 __ Abort(kNoCasesLeft); 726 __ Abort("no cases left");
727 } 727 }
728 728
729 729
730 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { 730 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
731 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); 731 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
732 } 732 }
733 733
734 734
735 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { 735 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
736 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); 736 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
(...skipping 28 matching lines...)
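Note: the other difference that recurs throughout this file is the argument given to Assert, Check, and Abort. One column passes free-form message strings, the other passes named constants such as kNoCasesLeft or kUnexpectedInitialMapForArrayFunction; the definitions behind those k-prefixed names are not part of this file and are assumed to live elsewhere in the tree. The two call shapes, copied from the columns of this diff for comparison (illustrative only, not compilable outside the V8 MacroAssembler context):

__ Abort("no cases left");                              vs.  __ Abort(kNoCasesLeft);
__ Assert(equal, "Unexpected String function");         vs.  __ Assert(equal, kUnexpectedStringFunction);
__ Check(equal, "Unexpected initial map for Array function");
                                                        vs.  __ Check(equal, kUnexpectedInitialMapForArrayFunction);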
765 // ... 765 // ...
766 // rsp[8 * n] : Argument 1 766 // rsp[8 * n] : Argument 1
767 // rsp[8 * (n + 1)] : Receiver (function to call) 767 // rsp[8 * (n + 1)] : Receiver (function to call)
768 // 768 //
769 // rax contains the number of arguments, n, not counting the receiver. 769 // rax contains the number of arguments, n, not counting the receiver.
770 // 770 //
771 // 1. Make sure we have at least one argument. 771 // 1. Make sure we have at least one argument.
772 { Label done; 772 { Label done;
773 __ testq(rax, rax); 773 __ testq(rax, rax);
774 __ j(not_zero, &done); 774 __ j(not_zero, &done);
775 __ PopReturnAddressTo(rbx); 775 __ pop(rbx);
776 __ Push(masm->isolate()->factory()->undefined_value()); 776 __ Push(masm->isolate()->factory()->undefined_value());
777 __ PushReturnAddressFrom(rbx); 777 __ push(rbx);
778 __ incq(rax); 778 __ incq(rax);
779 __ bind(&done); 779 __ bind(&done);
780 } 780 }
781 781
782 // 2. Get the function to call (passed as receiver) from the stack, check 782 // 2. Get the function to call (passed as receiver) from the stack, check
783 // if it is a function. 783 // if it is a function.
784 Label slow, non_function; 784 Label slow, non_function;
785 // The function to call is at position n+1 on the stack. 785 // The function to call is at position n+1 on the stack.
786 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); 786 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
787 __ JumpIfSmi(rdi, &non_function); 787 __ JumpIfSmi(rdi, &non_function);
(...skipping 100 matching lines...)
888 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, 888 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
889 // or a function proxy via CALL_FUNCTION_PROXY. 889 // or a function proxy via CALL_FUNCTION_PROXY.
890 { Label function, non_proxy; 890 { Label function, non_proxy;
891 __ testq(rdx, rdx); 891 __ testq(rdx, rdx);
892 __ j(zero, &function); 892 __ j(zero, &function);
893 __ Set(rbx, 0); 893 __ Set(rbx, 0);
894 __ SetCallKind(rcx, CALL_AS_METHOD); 894 __ SetCallKind(rcx, CALL_AS_METHOD);
895 __ cmpq(rdx, Immediate(1)); 895 __ cmpq(rdx, Immediate(1));
896 __ j(not_equal, &non_proxy); 896 __ j(not_equal, &non_proxy);
897 897
898 __ PopReturnAddressTo(rdx); 898 __ pop(rdx); // return address
899 __ push(rdi); // re-add proxy object as additional argument 899 __ push(rdi); // re-add proxy object as additional argument
900 __ PushReturnAddressFrom(rdx); 900 __ push(rdx);
901 __ incq(rax); 901 __ incq(rax);
902 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); 902 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
903 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 903 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
904 RelocInfo::CODE_TARGET); 904 RelocInfo::CODE_TARGET);
905 905
906 __ bind(&non_proxy); 906 __ bind(&non_proxy);
907 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); 907 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
908 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), 908 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
909 RelocInfo::CODE_TARGET); 909 RelocInfo::CODE_TARGET);
910 __ bind(&function); 910 __ bind(&function);
(...skipping 195 matching lines...)
1106 1106
1107 // Get the InternalArray function. 1107 // Get the InternalArray function.
1108 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi); 1108 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
1109 1109
1110 if (FLAG_debug_code) { 1110 if (FLAG_debug_code) {
1111 // Initial map for the builtin InternalArray functions should be maps. 1111 // Initial map for the builtin InternalArray functions should be maps.
1112 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 1112 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1113 // Will both indicate a NULL and a Smi. 1113 // Will both indicate a NULL and a Smi.
1114 STATIC_ASSERT(kSmiTag == 0); 1114 STATIC_ASSERT(kSmiTag == 0);
1115 Condition not_smi = NegateCondition(masm->CheckSmi(rbx)); 1115 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1116 __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction); 1116 __ Check(not_smi, "Unexpected initial map for InternalArray function");
1117 __ CmpObjectType(rbx, MAP_TYPE, rcx); 1117 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1118 __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction); 1118 __ Check(equal, "Unexpected initial map for InternalArray function");
1119 } 1119 }
1120 1120
1121 // Run the native code for the InternalArray function called as a normal 1121 // Run the native code for the InternalArray function called as a normal
1122 // function. 1122 // function.
1123 // tail call a stub 1123 // tail call a stub
1124 InternalArrayConstructorStub stub(masm->isolate()); 1124 InternalArrayConstructorStub stub(masm->isolate());
1125 __ TailCallStub(&stub); 1125 __ TailCallStub(&stub);
1126 } 1126 }
1127 1127
1128 1128
1129 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { 1129 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1130 // ----------- S t a t e ------------- 1130 // ----------- S t a t e -------------
1131 // -- rax : argc 1131 // -- rax : argc
1132 // -- rsp[0] : return address 1132 // -- rsp[0] : return address
1133 // -- rsp[8] : last argument 1133 // -- rsp[8] : last argument
1134 // ----------------------------------- 1134 // -----------------------------------
1135 Label generic_array_code; 1135 Label generic_array_code;
1136 1136
1137 // Get the Array function. 1137 // Get the Array function.
1138 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi); 1138 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);
1139 1139
1140 if (FLAG_debug_code) { 1140 if (FLAG_debug_code) {
1141 // Initial map for the builtin Array functions should be maps. 1141 // Initial map for the builtin Array functions should be maps.
1142 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 1142 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1143 // Will both indicate a NULL and a Smi. 1143 // Will both indicate a NULL and a Smi.
1144 STATIC_ASSERT(kSmiTag == 0); 1144 STATIC_ASSERT(kSmiTag == 0);
1145 Condition not_smi = NegateCondition(masm->CheckSmi(rbx)); 1145 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1146 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); 1146 __ Check(not_smi, "Unexpected initial map for Array function");
1147 __ CmpObjectType(rbx, MAP_TYPE, rcx); 1147 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1148 __ Check(equal, kUnexpectedInitialMapForArrayFunction); 1148 __ Check(equal, "Unexpected initial map for Array function");
1149 } 1149 }
1150 1150
1151 // Run the native code for the Array function called as a normal function. 1151 // Run the native code for the Array function called as a normal function.
1152 // tail call a stub 1152 // tail call a stub
1153 Handle<Object> undefined_sentinel( 1153 Handle<Object> undefined_sentinel(
1154 masm->isolate()->heap()->undefined_value(), 1154 masm->isolate()->heap()->undefined_value(),
1155 masm->isolate()); 1155 masm->isolate());
1156 __ Move(rbx, undefined_sentinel); 1156 __ Move(rbx, undefined_sentinel);
1157 ArrayConstructorStub stub(masm->isolate()); 1157 ArrayConstructorStub stub(masm->isolate());
1158 __ TailCallStub(&stub); 1158 __ TailCallStub(&stub);
1159 } 1159 }
1160 1160
1161 1161
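Note: the FLAG_debug_code blocks in Generate_InternalArrayCode and Generate_ArrayCode both rest on the comment "Will both indicate a NULL and a Smi": V8 Smis carry a zero low tag bit (hence the STATIC_ASSERT(kSmiTag == 0)), so an all-zero word such as a NULL initial map passes the Smi test as well, and a single "is not a Smi" check rejects both bad cases before CmpObjectType confirms the value is a Map. A small self-contained model of that tag test; the one-bit mask is the usual Smi tag width and is an assumption here, since the constant is defined elsewhere in V8:

#include <cstdint>

// Model of the low-bit tag check behind CheckSmi, assuming kSmiTag == 0 and a
// one-bit tag; heap-object pointers carry a set low bit instead.
constexpr uint64_t kSmiTagMask = 1;
constexpr bool LooksLikeSmi(uint64_t word) { return (word & kSmiTagMask) == 0; }

static_assert(LooksLikeSmi(0),                 "an all-zero (NULL) word also passes");
static_assert(LooksLikeSmi(42u << 1),          "a properly tagged Smi passes");
static_assert(!LooksLikeSmi(0x100000000001),   "a tagged heap pointer does not");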
1162 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { 1162 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1163 // ----------- S t a t e ------------- 1163 // ----------- S t a t e -------------
1164 // -- rax : number of arguments 1164 // -- rax : number of arguments
1165 // -- rdi : constructor function 1165 // -- rdi : constructor function
1166 // -- rsp[0] : return address 1166 // -- rsp[0] : return address
1167 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1167 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1168 // -- rsp[(argc + 1) * 8] : receiver 1168 // -- rsp[(argc + 1) * 8] : receiver
1169 // ----------------------------------- 1169 // -----------------------------------
1170 Counters* counters = masm->isolate()->counters(); 1170 Counters* counters = masm->isolate()->counters();
1171 __ IncrementCounter(counters->string_ctor_calls(), 1); 1171 __ IncrementCounter(counters->string_ctor_calls(), 1);
1172 1172
1173 if (FLAG_debug_code) { 1173 if (FLAG_debug_code) {
1174 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx); 1174 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
1175 __ cmpq(rdi, rcx); 1175 __ cmpq(rdi, rcx);
1176 __ Assert(equal, kUnexpectedStringFunction); 1176 __ Assert(equal, "Unexpected String function");
1177 } 1177 }
1178 1178
1179 // Load the first argument into rax and get rid of the rest 1179 // Load the first argument into rax and get rid of the rest
1180 // (including the receiver). 1180 // (including the receiver).
1181 Label no_arguments; 1181 Label no_arguments;
1182 __ testq(rax, rax); 1182 __ testq(rax, rax);
1183 __ j(zero, &no_arguments); 1183 __ j(zero, &no_arguments);
1184 __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0)); 1184 __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
1185 __ PopReturnAddressTo(rcx); 1185 __ pop(rcx);
1186 __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize)); 1186 __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1187 __ PushReturnAddressFrom(rcx); 1187 __ push(rcx);
1188 __ movq(rax, rbx); 1188 __ movq(rax, rbx);
1189 1189
1190 // Lookup the argument in the number to string cache. 1190 // Lookup the argument in the number to string cache.
1191 Label not_cached, argument_is_string; 1191 Label not_cached, argument_is_string;
1192 NumberToStringStub::GenerateLookupNumberStringCache( 1192 NumberToStringStub::GenerateLookupNumberStringCache(
1193 masm, 1193 masm,
1194 rax, // Input. 1194 rax, // Input.
1195 rbx, // Result. 1195 rbx, // Result.
1196 rcx, // Scratch 1. 1196 rcx, // Scratch 1.
1197 rdx, // Scratch 2. 1197 rdx, // Scratch 2.
(...skipping 14 matching lines...)
1212 rcx, // New allocation top (we ignore it). 1212 rcx, // New allocation top (we ignore it).
1213 no_reg, 1213 no_reg,
1214 &gc_required, 1214 &gc_required,
1215 TAG_OBJECT); 1215 TAG_OBJECT);
1216 1216
1217 // Set the map. 1217 // Set the map.
1218 __ LoadGlobalFunctionInitialMap(rdi, rcx); 1218 __ LoadGlobalFunctionInitialMap(rdi, rcx);
1219 if (FLAG_debug_code) { 1219 if (FLAG_debug_code) {
1220 __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset), 1220 __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
1221 Immediate(JSValue::kSize >> kPointerSizeLog2)); 1221 Immediate(JSValue::kSize >> kPointerSizeLog2));
1222 __ Assert(equal, kUnexpectedStringWrapperInstanceSize); 1222 __ Assert(equal, "Unexpected string wrapper instance size");
1223 __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0)); 1223 __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
1224 __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper); 1224 __ Assert(equal, "Unexpected unused properties of string wrapper");
1225 } 1225 }
1226 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rcx); 1226 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
1227 1227
1228 // Set properties and elements. 1228 // Set properties and elements.
1229 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); 1229 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
1230 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rcx); 1230 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
1231 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx); 1231 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
1232 1232
1233 // Set the value. 1233 // Set the value.
1234 __ movq(FieldOperand(rax, JSValue::kValueOffset), rbx); 1234 __ movq(FieldOperand(rax, JSValue::kValueOffset), rbx);
(...skipping 26 matching lines...)
1261 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); 1261 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1262 __ pop(rdi); 1262 __ pop(rdi);
1263 } 1263 }
1264 __ movq(rbx, rax); 1264 __ movq(rbx, rax);
1265 __ jmp(&argument_is_string); 1265 __ jmp(&argument_is_string);
1266 1266
1267 // Load the empty string into rbx, remove the receiver from the 1267 // Load the empty string into rbx, remove the receiver from the
1268 // stack, and jump back to the case where the argument is a string. 1268 // stack, and jump back to the case where the argument is a string.
1269 __ bind(&no_arguments); 1269 __ bind(&no_arguments);
1270 __ LoadRoot(rbx, Heap::kempty_stringRootIndex); 1270 __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
1271 __ PopReturnAddressTo(rcx); 1271 __ pop(rcx);
1272 __ lea(rsp, Operand(rsp, kPointerSize)); 1272 __ lea(rsp, Operand(rsp, kPointerSize));
1273 __ PushReturnAddressFrom(rcx); 1273 __ push(rcx);
1274 __ jmp(&argument_is_string); 1274 __ jmp(&argument_is_string);
1275 1275
1276 // At this point the argument is already a string. Call runtime to 1276 // At this point the argument is already a string. Call runtime to
1277 // create a string wrapper. 1277 // create a string wrapper.
1278 __ bind(&gc_required); 1278 __ bind(&gc_required);
1279 __ IncrementCounter(counters->string_ctor_gc_required(), 1); 1279 __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1280 { 1280 {
1281 FrameScope scope(masm, StackFrame::INTERNAL); 1281 FrameScope scope(masm, StackFrame::INTERNAL);
1282 __ push(rbx); 1282 __ push(rbx);
1283 __ CallRuntime(Runtime::kNewStringWrapper, 1); 1283 __ CallRuntime(Runtime::kNewStringWrapper, 1);
(...skipping 22 matching lines...)
1306 1306
1307 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { 1307 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1308 // Retrieve the number of arguments from the stack. Number is a Smi. 1308 // Retrieve the number of arguments from the stack. Number is a Smi.
1309 __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset)); 1309 __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1310 1310
1311 // Leave the frame. 1311 // Leave the frame.
1312 __ movq(rsp, rbp); 1312 __ movq(rsp, rbp);
1313 __ pop(rbp); 1313 __ pop(rbp);
1314 1314
1315 // Remove caller arguments from the stack. 1315 // Remove caller arguments from the stack.
1316 __ PopReturnAddressTo(rcx); 1316 __ pop(rcx);
1317 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); 1317 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
1318 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); 1318 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
1319 __ PushReturnAddressFrom(rcx); 1319 __ push(rcx);
1320 } 1320 }
1321 1321
1322 1322
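Note: LeaveArgumentsAdaptorFrame first reads the Smi-encoded argument count out of the adaptor frame (the kLengthOffset slot) and only then tears the frame down, since that slot belongs to the frame being discarded. The teardown pair

__ movq(rsp, rbp);
__ pop(rbp);

is the classic epilogue sequence, equivalent to a single x86-64 leave instruction (rsp is reset to rbp, then the caller's rbp is popped); after that, the same return-address/lea idiom sketched earlier removes the caller's arguments from the stack.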
1323 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { 1323 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1324 // ----------- S t a t e ------------- 1324 // ----------- S t a t e -------------
1325 // -- rax : actual number of arguments 1325 // -- rax : actual number of arguments
1326 // -- rbx : expected number of arguments 1326 // -- rbx : expected number of arguments
1327 // -- rcx : call kind information 1327 // -- rcx : call kind information
1328 // -- rdx : code entry to call 1328 // -- rdx : code entry to call
1329 // ----------------------------------- 1329 // -----------------------------------
(...skipping 104 matching lines...)
1434 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR); 1434 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
1435 generator.Generate(); 1435 generator.Generate();
1436 } 1436 }
1437 1437
1438 1438
1439 #undef __ 1439 #undef __
1440 1440
1441 } } // namespace v8::internal 1441 } } // namespace v8::internal
1442 1442
1443 #endif // V8_TARGET_ARCH_X64 1443 #endif // V8_TARGET_ARCH_X64
