Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(277)

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 185653004: Experimental parser: merge to r19637 (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/code-stubs-x64.h ('k') | src/x64/codegen-x64.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
43 Isolate* isolate, 43 Isolate* isolate,
44 CodeStubInterfaceDescriptor* descriptor) { 44 CodeStubInterfaceDescriptor* descriptor) {
45 static Register registers[] = { rbx }; 45 static Register registers[] = { rbx };
46 descriptor->register_param_count_ = 1; 46 descriptor->register_param_count_ = 1;
47 descriptor->register_params_ = registers; 47 descriptor->register_params_ = registers;
48 descriptor->deoptimization_handler_ = 48 descriptor->deoptimization_handler_ =
49 Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry; 49 Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry;
50 } 50 }
51 51
52 52
53 void FastNewContextStub::InitializeInterfaceDescriptor(
54 Isolate* isolate,
55 CodeStubInterfaceDescriptor* descriptor) {
56 static Register registers[] = { rdi };
57 descriptor->register_param_count_ = 1;
58 descriptor->register_params_ = registers;
59 descriptor->deoptimization_handler_ = NULL;
60 }
61
62
53 void ToNumberStub::InitializeInterfaceDescriptor( 63 void ToNumberStub::InitializeInterfaceDescriptor(
54 Isolate* isolate, 64 Isolate* isolate,
55 CodeStubInterfaceDescriptor* descriptor) { 65 CodeStubInterfaceDescriptor* descriptor) {
56 static Register registers[] = { rax }; 66 static Register registers[] = { rax };
57 descriptor->register_param_count_ = 1; 67 descriptor->register_param_count_ = 1;
58 descriptor->register_params_ = registers; 68 descriptor->register_params_ = registers;
59 descriptor->deoptimization_handler_ = NULL; 69 descriptor->deoptimization_handler_ = NULL;
60 } 70 }
61 71
62 72
(...skipping 26 matching lines...) Expand all
89 descriptor->register_param_count_ = 4; 99 descriptor->register_param_count_ = 4;
90 descriptor->register_params_ = registers; 100 descriptor->register_params_ = registers;
91 descriptor->deoptimization_handler_ = 101 descriptor->deoptimization_handler_ =
92 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; 102 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry;
93 } 103 }
94 104
95 105
96 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( 106 void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
97 Isolate* isolate, 107 Isolate* isolate,
98 CodeStubInterfaceDescriptor* descriptor) { 108 CodeStubInterfaceDescriptor* descriptor) {
99 static Register registers[] = { rbx }; 109 static Register registers[] = { rbx, rdx };
100 descriptor->register_param_count_ = 1; 110 descriptor->register_param_count_ = 2;
101 descriptor->register_params_ = registers; 111 descriptor->register_params_ = registers;
102 descriptor->deoptimization_handler_ = NULL; 112 descriptor->deoptimization_handler_ = NULL;
103 } 113 }
104 114
105 115
106 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( 116 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
107 Isolate* isolate, 117 Isolate* isolate,
108 CodeStubInterfaceDescriptor* descriptor) { 118 CodeStubInterfaceDescriptor* descriptor) {
109 static Register registers[] = { rdx, rax }; 119 static Register registers[] = { rdx, rax };
110 descriptor->register_param_count_ = 2; 120 descriptor->register_param_count_ = 2;
111 descriptor->register_params_ = registers; 121 descriptor->register_params_ = registers;
112 descriptor->deoptimization_handler_ = 122 descriptor->deoptimization_handler_ =
113 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); 123 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
114 } 124 }
115 125
116 126
117 void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( 127 void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor(
118 Isolate* isolate, 128 Isolate* isolate,
119 CodeStubInterfaceDescriptor* descriptor) { 129 CodeStubInterfaceDescriptor* descriptor) {
120 static Register registers[] = { rdx, rax }; 130 static Register registers[] = { rdx, rax };
121 descriptor->register_param_count_ = 2; 131 descriptor->register_param_count_ = 2;
122 descriptor->register_params_ = registers; 132 descriptor->register_params_ = registers;
123 descriptor->deoptimization_handler_ = 133 descriptor->deoptimization_handler_ =
124 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); 134 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
125 } 135 }
126 136
127 137
138 void RegExpConstructResultStub::InitializeInterfaceDescriptor(
139 Isolate* isolate,
140 CodeStubInterfaceDescriptor* descriptor) {
141 static Register registers[] = { rcx, rbx, rax };
142 descriptor->register_param_count_ = 3;
143 descriptor->register_params_ = registers;
144 descriptor->deoptimization_handler_ =
145 Runtime::FunctionForId(Runtime::kRegExpConstructResult)->entry;
146 }
147
148
128 void LoadFieldStub::InitializeInterfaceDescriptor( 149 void LoadFieldStub::InitializeInterfaceDescriptor(
129 Isolate* isolate, 150 Isolate* isolate,
130 CodeStubInterfaceDescriptor* descriptor) { 151 CodeStubInterfaceDescriptor* descriptor) {
131 static Register registers[] = { rax }; 152 static Register registers[] = { rax };
132 descriptor->register_param_count_ = 1; 153 descriptor->register_param_count_ = 1;
133 descriptor->register_params_ = registers; 154 descriptor->register_params_ = registers;
134 descriptor->deoptimization_handler_ = NULL; 155 descriptor->deoptimization_handler_ = NULL;
135 } 156 }
136 157
137 158
138 void KeyedLoadFieldStub::InitializeInterfaceDescriptor( 159 void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
139 Isolate* isolate, 160 Isolate* isolate,
140 CodeStubInterfaceDescriptor* descriptor) { 161 CodeStubInterfaceDescriptor* descriptor) {
141 static Register registers[] = { rdx }; 162 static Register registers[] = { rdx };
142 descriptor->register_param_count_ = 1; 163 descriptor->register_param_count_ = 1;
143 descriptor->register_params_ = registers; 164 descriptor->register_params_ = registers;
144 descriptor->deoptimization_handler_ = NULL; 165 descriptor->deoptimization_handler_ = NULL;
145 } 166 }
146 167
147 168
148 void KeyedArrayCallStub::InitializeInterfaceDescriptor(
149 Isolate* isolate,
150 CodeStubInterfaceDescriptor* descriptor) {
151 static Register registers[] = { rcx };
152 descriptor->register_param_count_ = 1;
153 descriptor->register_params_ = registers;
154 descriptor->continuation_type_ = TAIL_CALL_CONTINUATION;
155 descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
156 descriptor->deoptimization_handler_ =
157 FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure);
158 }
159
160
161 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( 169 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
162 Isolate* isolate, 170 Isolate* isolate,
163 CodeStubInterfaceDescriptor* descriptor) { 171 CodeStubInterfaceDescriptor* descriptor) {
164 static Register registers[] = { rdx, rcx, rax }; 172 static Register registers[] = { rdx, rcx, rax };
165 descriptor->register_param_count_ = 3; 173 descriptor->register_param_count_ = 3;
166 descriptor->register_params_ = registers; 174 descriptor->register_params_ = registers;
167 descriptor->deoptimization_handler_ = 175 descriptor->deoptimization_handler_ =
168 FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure); 176 FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure);
169 } 177 }
170 178
171 179
172 void TransitionElementsKindStub::InitializeInterfaceDescriptor( 180 void TransitionElementsKindStub::InitializeInterfaceDescriptor(
173 Isolate* isolate, 181 Isolate* isolate,
174 CodeStubInterfaceDescriptor* descriptor) { 182 CodeStubInterfaceDescriptor* descriptor) {
175 static Register registers[] = { rax, rbx }; 183 static Register registers[] = { rax, rbx };
176 descriptor->register_param_count_ = 2; 184 descriptor->register_param_count_ = 2;
177 descriptor->register_params_ = registers; 185 descriptor->register_params_ = registers;
178 descriptor->deoptimization_handler_ = 186 descriptor->deoptimization_handler_ =
179 Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry; 187 Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry;
180 } 188 }
181 189
182 190
183 static void InitializeArrayConstructorDescriptor( 191 static void InitializeArrayConstructorDescriptor(
184 Isolate* isolate, 192 Isolate* isolate,
185 CodeStubInterfaceDescriptor* descriptor, 193 CodeStubInterfaceDescriptor* descriptor,
186 int constant_stack_parameter_count) { 194 int constant_stack_parameter_count) {
187 // register state 195 // register state
188 // rax -- number of arguments 196 // rax -- number of arguments
189 // rdi -- function 197 // rdi -- function
190 // rbx -- type info cell with elements kind 198 // rbx -- allocation site with elements kind
191 static Register registers_variable_args[] = { rdi, rbx, rax }; 199 static Register registers_variable_args[] = { rdi, rbx, rax };
192 static Register registers_no_args[] = { rdi, rbx }; 200 static Register registers_no_args[] = { rdi, rbx };
193 201
194 if (constant_stack_parameter_count == 0) { 202 if (constant_stack_parameter_count == 0) {
195 descriptor->register_param_count_ = 2; 203 descriptor->register_param_count_ = 2;
196 descriptor->register_params_ = registers_no_args; 204 descriptor->register_params_ = registers_no_args;
197 } else { 205 } else {
198 // stack param count needs (constructor pointer, and single argument) 206 // stack param count needs (constructor pointer, and single argument)
199 descriptor->handler_arguments_mode_ = PASS_ARGUMENTS; 207 descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
200 descriptor->stack_parameter_count_ = rax; 208 descriptor->stack_parameter_count_ = rax;
(...skipping 142 matching lines...) Expand 10 before | Expand all | Expand 10 after
343 Isolate* isolate, 351 Isolate* isolate,
344 CodeStubInterfaceDescriptor* descriptor) { 352 CodeStubInterfaceDescriptor* descriptor) {
345 static Register registers[] = { rcx, rdx, rax }; 353 static Register registers[] = { rcx, rdx, rax };
346 descriptor->register_param_count_ = 3; 354 descriptor->register_param_count_ = 3;
347 descriptor->register_params_ = registers; 355 descriptor->register_params_ = registers;
348 descriptor->deoptimization_handler_ = 356 descriptor->deoptimization_handler_ =
349 FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite); 357 FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite);
350 } 358 }
351 359
352 360
353 void NewStringAddStub::InitializeInterfaceDescriptor( 361 void StringAddStub::InitializeInterfaceDescriptor(
354 Isolate* isolate, 362 Isolate* isolate,
355 CodeStubInterfaceDescriptor* descriptor) { 363 CodeStubInterfaceDescriptor* descriptor) {
356 static Register registers[] = { rdx, rax }; 364 static Register registers[] = { rdx, rax };
357 descriptor->register_param_count_ = 2; 365 descriptor->register_param_count_ = 2;
358 descriptor->register_params_ = registers; 366 descriptor->register_params_ = registers;
359 descriptor->deoptimization_handler_ = 367 descriptor->deoptimization_handler_ =
360 Runtime::FunctionForId(Runtime::kStringAdd)->entry; 368 Runtime::FunctionForId(Runtime::kStringAdd)->entry;
361 } 369 }
362 370
363 371
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
401 rcx, // name 409 rcx, // name
402 }; 410 };
403 static Representation representations[] = { 411 static Representation representations[] = {
404 Representation::Tagged(), // context 412 Representation::Tagged(), // context
405 Representation::Tagged(), // name 413 Representation::Tagged(), // name
406 }; 414 };
407 descriptor->register_param_count_ = 2; 415 descriptor->register_param_count_ = 2;
408 descriptor->register_params_ = registers; 416 descriptor->register_params_ = registers;
409 descriptor->param_representations_ = representations; 417 descriptor->param_representations_ = representations;
410 } 418 }
419 {
420 CallInterfaceDescriptor* descriptor =
421 isolate->call_descriptor(Isolate::CallHandler);
422 static Register registers[] = { rsi, // context
423 rdx, // receiver
424 };
425 static Representation representations[] = {
426 Representation::Tagged(), // context
427 Representation::Tagged(), // receiver
428 };
429 descriptor->register_param_count_ = 2;
430 descriptor->register_params_ = registers;
431 descriptor->param_representations_ = representations;
432 }
433 {
434 CallInterfaceDescriptor* descriptor =
435 isolate->call_descriptor(Isolate::ApiFunctionCall);
436 static Register registers[] = { rax, // callee
437 rbx, // call_data
438 rcx, // holder
439 rdx, // api_function_address
440 rsi, // context
441 };
442 static Representation representations[] = {
443 Representation::Tagged(), // callee
444 Representation::Tagged(), // call_data
445 Representation::Tagged(), // holder
446 Representation::External(), // api_function_address
447 Representation::Tagged(), // context
448 };
449 descriptor->register_param_count_ = 5;
450 descriptor->register_params_ = registers;
451 descriptor->param_representations_ = representations;
452 }
411 } 453 }
412 454
413 455
414 #define __ ACCESS_MASM(masm) 456 #define __ ACCESS_MASM(masm)
415 457
416 458
417 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { 459 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
418 // Update the static counter each time a new code stub is generated. 460 // Update the static counter each time a new code stub is generated.
419 Isolate* isolate = masm->isolate(); 461 Isolate* isolate = masm->isolate();
420 isolate->counters()->code_stubs()->Increment(); 462 isolate->counters()->code_stubs()->Increment();
(...skipping 10 matching lines...) Expand all
431 __ push(descriptor->register_params_[i]); 473 __ push(descriptor->register_params_[i]);
432 } 474 }
433 ExternalReference miss = descriptor->miss_handler(); 475 ExternalReference miss = descriptor->miss_handler();
434 __ CallExternalReference(miss, descriptor->register_param_count_); 476 __ CallExternalReference(miss, descriptor->register_param_count_);
435 } 477 }
436 478
437 __ Ret(); 479 __ Ret();
438 } 480 }
439 481
440 482
441 void FastNewContextStub::Generate(MacroAssembler* masm) {
442 // Try to allocate the context in new space.
443 Label gc;
444 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
445 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize,
446 rax, rbx, rcx, &gc, TAG_OBJECT);
447
448 // Get the function from the stack.
449 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
450 __ movp(rcx, args.GetArgumentOperand(0));
451
452 // Set up the object header.
453 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex);
454 __ movp(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
455 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
456
457 // Set up the fixed slots.
458 __ Set(rbx, 0); // Set to NULL.
459 __ movp(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
460 __ movp(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi);
461 __ movp(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);
462
463 // Copy the global object from the previous context.
464 __ movp(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
465 __ movp(Operand(rax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), rbx);
466
467 // Initialize the rest of the slots to undefined.
468 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
469 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
470 __ movp(Operand(rax, Context::SlotOffset(i)), rbx);
471 }
472
473 // Return and remove the on-stack parameter.
474 __ movp(rsi, rax);
475 __ ret(1 * kPointerSize);
476
477 // Need to collect. Call into runtime system.
478 __ bind(&gc);
479 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
480 }
481
482
483 void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
484 // Stack layout on entry:
485 //
486 // [rsp + (1 * kPointerSize)] : function
487 // [rsp + (2 * kPointerSize)] : serialized scope info
488
489 // Try to allocate the context in new space.
490 Label gc;
491 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
492 __ Allocate(FixedArray::SizeFor(length),
493 rax, rbx, rcx, &gc, TAG_OBJECT);
494
495 // Get the function from the stack.
496 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
497 __ movp(rcx, args.GetArgumentOperand(1));
498 // Get the serialized scope info from the stack.
499 __ movp(rbx, args.GetArgumentOperand(0));
500
501 // Set up the object header.
502 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
503 __ movp(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
504 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
505
506 // If this block context is nested in the native context we get a smi
507 // sentinel instead of a function. The block context should get the
508 // canonical empty function of the native context as its closure which
509 // we still have to look up.
510 Label after_sentinel;
511 __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
512 if (FLAG_debug_code) {
513 __ cmpq(rcx, Immediate(0));
514 __ Assert(equal, kExpected0AsASmiSentinel);
515 }
516 __ movp(rcx, GlobalObjectOperand());
517 __ movp(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
518 __ movp(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX));
519 __ bind(&after_sentinel);
520
521 // Set up the fixed slots.
522 __ movp(ContextOperand(rax, Context::CLOSURE_INDEX), rcx);
523 __ movp(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi);
524 __ movp(ContextOperand(rax, Context::EXTENSION_INDEX), rbx);
525
526 // Copy the global object from the previous context.
527 __ movp(rbx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
528 __ movp(ContextOperand(rax, Context::GLOBAL_OBJECT_INDEX), rbx);
529
530 // Initialize the rest of the slots to the hole value.
531 __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
532 for (int i = 0; i < slots_; i++) {
533 __ movp(ContextOperand(rax, i + Context::MIN_CONTEXT_SLOTS), rbx);
534 }
535
536 // Return and remove the on-stack parameter.
537 __ movp(rsi, rax);
538 __ ret(2 * kPointerSize);
539
540 // Need to collect. Call into runtime system.
541 __ bind(&gc);
542 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
543 }
544
545
546 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { 483 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
547 __ PushCallerSaved(save_doubles_); 484 __ PushCallerSaved(save_doubles_);
548 const int argument_count = 1; 485 const int argument_count = 1;
549 __ PrepareCallCFunction(argument_count); 486 __ PrepareCallCFunction(argument_count);
550 __ LoadAddress(arg_reg_1, 487 __ LoadAddress(arg_reg_1,
551 ExternalReference::isolate_address(masm->isolate())); 488 ExternalReference::isolate_address(masm->isolate()));
552 489
553 AllowExternalCallThatCantCauseGC scope(masm); 490 AllowExternalCallThatCantCauseGC scope(masm);
554 __ CallCFunction( 491 __ CallCFunction(
555 ExternalReference::store_buffer_overflow_function(masm->isolate()), 492 ExternalReference::store_buffer_overflow_function(masm->isolate()),
(...skipping 1358 matching lines...) Expand 10 before | Expand all | Expand 10 after
1914 1851
1915 // (11) Sliced string. Replace subject with parent. Go to (5a). 1852 // (11) Sliced string. Replace subject with parent. Go to (5a).
1916 // Load offset into r14 and replace subject string with parent. 1853 // Load offset into r14 and replace subject string with parent.
1917 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset)); 1854 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
1918 __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset)); 1855 __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
1919 __ jmp(&check_underlying); 1856 __ jmp(&check_underlying);
1920 #endif // V8_INTERPRETED_REGEXP 1857 #endif // V8_INTERPRETED_REGEXP
1921 } 1858 }
1922 1859
1923 1860
1924 void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
1925 const int kMaxInlineLength = 100;
1926 Label slowcase;
1927 Label done;
1928 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
1929 __ movp(r8, args.GetArgumentOperand(0));
1930 __ JumpIfNotSmi(r8, &slowcase);
1931 __ SmiToInteger32(rbx, r8);
1932 __ cmpl(rbx, Immediate(kMaxInlineLength));
1933 __ j(above, &slowcase);
1934 // Smi-tagging is equivalent to multiplying by 2.
1935 STATIC_ASSERT(kSmiTag == 0);
1936 STATIC_ASSERT(kSmiTagSize == 1);
1937 // Allocate RegExpResult followed by FixedArray with size in rbx.
1938 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
1939 // Elements: [Map][Length][..elements..]
1940 __ Allocate(JSRegExpResult::kSize + FixedArray::kHeaderSize,
1941 times_pointer_size,
1942 rbx, // In: Number of elements.
1943 rax, // Out: Start of allocation (tagged).
1944 rcx, // Out: End of allocation.
1945 rdx, // Scratch register
1946 &slowcase,
1947 TAG_OBJECT);
1948 // rax: Start of allocated area, object-tagged.
1949 // rbx: Number of array elements as int32.
1950 // r8: Number of array elements as smi.
1951
1952 // Set JSArray map to global.regexp_result_map().
1953 __ movp(rdx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
1954 __ movp(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
1955 __ movp(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
1956 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rdx);
1957
1958 // Set empty properties FixedArray.
1959 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
1960 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
1961
1962 // Set elements to point to FixedArray allocated right after the JSArray.
1963 __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
1964 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx);
1965
1966 // Set input, index and length fields from arguments.
1967 __ movp(r8, args.GetArgumentOperand(2));
1968 __ movp(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
1969 __ movp(r8, args.GetArgumentOperand(1));
1970 __ movp(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
1971 __ movp(r8, args.GetArgumentOperand(0));
1972 __ movp(FieldOperand(rax, JSArray::kLengthOffset), r8);
1973
1974 // Fill out the elements FixedArray.
1975 // rax: JSArray.
1976 // rcx: FixedArray.
1977 // rbx: Number of elements in array as int32.
1978
1979 // Set map.
1980 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
1981 __ movp(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister);
1982 // Set length.
1983 __ Integer32ToSmi(rdx, rbx);
1984 __ movp(FieldOperand(rcx, FixedArray::kLengthOffset), rdx);
1985 // Fill contents of fixed-array with undefined.
1986 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
1987 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
1988 // Fill fixed array elements with undefined.
1989 // rax: JSArray.
1990 // rbx: Number of elements in array that remains to be filled, as int32.
1991 // rcx: Start of elements in FixedArray.
1992 // rdx: undefined.
1993 Label loop;
1994 __ testl(rbx, rbx);
1995 __ bind(&loop);
1996 __ j(less_equal, &done); // Jump if rcx is negative or zero.
1997 __ subl(rbx, Immediate(1));
1998 __ movp(Operand(rcx, rbx, times_pointer_size, 0), rdx);
1999 __ jmp(&loop);
2000
2001 __ bind(&done);
2002 __ ret(3 * kPointerSize);
2003
2004 __ bind(&slowcase);
2005 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
2006 }
2007
2008
2009 static int NegativeComparisonResult(Condition cc) { 1861 static int NegativeComparisonResult(Condition cc) {
2010 ASSERT(cc != equal); 1862 ASSERT(cc != equal);
2011 ASSERT((cc == less) || (cc == less_equal) 1863 ASSERT((cc == less) || (cc == less_equal)
2012 || (cc == greater) || (cc == greater_equal)); 1864 || (cc == greater) || (cc == greater_equal));
2013 return (cc == greater || cc == greater_equal) ? LESS : GREATER; 1865 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
2014 } 1866 }
2015 1867
2016 1868
2017 static void CheckInputType(MacroAssembler* masm, 1869 static void CheckInputType(MacroAssembler* masm,
2018 Register input, 1870 Register input,
(...skipping 283 matching lines...) Expand 10 before | Expand all | Expand 10 after
2302 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) 2154 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
2303 // tagged as a small integer. 2155 // tagged as a small integer.
2304 __ InvokeBuiltin(builtin, JUMP_FUNCTION); 2156 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
2305 2157
2306 __ bind(&miss); 2158 __ bind(&miss);
2307 GenerateMiss(masm); 2159 GenerateMiss(masm);
2308 } 2160 }
2309 2161
2310 2162
2311 static void GenerateRecordCallTarget(MacroAssembler* masm) { 2163 static void GenerateRecordCallTarget(MacroAssembler* masm) {
2312 // Cache the called function in a global property cell. Cache states 2164 // Cache the called function in a feedback vector slot. Cache states
2313 // are uninitialized, monomorphic (indicated by a JSFunction), and 2165 // are uninitialized, monomorphic (indicated by a JSFunction), and
2314 // megamorphic. 2166 // megamorphic.
2315 // rax : number of arguments to the construct function 2167 // rax : number of arguments to the construct function
2316 // rbx : cache cell for call target 2168 // rbx : Feedback vector
2169 // rdx : slot in feedback vector (Smi)
2317 // rdi : the function to call 2170 // rdi : the function to call
2318 Isolate* isolate = masm->isolate(); 2171 Isolate* isolate = masm->isolate();
2319 Label initialize, done, miss, megamorphic, not_array_function; 2172 Label initialize, done, miss, megamorphic, not_array_function,
2173 done_no_smi_convert;
2320 2174
2321 // Load the cache state into rcx. 2175 // Load the cache state into rcx.
2322 __ movp(rcx, FieldOperand(rbx, Cell::kValueOffset)); 2176 __ SmiToInteger32(rdx, rdx);
2177 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
2178 FixedArray::kHeaderSize));
2323 2179
2324 // A monomorphic cache hit or an already megamorphic state: invoke the 2180 // A monomorphic cache hit or an already megamorphic state: invoke the
2325 // function without changing the state. 2181 // function without changing the state.
2326 __ cmpq(rcx, rdi); 2182 __ cmpq(rcx, rdi);
2327 __ j(equal, &done); 2183 __ j(equal, &done);
2328 __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate)); 2184 __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate));
2329 __ j(equal, &done); 2185 __ j(equal, &done);
2330 2186
2331 // If we came here, we need to see if we are the array function. 2187 // If we came here, we need to see if we are the array function.
2332 // If we didn't have a matching function, and we didn't find the megamorph 2188 // If we didn't have a matching function, and we didn't find the megamorph
2333 // sentinel, then we have in the cell either some other function or an 2189 // sentinel, then we have in the slot either some other function or an
2334 // AllocationSite. Do a map check on the object in rcx. 2190 // AllocationSite. Do a map check on the object in rcx.
2335 Handle<Map> allocation_site_map = 2191 Handle<Map> allocation_site_map =
2336 masm->isolate()->factory()->allocation_site_map(); 2192 masm->isolate()->factory()->allocation_site_map();
2337 __ Cmp(FieldOperand(rcx, 0), allocation_site_map); 2193 __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
2338 __ j(not_equal, &miss); 2194 __ j(not_equal, &miss);
2339 2195
2340 // Make sure the function is the Array() function 2196 // Make sure the function is the Array() function
2341 __ LoadArrayFunction(rcx); 2197 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
2342 __ cmpq(rdi, rcx); 2198 __ cmpq(rdi, rcx);
2343 __ j(not_equal, &megamorphic); 2199 __ j(not_equal, &megamorphic);
2344 __ jmp(&done); 2200 __ jmp(&done);
2345 2201
2346 __ bind(&miss); 2202 __ bind(&miss);
2347 2203
2348 // A monomorphic miss (i.e, here the cache is not uninitialized) goes 2204 // A monomorphic miss (i.e, here the cache is not uninitialized) goes
2349 // megamorphic. 2205 // megamorphic.
2350 __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate)); 2206 __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate));
2351 __ j(equal, &initialize); 2207 __ j(equal, &initialize);
2352 // MegamorphicSentinel is an immortal immovable object (undefined) so no 2208 // MegamorphicSentinel is an immortal immovable object (undefined) so no
2353 // write-barrier is needed. 2209 // write-barrier is needed.
2354 __ bind(&megamorphic); 2210 __ bind(&megamorphic);
2355 __ Move(FieldOperand(rbx, Cell::kValueOffset), 2211 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
2356 TypeFeedbackCells::MegamorphicSentinel(isolate)); 2212 TypeFeedbackInfo::MegamorphicSentinel(isolate));
2357 __ jmp(&done); 2213 __ jmp(&done);
2358 2214
2359 // An uninitialized cache is patched with the function or sentinel to 2215 // An uninitialized cache is patched with the function or sentinel to
2360 // indicate the ElementsKind if function is the Array constructor. 2216 // indicate the ElementsKind if function is the Array constructor.
2361 __ bind(&initialize); 2217 __ bind(&initialize);
2362 // Make sure the function is the Array() function 2218 // Make sure the function is the Array() function
2363 __ LoadArrayFunction(rcx); 2219 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
2364 __ cmpq(rdi, rcx); 2220 __ cmpq(rdi, rcx);
2365 __ j(not_equal, &not_array_function); 2221 __ j(not_equal, &not_array_function);
2366 2222
2367 // The target function is the Array constructor, 2223 // The target function is the Array constructor,
2368 // Create an AllocationSite if we don't already have it, store it in the cell 2224 // Create an AllocationSite if we don't already have it, store it in the slot.
2369 { 2225 {
2370 FrameScope scope(masm, StackFrame::INTERNAL); 2226 FrameScope scope(masm, StackFrame::INTERNAL);
2371 2227
2372 // Arguments register must be smi-tagged to call out. 2228 // Arguments register must be smi-tagged to call out.
2373 __ Integer32ToSmi(rax, rax); 2229 __ Integer32ToSmi(rax, rax);
2374 __ push(rax); 2230 __ push(rax);
2375 __ push(rdi); 2231 __ push(rdi);
2232 __ Integer32ToSmi(rdx, rdx);
2233 __ push(rdx);
2376 __ push(rbx); 2234 __ push(rbx);
2377 2235
2378 CreateAllocationSiteStub create_stub; 2236 CreateAllocationSiteStub create_stub;
2379 __ CallStub(&create_stub); 2237 __ CallStub(&create_stub);
2380 2238
2381 __ pop(rbx); 2239 __ pop(rbx);
2240 __ pop(rdx);
2382 __ pop(rdi); 2241 __ pop(rdi);
2383 __ pop(rax); 2242 __ pop(rax);
2384 __ SmiToInteger32(rax, rax); 2243 __ SmiToInteger32(rax, rax);
2385 } 2244 }
2386 __ jmp(&done); 2245 __ jmp(&done_no_smi_convert);
2387 2246
2388 __ bind(&not_array_function); 2247 __ bind(&not_array_function);
2389 __ movp(FieldOperand(rbx, Cell::kValueOffset), rdi); 2248 __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
2390 // No need for a write barrier here - cells are rescanned. 2249 rdi);
2250
2251 // We won't need rdx or rbx anymore, just save rdi
2252 __ push(rdi);
2253 __ push(rbx);
2254 __ push(rdx);
2255 __ RecordWriteArray(rbx, rdi, rdx, kDontSaveFPRegs,
2256 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
2257 __ pop(rdx);
2258 __ pop(rbx);
2259 __ pop(rdi);
2391 2260
2392 __ bind(&done); 2261 __ bind(&done);
2262 __ Integer32ToSmi(rdx, rdx);
2263
2264 __ bind(&done_no_smi_convert);
2393 } 2265 }
2394 2266
2395 2267
2396 void CallFunctionStub::Generate(MacroAssembler* masm) { 2268 void CallFunctionStub::Generate(MacroAssembler* masm) {
2397 // rbx : cache cell for call target 2269 // rbx : feedback vector
2270 // rdx : (only if rbx is not undefined) slot in feedback vector (Smi)
2398 // rdi : the function to call 2271 // rdi : the function to call
2399 Isolate* isolate = masm->isolate(); 2272 Isolate* isolate = masm->isolate();
2400 Label slow, non_function; 2273 Label slow, non_function, wrap, cont;
2401 StackArgumentsAccessor args(rsp, argc_); 2274 StackArgumentsAccessor args(rsp, argc_);
2402 2275
2403 // Check that the function really is a JavaScript function. 2276 if (NeedsChecks()) {
2404 __ JumpIfSmi(rdi, &non_function); 2277 // Check that the function really is a JavaScript function.
2278 __ JumpIfSmi(rdi, &non_function);
2405 2279
2406 // Goto slow case if we do not have a function. 2280 // Goto slow case if we do not have a function.
2407 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 2281 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2408 __ j(not_equal, &slow); 2282 __ j(not_equal, &slow);
2409 2283
2410 if (RecordCallTarget()) { 2284 if (RecordCallTarget()) {
2411 GenerateRecordCallTarget(masm); 2285 GenerateRecordCallTarget(masm);
2286 }
2412 } 2287 }
2413 2288
2414 // Fast-case: Just invoke the function. 2289 // Fast-case: Just invoke the function.
2415 ParameterCount actual(argc_); 2290 ParameterCount actual(argc_);
2416 2291
2292 if (CallAsMethod()) {
2293 if (NeedsChecks()) {
2294 // Do not transform the receiver for strict mode functions.
2295 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2296 __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset),
2297 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
2298 __ j(not_equal, &cont);
2299
2300 // Do not transform the receiver for natives.
2301 // SharedFunctionInfo is already loaded into rcx.
2302 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
2303 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
2304 __ j(not_equal, &cont);
2305 }
2306
2307
2308 // Load the receiver from the stack.
2309 __ movp(rax, args.GetReceiverOperand());
2310
2311 if (NeedsChecks()) {
2312 __ JumpIfSmi(rax, &wrap);
2313
2314 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
2315 __ j(below, &wrap);
2316 } else {
2317 __ jmp(&wrap);
2318 }
2319
2320 __ bind(&cont);
2321 }
2417 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); 2322 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
2418 2323
2419 // Slow-case: Non-function called. 2324 if (NeedsChecks()) {
2420 __ bind(&slow); 2325 // Slow-case: Non-function called.
2421 if (RecordCallTarget()) { 2326 __ bind(&slow);
2422 // If there is a call target cache, mark it megamorphic in the 2327 if (RecordCallTarget()) {
2423 // non-function case. MegamorphicSentinel is an immortal immovable 2328 // If there is a call target cache, mark it megamorphic in the
2424 // object (undefined) so no write barrier is needed. 2329 // non-function case. MegamorphicSentinel is an immortal immovable
2425 __ Move(FieldOperand(rbx, Cell::kValueOffset), 2330 // object (undefined) so no write barrier is needed.
2426 TypeFeedbackCells::MegamorphicSentinel(isolate)); 2331 __ SmiToInteger32(rdx, rdx);
2427 } 2332 __ Move(FieldOperand(rbx, rdx, times_pointer_size,
2428 // Check for function proxy. 2333 FixedArray::kHeaderSize),
2429 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); 2334 TypeFeedbackInfo::MegamorphicSentinel(isolate));
2430 __ j(not_equal, &non_function); 2335 __ Integer32ToSmi(rdx, rdx);
2431 __ PopReturnAddressTo(rcx); 2336 }
2432 __ push(rdi); // put proxy as additional argument under return address 2337 // Check for function proxy.
2433 __ PushReturnAddressFrom(rcx); 2338 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
2434 __ Set(rax, argc_ + 1); 2339 __ j(not_equal, &non_function);
2435 __ Set(rbx, 0); 2340 __ PopReturnAddressTo(rcx);
2436 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); 2341 __ push(rdi); // put proxy as additional argument under return address
2437 { 2342 __ PushReturnAddressFrom(rcx);
2343 __ Set(rax, argc_ + 1);
2344 __ Set(rbx, 0);
2345 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
2346 {
2347 Handle<Code> adaptor =
2348 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
2349 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2350 }
2351
2352 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2353 // of the original receiver from the call site).
2354 __ bind(&non_function);
2355 __ movp(args.GetReceiverOperand(), rdi);
2356 __ Set(rax, argc_);
2357 __ Set(rbx, 0);
2358 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
2438 Handle<Code> adaptor = 2359 Handle<Code> adaptor =
2439 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); 2360 isolate->builtins()->ArgumentsAdaptorTrampoline();
2440 __ jmp(adaptor, RelocInfo::CODE_TARGET); 2361 __ Jump(adaptor, RelocInfo::CODE_TARGET);
2441 } 2362 }
2442 2363
2443 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 2364 if (CallAsMethod()) {
2444 // of the original receiver from the call site). 2365 __ bind(&wrap);
2445 __ bind(&non_function); 2366 // Wrap the receiver and patch it back onto the stack.
2446 __ movp(args.GetReceiverOperand(), rdi); 2367 { FrameScope frame_scope(masm, StackFrame::INTERNAL);
2447 __ Set(rax, argc_); 2368 __ push(rdi);
2448 __ Set(rbx, 0); 2369 __ push(rax);
2449 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); 2370 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
2450 Handle<Code> adaptor = 2371 __ pop(rdi);
2451 isolate->builtins()->ArgumentsAdaptorTrampoline(); 2372 }
2452 __ Jump(adaptor, RelocInfo::CODE_TARGET); 2373 __ movp(args.GetReceiverOperand(), rax);
2374 __ jmp(&cont);
2375 }
2453 } 2376 }
2454 2377
2455 2378
2456 void CallConstructStub::Generate(MacroAssembler* masm) { 2379 void CallConstructStub::Generate(MacroAssembler* masm) {
2457 // rax : number of arguments 2380 // rax : number of arguments
2458 // rbx : cache cell for call target 2381 // rbx : feedback vector
2382 // rdx : (only if rbx is not undefined) slot in feedback vector (Smi)
2459 // rdi : constructor function 2383 // rdi : constructor function
2460 Label slow, non_function_call; 2384 Label slow, non_function_call;
2461 2385
2462 // Check that function is not a smi. 2386 // Check that function is not a smi.
2463 __ JumpIfSmi(rdi, &non_function_call); 2387 __ JumpIfSmi(rdi, &non_function_call);
2464 // Check that function is a JSFunction. 2388 // Check that function is a JSFunction.
2465 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 2389 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2466 __ j(not_equal, &slow); 2390 __ j(not_equal, &slow);
2467 2391
2468 if (RecordCallTarget()) { 2392 if (RecordCallTarget()) {
(...skipping 261 matching lines...) Expand 10 before | Expand all | Expand 10 after
2730 // Do space-specific GC and retry runtime call. 2654 // Do space-specific GC and retry runtime call.
2731 GenerateCore(masm, 2655 GenerateCore(masm,
2732 &throw_normal_exception, 2656 &throw_normal_exception,
2733 &throw_termination_exception, 2657 &throw_termination_exception,
2734 &throw_out_of_memory_exception, 2658 &throw_out_of_memory_exception,
2735 true, 2659 true,
2736 false); 2660 false);
2737 2661
2738 // Do full GC and retry runtime call one final time. 2662 // Do full GC and retry runtime call one final time.
2739 Failure* failure = Failure::InternalError(); 2663 Failure* failure = Failure::InternalError();
2740 __ Move(rax, failure, RelocInfo::NONE64); 2664 __ Move(rax, failure, Assembler::RelocInfoNone());
2741 GenerateCore(masm, 2665 GenerateCore(masm,
2742 &throw_normal_exception, 2666 &throw_normal_exception,
2743 &throw_termination_exception, 2667 &throw_termination_exception,
2744 &throw_out_of_memory_exception, 2668 &throw_out_of_memory_exception,
2745 true, 2669 true,
2746 true); 2670 true);
2747 2671
2748 __ bind(&throw_out_of_memory_exception); 2672 __ bind(&throw_out_of_memory_exception);
2749 // Set external caught exception to false. 2673 // Set external caught exception to false.
2750 Isolate* isolate = masm->isolate(); 2674 Isolate* isolate = masm->isolate();
2751 ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress, 2675 ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
2752 isolate); 2676 isolate);
2753 __ Set(rax, static_cast<int64_t>(false)); 2677 __ Set(rax, static_cast<int64_t>(false));
2754 __ Store(external_caught, rax); 2678 __ Store(external_caught, rax);
2755 2679
2756 // Set pending exception and rax to out of memory exception. 2680 // Set pending exception and rax to out of memory exception.
2757 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, 2681 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
2758 isolate); 2682 isolate);
2759 Label already_have_failure; 2683 Label already_have_failure;
2760 JumpIfOOM(masm, rax, kScratchRegister, &already_have_failure); 2684 JumpIfOOM(masm, rax, kScratchRegister, &already_have_failure);
2761 __ Move(rax, Failure::OutOfMemoryException(0x1), RelocInfo::NONE64); 2685 __ Move(rax, Failure::OutOfMemoryException(0x1), Assembler::RelocInfoNone());
2762 __ bind(&already_have_failure); 2686 __ bind(&already_have_failure);
2763 __ Store(pending_exception, rax); 2687 __ Store(pending_exception, rax);
2764 // Fall through to the next label. 2688 // Fall through to the next label.
2765 2689
2766 __ bind(&throw_termination_exception); 2690 __ bind(&throw_termination_exception);
2767 __ ThrowUncatchable(rax); 2691 __ ThrowUncatchable(rax);
2768 2692
2769 __ bind(&throw_normal_exception); 2693 __ bind(&throw_normal_exception);
2770 __ Throw(rax); 2694 __ Throw(rax);
2771 } 2695 }
2772 2696
2773 2697
2774 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { 2698 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
2775 Label invoke, handler_entry, exit; 2699 Label invoke, handler_entry, exit;
2776 Label not_outermost_js, not_outermost_js_2; 2700 Label not_outermost_js, not_outermost_js_2;
2777 2701
2778 ProfileEntryHookStub::MaybeCallEntryHook(masm); 2702 ProfileEntryHookStub::MaybeCallEntryHook(masm);
2779 2703
2780 { // NOLINT. Scope block confuses linter. 2704 { // NOLINT. Scope block confuses linter.
2781 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm); 2705 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
2782 // Set up frame. 2706 // Set up frame.
2783 __ push(rbp); 2707 __ push(rbp);
2784 __ movp(rbp, rsp); 2708 __ movp(rbp, rsp);
2785 2709
2786 // Push the stack frame type marker twice. 2710 // Push the stack frame type marker twice.
2787 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; 2711 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
2788 // Scratch register is neither callee-save, nor an argument register on any 2712 // Scratch register is neither callee-save, nor an argument register on any
2789 // platform. It's free to use at this point. 2713 // platform. It's free to use at this point.
2790 // Cannot use smi-register for loading yet. 2714 // Cannot use smi-register for loading yet.
2791 __ Move(kScratchRegister, Smi::FromInt(marker), RelocInfo::NONE64); 2715 __ Move(kScratchRegister, Smi::FromInt(marker), Assembler::RelocInfoNone());
2792 __ push(kScratchRegister); // context slot 2716 __ push(kScratchRegister); // context slot
2793 __ push(kScratchRegister); // function slot 2717 __ push(kScratchRegister); // function slot
2794 // Save callee-saved registers (X64/Win64 calling conventions). 2718 // Save callee-saved registers (X64/Win64 calling conventions).
2795 __ push(r12); 2719 __ push(r12);
2796 __ push(r13); 2720 __ push(r13);
2797 __ push(r14); 2721 __ push(r14);
2798 __ push(r15); 2722 __ push(r15);
2799 #ifdef _WIN64 2723 #ifdef _WIN64
2800 __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI. 2724 __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
2801 __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI. 2725 __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
2849 // Jump to a faked try block that does the invoke, with a faked catch 2773 // Jump to a faked try block that does the invoke, with a faked catch
2850 // block that sets the pending exception. 2774 // block that sets the pending exception.
2851 __ jmp(&invoke); 2775 __ jmp(&invoke);
2852 __ bind(&handler_entry); 2776 __ bind(&handler_entry);
2853 handler_offset_ = handler_entry.pos(); 2777 handler_offset_ = handler_entry.pos();
2854 // Caught exception: Store result (exception) in the pending exception 2778 // Caught exception: Store result (exception) in the pending exception
2855 // field in the JSEnv and return a failure sentinel. 2779 // field in the JSEnv and return a failure sentinel.
2856 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, 2780 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
2857 isolate); 2781 isolate);
2858 __ Store(pending_exception, rax); 2782 __ Store(pending_exception, rax);
2859 __ Move(rax, Failure::Exception(), RelocInfo::NONE64); 2783 __ Move(rax, Failure::Exception(), Assembler::RelocInfoNone());
2860 __ jmp(&exit); 2784 __ jmp(&exit);
2861 2785
2862 // Invoke: Link this frame into the handler chain. There's only one 2786 // Invoke: Link this frame into the handler chain. There's only one
2863 // handler block in this code object, so its index is 0. 2787 // handler block in this code object, so its index is 0.
2864 __ bind(&invoke); 2788 __ bind(&invoke);
2865 __ PushTryHandler(StackHandler::JS_ENTRY, 0); 2789 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
2866 2790
2867 // Clear any pending exceptions. 2791 // Clear any pending exceptions.
2868 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex); 2792 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
2869 __ Store(pending_exception, rax); 2793 __ Store(pending_exception, rax);
(...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after
3011 2935
3012 // Register mapping: 2936 // Register mapping:
3013 // rax is object map. 2937 // rax is object map.
3014 // rdx is function. 2938 // rdx is function.
3015 // rbx is function prototype. 2939 // rbx is function prototype.
3016 if (!HasCallSiteInlineCheck()) { 2940 if (!HasCallSiteInlineCheck()) {
3017 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); 2941 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
3018 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); 2942 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
3019 } else { 2943 } else {
3020 // Get return address and delta to inlined map check. 2944 // Get return address and delta to inlined map check.
3021 __ movp(kScratchRegister, StackOperandForReturnAddress(0)); 2945 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
3022 __ subq(kScratchRegister, args.GetArgumentOperand(2)); 2946 __ subq(kScratchRegister, args.GetArgumentOperand(2));
3023 if (FLAG_debug_code) { 2947 if (FLAG_debug_code) {
3024 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); 2948 __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
3025 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); 2949 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
3026 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck); 2950 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
3027 } 2951 }
3028 __ movp(kScratchRegister, 2952 __ movp(kScratchRegister,
3029 Operand(kScratchRegister, kOffsetToMapCheckValue)); 2953 Operand(kScratchRegister, kOffsetToMapCheckValue));
3030 __ movp(Operand(kScratchRegister, 0), rax); 2954 __ movp(Operand(kScratchRegister, 0), rax);
3031 } 2955 }
(...skipping 20 matching lines...) Expand all
3052 // Store bitwise zero in the cache. This is a Smi in GC terms. 2976 // Store bitwise zero in the cache. This is a Smi in GC terms.
3053 STATIC_ASSERT(kSmiTag == 0); 2977 STATIC_ASSERT(kSmiTag == 0);
3054 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); 2978 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
3055 } else { 2979 } else {
3056 // Store offset of true in the root array at the inline check site. 2980 // Store offset of true in the root array at the inline check site.
3057 int true_offset = 0x100 + 2981 int true_offset = 0x100 +
3058 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; 2982 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
3059 // Assert it is a 1-byte signed value. 2983 // Assert it is a 1-byte signed value.
3060 ASSERT(true_offset >= 0 && true_offset < 0x100); 2984 ASSERT(true_offset >= 0 && true_offset < 0x100);
3061 __ movl(rax, Immediate(true_offset)); 2985 __ movl(rax, Immediate(true_offset));
3062 __ movp(kScratchRegister, StackOperandForReturnAddress(0)); 2986 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
3063 __ subq(kScratchRegister, args.GetArgumentOperand(2)); 2987 __ subq(kScratchRegister, args.GetArgumentOperand(2));
3064 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); 2988 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
3065 if (FLAG_debug_code) { 2989 if (FLAG_debug_code) {
3066 __ movl(rax, Immediate(kWordBeforeResultValue)); 2990 __ movl(rax, Immediate(kWordBeforeResultValue));
3067 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); 2991 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
3068 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); 2992 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
3069 } 2993 }
3070 __ Set(rax, 0); 2994 __ Set(rax, 0);
3071 } 2995 }
3072 __ ret((2 + extra_argument_offset) * kPointerSize); 2996 __ ret((2 + extra_argument_offset) * kPointerSize);
3073 2997
3074 __ bind(&is_not_instance); 2998 __ bind(&is_not_instance);
3075 if (!HasCallSiteInlineCheck()) { 2999 if (!HasCallSiteInlineCheck()) {
3076 // We have to store a non-zero value in the cache. 3000 // We have to store a non-zero value in the cache.
3077 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); 3001 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
3078 } else { 3002 } else {
3079 // Store offset of false in the root array at the inline check site. 3003 // Store offset of false in the root array at the inline check site.
3080 int false_offset = 0x100 + 3004 int false_offset = 0x100 +
3081 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; 3005 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
3082 // Assert it is a 1-byte signed value. 3006 // Assert it is a 1-byte signed value.
3083 ASSERT(false_offset >= 0 && false_offset < 0x100); 3007 ASSERT(false_offset >= 0 && false_offset < 0x100);
3084 __ movl(rax, Immediate(false_offset)); 3008 __ movl(rax, Immediate(false_offset));
3085 __ movp(kScratchRegister, StackOperandForReturnAddress(0)); 3009 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
3086 __ subq(kScratchRegister, args.GetArgumentOperand(2)); 3010 __ subq(kScratchRegister, args.GetArgumentOperand(2));
3087 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); 3011 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
3088 if (FLAG_debug_code) { 3012 if (FLAG_debug_code) {
3089 __ movl(rax, Immediate(kWordBeforeResultValue)); 3013 __ movl(rax, Immediate(kWordBeforeResultValue));
3090 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); 3014 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
3091 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); 3015 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
3092 } 3016 }
3093 } 3017 }
3094 __ ret((2 + extra_argument_offset) * kPointerSize); 3018 __ ret((2 + extra_argument_offset) * kPointerSize);
3095 3019
(...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after
3237 if (!result_.is(rax)) { 3161 if (!result_.is(rax)) {
3238 __ movp(result_, rax); 3162 __ movp(result_, rax);
3239 } 3163 }
3240 call_helper.AfterCall(masm); 3164 call_helper.AfterCall(masm);
3241 __ jmp(&exit_); 3165 __ jmp(&exit_);
3242 3166
3243 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); 3167 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
3244 } 3168 }
3245 3169
3246 3170
3247 void StringAddStub::Generate(MacroAssembler* masm) {
3248 Label call_runtime, call_builtin;
3249 Builtins::JavaScript builtin_id = Builtins::ADD;
3250
3251 // Load the two arguments.
3252 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
3253 __ movp(rax, args.GetArgumentOperand(0)); // First argument (left).
3254 __ movp(rdx, args.GetArgumentOperand(1)); // Second argument (right).
3255
3256 // Make sure that both arguments are strings if not known in advance.
3257 // Otherwise, at least one of the arguments is definitely a string,
3258 // and we convert the one that is not known to be a string.
3259 if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
3260 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
3261 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
3262 __ JumpIfSmi(rax, &call_runtime);
3263 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
3264 __ j(above_equal, &call_runtime);
3265
3266 // First argument is a a string, test second.
3267 __ JumpIfSmi(rdx, &call_runtime);
3268 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
3269 __ j(above_equal, &call_runtime);
3270 } else if ((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
3271 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == 0);
3272 GenerateConvertArgument(masm, 2 * kPointerSize, rax, rbx, rcx, rdi,
3273 &call_builtin);
3274 builtin_id = Builtins::STRING_ADD_RIGHT;
3275 } else if ((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
3276 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == 0);
3277 GenerateConvertArgument(masm, 1 * kPointerSize, rdx, rbx, rcx, rdi,
3278 &call_builtin);
3279 builtin_id = Builtins::STRING_ADD_LEFT;
3280 }
3281
3282 // Both arguments are strings.
3283 // rax: first string
3284 // rdx: second string
3285 // Check if either of the strings are empty. In that case return the other.
3286 Label second_not_zero_length, both_not_zero_length;
3287 __ movp(rcx, FieldOperand(rdx, String::kLengthOffset));
3288 __ SmiTest(rcx);
3289 __ j(not_zero, &second_not_zero_length, Label::kNear);
3290 // Second string is empty, result is first string which is already in rax.
3291 Counters* counters = masm->isolate()->counters();
3292 __ IncrementCounter(counters->string_add_native(), 1);
3293 __ ret(2 * kPointerSize);
3294 __ bind(&second_not_zero_length);
3295 __ movp(rbx, FieldOperand(rax, String::kLengthOffset));
3296 __ SmiTest(rbx);
3297 __ j(not_zero, &both_not_zero_length, Label::kNear);
3298 // First string is empty, result is second string which is in rdx.
3299 __ movp(rax, rdx);
3300 __ IncrementCounter(counters->string_add_native(), 1);
3301 __ ret(2 * kPointerSize);
3302
3303 // Both strings are non-empty.
3304 // rax: first string
3305 // rbx: length of first string
3306 // rcx: length of second string
3307 // rdx: second string
3308 // r8: map of first string (if flags_ == NO_STRING_ADD_FLAGS)
3309 // r9: map of second string (if flags_ == NO_STRING_ADD_FLAGS)
3310 Label string_add_flat_result, longer_than_two;
3311 __ bind(&both_not_zero_length);
3312
3313 // If arguments where known to be strings, maps are not loaded to r8 and r9
3314 // by the code above.
3315 if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) {
3316 __ movp(r8, FieldOperand(rax, HeapObject::kMapOffset));
3317 __ movp(r9, FieldOperand(rdx, HeapObject::kMapOffset));
3318 }
3319 // Get the instance types of the two strings as they will be needed soon.
3320 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset));
3321 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset));
3322
3323 // Look at the length of the result of adding the two strings.
3324 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
3325 __ SmiAdd(rbx, rbx, rcx);
3326 // Use the string table when adding two one character strings, as it
3327 // helps later optimizations to return an internalized string here.
3328 __ SmiCompare(rbx, Smi::FromInt(2));
3329 __ j(not_equal, &longer_than_two);
3330
3331 // Check that both strings are non-external ASCII strings.
3332 __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx,
3333 &call_runtime);
3334
3335 // Get the two characters forming the sub string.
3336 __ movzxbq(rbx, FieldOperand(rax, SeqOneByteString::kHeaderSize));
3337 __ movzxbq(rcx, FieldOperand(rdx, SeqOneByteString::kHeaderSize));
3338
3339 // Try to lookup two character string in string table. If it is not found
3340 // just allocate a new one.
3341 Label make_two_character_string, make_flat_ascii_string;
3342 StringHelper::GenerateTwoCharacterStringTableProbe(
3343 masm, rbx, rcx, r14, r11, rdi, r15, &make_two_character_string);
3344 __ IncrementCounter(counters->string_add_native(), 1);
3345 __ ret(2 * kPointerSize);
3346
3347 __ bind(&make_two_character_string);
3348 __ Set(rdi, 2);
3349 __ AllocateAsciiString(rax, rdi, r8, r9, r11, &call_runtime);
3350 // rbx - first byte: first character
3351 // rbx - second byte: *maybe* second character
3352 // Make sure that the second byte of rbx contains the second character.
3353 __ movzxbq(rcx, FieldOperand(rdx, SeqOneByteString::kHeaderSize));
3354 __ shll(rcx, Immediate(kBitsPerByte));
3355 __ orl(rbx, rcx);
3356 // Write both characters to the new string.
3357 __ movw(FieldOperand(rax, SeqOneByteString::kHeaderSize), rbx);
3358 __ IncrementCounter(counters->string_add_native(), 1);
3359 __ ret(2 * kPointerSize);
3360
3361 __ bind(&longer_than_two);
3362 // Check if resulting string will be flat.
3363 __ SmiCompare(rbx, Smi::FromInt(ConsString::kMinLength));
3364 __ j(below, &string_add_flat_result);
3365 // Handle exceptionally long strings in the runtime system.
3366 STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
3367 __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength));
3368 __ j(above, &call_runtime);
3369
3370 // If result is not supposed to be flat, allocate a cons string object. If
3371 // both strings are ASCII the result is an ASCII cons string.
3372 // rax: first string
3373 // rbx: length of resulting flat string
3374 // rdx: second string
3375 // r8: instance type of first string
3376 // r9: instance type of second string
3377 Label non_ascii, allocated, ascii_data;
3378 __ movl(rcx, r8);
3379 __ and_(rcx, r9);
3380 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
3381 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
3382 __ testl(rcx, Immediate(kStringEncodingMask));
3383 __ j(zero, &non_ascii);
3384 __ bind(&ascii_data);
3385 // Allocate an ASCII cons string.
3386 __ AllocateAsciiConsString(rcx, rdi, no_reg, &call_runtime);
3387 __ bind(&allocated);
3388 // Fill the fields of the cons string.
3389 __ movp(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
3390 __ movp(FieldOperand(rcx, ConsString::kHashFieldOffset),
3391 Immediate(String::kEmptyHashField));
3392
3393 Label skip_write_barrier, after_writing;
3394 ExternalReference high_promotion_mode = ExternalReference::
3395 new_space_high_promotion_mode_active_address(masm->isolate());
3396 __ Load(rbx, high_promotion_mode);
3397 __ testb(rbx, Immediate(1));
3398 __ j(zero, &skip_write_barrier);
3399
3400 __ movp(FieldOperand(rcx, ConsString::kFirstOffset), rax);
3401 __ RecordWriteField(rcx,
3402 ConsString::kFirstOffset,
3403 rax,
3404 rbx,
3405 kDontSaveFPRegs);
3406 __ movp(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
3407 __ RecordWriteField(rcx,
3408 ConsString::kSecondOffset,
3409 rdx,
3410 rbx,
3411 kDontSaveFPRegs);
3412 __ jmp(&after_writing);
3413
3414 __ bind(&skip_write_barrier);
3415 __ movp(FieldOperand(rcx, ConsString::kFirstOffset), rax);
3416 __ movp(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
3417
3418 __ bind(&after_writing);
3419
3420 __ movp(rax, rcx);
3421 __ IncrementCounter(counters->string_add_native(), 1);
3422 __ ret(2 * kPointerSize);
3423 __ bind(&non_ascii);
3424 // At least one of the strings is two-byte. Check whether it happens
3425 // to contain only one byte characters.
3426 // rcx: first instance type AND second instance type.
3427 // r8: first instance type.
3428 // r9: second instance type.
3429 __ testb(rcx, Immediate(kOneByteDataHintMask));
3430 __ j(not_zero, &ascii_data);
3431 __ xor_(r8, r9);
3432 STATIC_ASSERT(kOneByteStringTag != 0 && kOneByteDataHintTag != 0);
3433 __ andb(r8, Immediate(kOneByteStringTag | kOneByteDataHintTag));
3434 __ cmpb(r8, Immediate(kOneByteStringTag | kOneByteDataHintTag));
3435 __ j(equal, &ascii_data);
3436 // Allocate a two byte cons string.
3437 __ AllocateTwoByteConsString(rcx, rdi, no_reg, &call_runtime);
3438 __ jmp(&allocated);
3439
3440 // We cannot encounter sliced strings or cons strings here since:
3441 STATIC_ASSERT(SlicedString::kMinLength >= ConsString::kMinLength);
3442 // Handle creating a flat result from either external or sequential strings.
3443 // Locate the first characters' locations.
3444 // rax: first string
3445 // rbx: length of resulting flat string as smi
3446 // rdx: second string
3447 // r8: instance type of first string
3448 // r9: instance type of first string
3449 Label first_prepared, second_prepared;
3450 Label first_is_sequential, second_is_sequential;
3451 __ bind(&string_add_flat_result);
3452
3453 __ SmiToInteger32(r14, FieldOperand(rax, SeqString::kLengthOffset));
3454 // r14: length of first string
3455 STATIC_ASSERT(kSeqStringTag == 0);
3456 __ testb(r8, Immediate(kStringRepresentationMask));
3457 __ j(zero, &first_is_sequential, Label::kNear);
3458 // Rule out short external string and load string resource.
3459 STATIC_ASSERT(kShortExternalStringTag != 0);
3460 __ testb(r8, Immediate(kShortExternalStringMask));
3461 __ j(not_zero, &call_runtime);
3462 __ movp(rcx, FieldOperand(rax, ExternalString::kResourceDataOffset));
3463 __ jmp(&first_prepared, Label::kNear);
3464 __ bind(&first_is_sequential);
3465 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
3466 __ lea(rcx, FieldOperand(rax, SeqOneByteString::kHeaderSize));
3467 __ bind(&first_prepared);
3468
3469 // Check whether both strings have same encoding.
3470 __ xorl(r8, r9);
3471 __ testb(r8, Immediate(kStringEncodingMask));
3472 __ j(not_zero, &call_runtime);
3473
3474 __ SmiToInteger32(r15, FieldOperand(rdx, SeqString::kLengthOffset));
3475 // r15: length of second string
3476 STATIC_ASSERT(kSeqStringTag == 0);
3477 __ testb(r9, Immediate(kStringRepresentationMask));
3478 __ j(zero, &second_is_sequential, Label::kNear);
3479 // Rule out short external string and load string resource.
3480 STATIC_ASSERT(kShortExternalStringTag != 0);
3481 __ testb(r9, Immediate(kShortExternalStringMask));
3482 __ j(not_zero, &call_runtime);
3483 __ movp(rdx, FieldOperand(rdx, ExternalString::kResourceDataOffset));
3484 __ jmp(&second_prepared, Label::kNear);
3485 __ bind(&second_is_sequential);
3486 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
3487 __ lea(rdx, FieldOperand(rdx, SeqOneByteString::kHeaderSize));
3488 __ bind(&second_prepared);
3489
3490 Label non_ascii_string_add_flat_result;
3491 // r9: instance type of second string
3492 // First string and second string have the same encoding.
3493 STATIC_ASSERT(kTwoByteStringTag == 0);
3494 __ SmiToInteger32(rbx, rbx);
3495 __ testb(r9, Immediate(kStringEncodingMask));
3496 __ j(zero, &non_ascii_string_add_flat_result);
3497
3498 __ bind(&make_flat_ascii_string);
3499 // Both strings are ASCII strings. As they are short they are both flat.
3500 __ AllocateAsciiString(rax, rbx, rdi, r8, r9, &call_runtime);
3501 // rax: result string
3502 // Locate first character of result.
3503 __ lea(rbx, FieldOperand(rax, SeqOneByteString::kHeaderSize));
3504 // rcx: first char of first string
3505 // rbx: first character of result
3506 // r14: length of first string
3507 StringHelper::GenerateCopyCharacters(masm, rbx, rcx, r14, true);
3508 // rbx: next character of result
3509 // rdx: first char of second string
3510 // r15: length of second string
3511 StringHelper::GenerateCopyCharacters(masm, rbx, rdx, r15, true);
3512 __ IncrementCounter(counters->string_add_native(), 1);
3513 __ ret(2 * kPointerSize);
3514
3515 __ bind(&non_ascii_string_add_flat_result);
3516 // Both strings are ASCII strings. As they are short they are both flat.
3517 __ AllocateTwoByteString(rax, rbx, rdi, r8, r9, &call_runtime);
3518 // rax: result string
3519 // Locate first character of result.
3520 __ lea(rbx, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
3521 // rcx: first char of first string
3522 // rbx: first character of result
3523 // r14: length of first string
3524 StringHelper::GenerateCopyCharacters(masm, rbx, rcx, r14, false);
3525 // rbx: next character of result
3526 // rdx: first char of second string
3527 // r15: length of second string
3528 StringHelper::GenerateCopyCharacters(masm, rbx, rdx, r15, false);
3529 __ IncrementCounter(counters->string_add_native(), 1);
3530 __ ret(2 * kPointerSize);
3531
3532 // Just jump to runtime to add the two strings.
3533 __ bind(&call_runtime);
3534 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
3535
3536 if (call_builtin.is_linked()) {
3537 __ bind(&call_builtin);
3538 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
3539 }
3540 }
3541
3542
3543 void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
3544 __ push(rax);
3545 __ push(rdx);
3546 }
3547
3548
3549 void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm,
3550 Register temp) {
3551 __ PopReturnAddressTo(temp);
3552 __ pop(rdx);
3553 __ pop(rax);
3554 __ PushReturnAddressFrom(temp);
3555 }
3556
3557
3558 void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
3559 int stack_offset,
3560 Register arg,
3561 Register scratch1,
3562 Register scratch2,
3563 Register scratch3,
3564 Label* slow) {
3565 // First check if the argument is already a string.
3566 Label not_string, done;
3567 __ JumpIfSmi(arg, &not_string);
3568 __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
3569 __ j(below, &done);
3570
3571 // Check the number to string cache.
3572 __ bind(&not_string);
3573 // Puts the cached result into scratch1.
3574 __ LookupNumberStringCache(arg, scratch1, scratch2, scratch3, slow);
3575 __ movp(arg, scratch1);
3576 __ movp(Operand(rsp, stack_offset), arg);
3577 __ bind(&done);
3578 }
3579
3580
3581 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
3582 Register dest,
3583 Register src,
3584 Register count,
3585 bool ascii) {
3586 Label loop;
3587 __ bind(&loop);
3588 // This loop just copies one character at a time, as it is only used for very
3589 // short strings.
3590 if (ascii) {
3591 __ movb(kScratchRegister, Operand(src, 0));
3592 __ movb(Operand(dest, 0), kScratchRegister);
3593 __ incq(src);
3594 __ incq(dest);
3595 } else {
3596 __ movzxwl(kScratchRegister, Operand(src, 0));
3597 __ movw(Operand(dest, 0), kScratchRegister);
3598 __ addq(src, Immediate(2));
3599 __ addq(dest, Immediate(2));
3600 }
3601 __ decl(count);
3602 __ j(not_zero, &loop);
3603 }
3604
3605
3606 void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm, 3171 void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
3607 Register dest, 3172 Register dest,
3608 Register src, 3173 Register src,
3609 Register count, 3174 Register count,
3610 bool ascii) { 3175 bool ascii) {
3611 // Copy characters using rep movs of doublewords. Align destination on 4 byte 3176 // Copy characters using rep movs of doublewords. Align destination on 4 byte
3612 // boundary before starting rep movs. Copy remaining characters after running 3177 // boundary before starting rep movs. Copy remaining characters after running
3613 // rep movs. 3178 // rep movs.
3614 // Count is positive int32, dest and src are character pointers. 3179 // Count is positive int32, dest and src are character pointers.
3615 ASSERT(dest.is(rdi)); // rep movs destination 3180 ASSERT(dest.is(rdi)); // rep movs destination
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
3652 __ movb(kScratchRegister, Operand(src, 0)); 3217 __ movb(kScratchRegister, Operand(src, 0));
3653 __ movb(Operand(dest, 0), kScratchRegister); 3218 __ movb(Operand(dest, 0), kScratchRegister);
3654 __ incq(src); 3219 __ incq(src);
3655 __ incq(dest); 3220 __ incq(dest);
3656 __ decl(count); 3221 __ decl(count);
3657 __ j(not_zero, &loop); 3222 __ j(not_zero, &loop);
3658 3223
3659 __ bind(&done); 3224 __ bind(&done);
3660 } 3225 }
3661 3226
3662 void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
3663 Register c1,
3664 Register c2,
3665 Register scratch1,
3666 Register scratch2,
3667 Register scratch3,
3668 Register scratch4,
3669 Label* not_found) {
3670 // Register scratch3 is the general scratch register in this function.
3671 Register scratch = scratch3;
3672
3673 // Make sure that both characters are not digits as such strings has a
3674 // different hash algorithm. Don't try to look for these in the string table.
3675 Label not_array_index;
3676 __ leal(scratch, Operand(c1, -'0'));
3677 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
3678 __ j(above, &not_array_index, Label::kNear);
3679 __ leal(scratch, Operand(c2, -'0'));
3680 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
3681 __ j(below_equal, not_found);
3682
3683 __ bind(&not_array_index);
3684 // Calculate the two character string hash.
3685 Register hash = scratch1;
3686 GenerateHashInit(masm, hash, c1, scratch);
3687 GenerateHashAddCharacter(masm, hash, c2, scratch);
3688 GenerateHashGetHash(masm, hash, scratch);
3689
3690 // Collect the two characters in a register.
3691 Register chars = c1;
3692 __ shl(c2, Immediate(kBitsPerByte));
3693 __ orl(chars, c2);
3694
3695 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
3696 // hash: hash of two character string.
3697
3698 // Load the string table.
3699 Register string_table = c2;
3700 __ LoadRoot(string_table, Heap::kStringTableRootIndex);
3701
3702 // Calculate capacity mask from the string table capacity.
3703 Register mask = scratch2;
3704 __ SmiToInteger32(mask,
3705 FieldOperand(string_table, StringTable::kCapacityOffset));
3706 __ decl(mask);
3707
3708 Register map = scratch4;
3709
3710 // Registers
3711 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
3712 // hash: hash of two character string (32-bit int)
3713 // string_table: string table
3714 // mask: capacity mask (32-bit int)
3715 // map: -
3716 // scratch: -
3717
3718 // Perform a number of probes in the string table.
3719 static const int kProbes = 4;
3720 Label found_in_string_table;
3721 Label next_probe[kProbes];
3722 Register candidate = scratch; // Scratch register contains candidate.
3723 for (int i = 0; i < kProbes; i++) {
3724 // Calculate entry in string table.
3725 __ movl(scratch, hash);
3726 if (i > 0) {
3727 __ addl(scratch, Immediate(StringTable::GetProbeOffset(i)));
3728 }
3729 __ andl(scratch, mask);
3730
3731 // Load the entry from the string table.
3732 STATIC_ASSERT(StringTable::kEntrySize == 1);
3733 __ movp(candidate,
3734 FieldOperand(string_table,
3735 scratch,
3736 times_pointer_size,
3737 StringTable::kElementsStartOffset));
3738
3739 // If entry is undefined no string with this hash can be found.
3740 Label is_string;
3741 __ CmpObjectType(candidate, ODDBALL_TYPE, map);
3742 __ j(not_equal, &is_string, Label::kNear);
3743
3744 __ CompareRoot(candidate, Heap::kUndefinedValueRootIndex);
3745 __ j(equal, not_found);
3746 // Must be the hole (deleted entry).
3747 if (FLAG_debug_code) {
3748 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
3749 __ cmpq(kScratchRegister, candidate);
3750 __ Assert(equal, kOddballInStringTableIsNotUndefinedOrTheHole);
3751 }
3752 __ jmp(&next_probe[i]);
3753
3754 __ bind(&is_string);
3755
3756 // If length is not 2 the string is not a candidate.
3757 __ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
3758 Smi::FromInt(2));
3759 __ j(not_equal, &next_probe[i]);
3760
3761 // We use kScratchRegister as a temporary register in assumption that
3762 // JumpIfInstanceTypeIsNotSequentialAscii does not use it implicitly
3763 Register temp = kScratchRegister;
3764
3765 // Check that the candidate is a non-external ASCII string.
3766 __ movzxbl(temp, FieldOperand(map, Map::kInstanceTypeOffset));
3767 __ JumpIfInstanceTypeIsNotSequentialAscii(
3768 temp, temp, &next_probe[i]);
3769
3770 // Check if the two characters match.
3771 __ movl(temp, FieldOperand(candidate, SeqOneByteString::kHeaderSize));
3772 __ andl(temp, Immediate(0x0000ffff));
3773 __ cmpl(chars, temp);
3774 __ j(equal, &found_in_string_table);
3775 __ bind(&next_probe[i]);
3776 }
3777
3778 // No matching 2 character string found by probing.
3779 __ jmp(not_found);
3780
3781 // Scratch register contains result when we fall through to here.
3782 Register result = candidate;
3783 __ bind(&found_in_string_table);
3784 if (!result.is(rax)) {
3785 __ movp(rax, result);
3786 }
3787 }
3788
3789 3227
3790 void StringHelper::GenerateHashInit(MacroAssembler* masm, 3228 void StringHelper::GenerateHashInit(MacroAssembler* masm,
3791 Register hash, 3229 Register hash,
3792 Register character, 3230 Register character,
3793 Register scratch) { 3231 Register scratch) {
3794 // hash = (seed + character) + ((seed + character) << 10); 3232 // hash = (seed + character) + ((seed + character) << 10);
3795 __ LoadRoot(scratch, Heap::kHashSeedRootIndex); 3233 __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
3796 __ SmiToInteger32(scratch, scratch); 3234 __ SmiToInteger32(scratch, scratch);
3797 __ addl(scratch, character); 3235 __ addl(scratch, character);
3798 __ movl(hash, scratch); 3236 __ movl(hash, scratch);
(...skipping 451 matching lines...) Expand 10 before | Expand all | Expand 10 after
4250 __ PushReturnAddressFrom(rcx); 3688 __ PushReturnAddressFrom(rcx);
4251 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); 3689 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);
4252 3690
4253 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) 3691 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
4254 // tagged as a small integer. 3692 // tagged as a small integer.
4255 __ bind(&runtime); 3693 __ bind(&runtime);
4256 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 3694 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
4257 } 3695 }
4258 3696
4259 3697
3698 void ArrayPushStub::Generate(MacroAssembler* masm) {
3699 int argc = arguments_count();
3700
3701 StackArgumentsAccessor args(rsp, argc);
3702 if (argc == 0) {
3703 // Noop, return the length.
3704 __ movp(rax, FieldOperand(rdx, JSArray::kLengthOffset));
3705 __ ret((argc + 1) * kPointerSize);
3706 return;
3707 }
3708
3709 Isolate* isolate = masm->isolate();
3710
3711 if (argc != 1) {
3712 __ TailCallExternalReference(
3713 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3714 return;
3715 }
3716
3717 Label call_builtin, attempt_to_grow_elements, with_write_barrier;
3718
3719 // Get the elements array of the object.
3720 __ movp(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
3721
3722 if (IsFastSmiOrObjectElementsKind(elements_kind())) {
3723 // Check that the elements are in fast mode and writable.
3724 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
3725 isolate->factory()->fixed_array_map());
3726 __ j(not_equal, &call_builtin);
3727 }
3728
3729 // Get the array's length into rax and calculate new length.
3730 __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
3731 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
3732 __ addl(rax, Immediate(argc));
3733
3734 // Get the elements' length into rcx.
3735 __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
3736
3737 // Check if we could survive without allocation.
3738 __ cmpl(rax, rcx);
3739
3740 if (IsFastSmiOrObjectElementsKind(elements_kind())) {
3741 __ j(greater, &attempt_to_grow_elements);
3742
3743 // Check if value is a smi.
3744 __ movp(rcx, args.GetArgumentOperand(1));
3745 __ JumpIfNotSmi(rcx, &with_write_barrier);
3746
3747 // Store the value.
3748 __ movp(FieldOperand(rdi,
3749 rax,
3750 times_pointer_size,
3751 FixedArray::kHeaderSize - argc * kPointerSize),
3752 rcx);
3753 } else {
3754 __ j(greater, &call_builtin);
3755
3756 __ movp(rcx, args.GetArgumentOperand(1));
3757 __ StoreNumberToDoubleElements(
3758 rcx, rdi, rax, xmm0, &call_builtin, argc * kDoubleSize);
3759 }
3760
3761 // Save new length.
3762 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
3763
3764 __ Integer32ToSmi(rax, rax); // Return new length as smi.
3765 __ ret((argc + 1) * kPointerSize);
3766
3767 if (IsFastDoubleElementsKind(elements_kind())) {
3768 __ bind(&call_builtin);
3769 __ TailCallExternalReference(
3770 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3771 return;
3772 }
3773
3774 __ bind(&with_write_barrier);
3775
3776 if (IsFastSmiElementsKind(elements_kind())) {
3777 if (FLAG_trace_elements_transitions) __ jmp(&call_builtin);
3778
3779 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
3780 isolate->factory()->heap_number_map());
3781 __ j(equal, &call_builtin);
3782
3783 ElementsKind target_kind = IsHoleyElementsKind(elements_kind())
3784 ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
3785 __ movp(rbx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
3786 __ movp(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
3787 __ movp(rbx, ContextOperand(rbx, Context::JS_ARRAY_MAPS_INDEX));
3788 const int header_size = FixedArrayBase::kHeaderSize;
3789 // Verify that the object can be transitioned in place.
3790 const int origin_offset = header_size + elements_kind() * kPointerSize;
3791 __ movp(rdi, FieldOperand(rbx, origin_offset));
3792 __ cmpq(rdi, FieldOperand(rdx, HeapObject::kMapOffset));
3793 __ j(not_equal, &call_builtin);
3794
3795 const int target_offset = header_size + target_kind * kPointerSize;
3796 __ movp(rbx, FieldOperand(rbx, target_offset));
3797 ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
3798 masm, DONT_TRACK_ALLOCATION_SITE, NULL);
3799 __ movp(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
3800 }
3801
3802 // Save new length.
3803 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
3804
3805 // Store the value.
3806 __ lea(rdx, FieldOperand(rdi,
3807 rax, times_pointer_size,
3808 FixedArray::kHeaderSize - argc * kPointerSize));
3809 __ movp(Operand(rdx, 0), rcx);
3810
3811 __ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
3812 OMIT_SMI_CHECK);
3813
3814 __ Integer32ToSmi(rax, rax); // Return new length as smi.
3815 __ ret((argc + 1) * kPointerSize);
3816
3817 __ bind(&attempt_to_grow_elements);
3818 if (!FLAG_inline_new) {
3819 __ bind(&call_builtin);
3820 __ TailCallExternalReference(
3821 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3822 return;
3823 }
3824
3825 __ movp(rbx, args.GetArgumentOperand(1));
3826 // Growing elements that are SMI-only requires special handling in case the
3827 // new element is non-Smi. For now, delegate to the builtin.
3828 Label no_fast_elements_check;
3829 __ JumpIfSmi(rbx, &no_fast_elements_check);
3830 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
3831 __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
3832 __ bind(&no_fast_elements_check);
3833
3834 ExternalReference new_space_allocation_top =
3835 ExternalReference::new_space_allocation_top_address(isolate);
3836 ExternalReference new_space_allocation_limit =
3837 ExternalReference::new_space_allocation_limit_address(isolate);
3838
3839 const int kAllocationDelta = 4;
3840 ASSERT(kAllocationDelta >= argc);
3841 // Load top.
3842 __ Load(rcx, new_space_allocation_top);
3843
3844 // Check if it's the end of elements.
3845 __ lea(rdx, FieldOperand(rdi,
3846 rax, times_pointer_size,
3847 FixedArray::kHeaderSize - argc * kPointerSize));
3848 __ cmpq(rdx, rcx);
3849 __ j(not_equal, &call_builtin);
3850 __ addq(rcx, Immediate(kAllocationDelta * kPointerSize));
3851 Operand limit_operand = masm->ExternalOperand(new_space_allocation_limit);
3852 __ cmpq(rcx, limit_operand);
3853 __ j(above, &call_builtin);
3854
3855 // We fit and could grow elements.
3856 __ Store(new_space_allocation_top, rcx);
3857
3858 // Push the argument...
3859 __ movp(Operand(rdx, 0), rbx);
3860 // ... and fill the rest with holes.
3861 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
3862 for (int i = 1; i < kAllocationDelta; i++) {
3863 __ movp(Operand(rdx, i * kPointerSize), kScratchRegister);
3864 }
3865
3866 if (IsFastObjectElementsKind(elements_kind())) {
3867 // We know the elements array is in new space so we don't need the
3868 // remembered set, but we just pushed a value onto it so we may have to tell
3869 // the incremental marker to rescan the object that we just grew. We don't
3870 // need to worry about the holes because they are in old space and already
3871 // marked black.
3872 __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
3873 }
3874
3875 // Restore receiver to rdx as finish sequence assumes it's here.
3876 __ movp(rdx, args.GetReceiverOperand());
3877
3878 // Increment element's and array's sizes.
3879 __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset),
3880 Smi::FromInt(kAllocationDelta));
3881
3882 // Make new length a smi before returning it.
3883 __ Integer32ToSmi(rax, rax);
3884 __ movp(FieldOperand(rdx, JSArray::kLengthOffset), rax);
3885
3886 __ ret((argc + 1) * kPointerSize);
3887
3888 __ bind(&call_builtin);
3889 __ TailCallExternalReference(
3890 ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3891 }
3892
3893
4260 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { 3894 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
4261 // ----------- S t a t e ------------- 3895 // ----------- S t a t e -------------
4262 // -- rdx : left 3896 // -- rdx : left
4263 // -- rax : right 3897 // -- rax : right
4264 // -- rsp[0] : return address 3898 // -- rsp[0] : return address
4265 // ----------------------------------- 3899 // -----------------------------------
4266 Isolate* isolate = masm->isolate(); 3900 Isolate* isolate = masm->isolate();
4267 3901
4268 // Load rcx with the allocation site. We stick an undefined dummy value here 3902 // Load rcx with the allocation site. We stick an undefined dummy value here
4269 // and replace it with the real allocation site later when we instantiate this 3903 // and replace it with the real allocation site later when we instantiate this
(...skipping 864 matching lines...) Expand 10 before | Expand all | Expand 10 after
5134 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); 4768 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
5135 __ PopReturnAddressTo(rcx); 4769 __ PopReturnAddressTo(rcx);
5136 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE 4770 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
5137 ? kPointerSize 4771 ? kPointerSize
5138 : 0; 4772 : 0;
5139 __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); 4773 __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
5140 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. 4774 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack.
5141 } 4775 }
5142 4776
5143 4777
5144 void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
5145 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
5146 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
5147 __ movp(rdi, rax);
5148 int parameter_count_offset =
5149 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
5150 __ movp(rax, MemOperand(rbp, parameter_count_offset));
5151 // The parameter count above includes the receiver for the arguments passed to
5152 // the deoptimization handler. Subtract the receiver for the parameter count
5153 // for the call.
5154 __ subl(rax, Immediate(1));
5155 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
5156 ParameterCount argument_count(rax);
5157 __ InvokeFunction(rdi, argument_count, JUMP_FUNCTION, NullCallWrapper());
5158 }
5159
5160
5161 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { 4778 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
5162 if (masm->isolate()->function_entry_hook() != NULL) { 4779 if (masm->isolate()->function_entry_hook() != NULL) {
5163 ProfileEntryHookStub stub; 4780 ProfileEntryHookStub stub;
5164 masm->CallStub(&stub); 4781 masm->CallStub(&stub);
5165 } 4782 }
5166 } 4783 }
5167 4784
5168 4785
5169 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { 4786 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
5170 // This stub can be called from essentially anywhere, so it needs to save 4787 // This stub can be called from essentially anywhere, so it needs to save
5171 // all volatile and callee-save registers. 4788 // all volatile and callee-save registers.
5172 const size_t kNumSavedRegisters = 2; 4789 const size_t kNumSavedRegisters = 2;
5173 __ push(arg_reg_1); 4790 __ push(arg_reg_1);
5174 __ push(arg_reg_2); 4791 __ push(arg_reg_2);
5175 4792
5176 // Calculate the original stack pointer and store it in the second arg. 4793 // Calculate the original stack pointer and store it in the second arg.
5177 __ lea(arg_reg_2, 4794 __ lea(arg_reg_2,
5178 Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize)); 4795 Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));
5179 4796
5180 // Calculate the function address to the first arg. 4797 // Calculate the function address to the first arg.
5181 __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize)); 4798 __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
5182 __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)); 4799 __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
5183 4800
5184 // Save the remainder of the volatile registers. 4801 // Save the remainder of the volatile registers.
5185 masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); 4802 masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
5186 4803
5187 // Call the entry hook function. 4804 // Call the entry hook function.
5188 __ Move(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()), 4805 __ Move(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()),
5189 RelocInfo::NONE64); 4806 Assembler::RelocInfoNone());
5190 4807
5191 AllowExternalCallThatCantCauseGC scope(masm); 4808 AllowExternalCallThatCantCauseGC scope(masm);
5192 4809
5193 const int kArgumentCount = 2; 4810 const int kArgumentCount = 2;
5194 __ PrepareCallCFunction(kArgumentCount); 4811 __ PrepareCallCFunction(kArgumentCount);
5195 __ CallCFunction(rax, kArgumentCount); 4812 __ CallCFunction(rax, kArgumentCount);
5196 4813
5197 // Restore volatile regs. 4814 // Restore volatile regs.
5198 masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); 4815 masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
5199 __ pop(arg_reg_2); 4816 __ pop(arg_reg_2);
(...skipping 25 matching lines...) Expand all
5225 // If we reached this point there is a problem. 4842 // If we reached this point there is a problem.
5226 __ Abort(kUnexpectedElementsKindInArrayConstructor); 4843 __ Abort(kUnexpectedElementsKindInArrayConstructor);
5227 } else { 4844 } else {
5228 UNREACHABLE(); 4845 UNREACHABLE();
5229 } 4846 }
5230 } 4847 }
5231 4848
5232 4849
5233 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, 4850 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
5234 AllocationSiteOverrideMode mode) { 4851 AllocationSiteOverrideMode mode) {
5235 // rbx - type info cell (if mode != DISABLE_ALLOCATION_SITES) 4852 // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
5236 // rdx - kind (if mode != DISABLE_ALLOCATION_SITES) 4853 // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
5237 // rax - number of arguments 4854 // rax - number of arguments
5238 // rdi - constructor? 4855 // rdi - constructor?
5239 // rsp[0] - return address 4856 // rsp[0] - return address
5240 // rsp[8] - last argument 4857 // rsp[8] - last argument
5241 Handle<Object> undefined_sentinel( 4858 Handle<Object> undefined_sentinel(
5242 masm->isolate()->heap()->undefined_value(), 4859 masm->isolate()->heap()->undefined_value(),
5243 masm->isolate()); 4860 masm->isolate());
5244 4861
5245 Label normal_sequence; 4862 Label normal_sequence;
(...skipping 23 matching lines...) Expand all
5269 ArraySingleArgumentConstructorStub stub_holey(holey_initial, 4886 ArraySingleArgumentConstructorStub stub_holey(holey_initial,
5270 DISABLE_ALLOCATION_SITES); 4887 DISABLE_ALLOCATION_SITES);
5271 __ TailCallStub(&stub_holey); 4888 __ TailCallStub(&stub_holey);
5272 4889
5273 __ bind(&normal_sequence); 4890 __ bind(&normal_sequence);
5274 ArraySingleArgumentConstructorStub stub(initial, 4891 ArraySingleArgumentConstructorStub stub(initial,
5275 DISABLE_ALLOCATION_SITES); 4892 DISABLE_ALLOCATION_SITES);
5276 __ TailCallStub(&stub); 4893 __ TailCallStub(&stub);
5277 } else if (mode == DONT_OVERRIDE) { 4894 } else if (mode == DONT_OVERRIDE) {
5278 // We are going to create a holey array, but our kind is non-holey. 4895 // We are going to create a holey array, but our kind is non-holey.
5279 // Fix kind and retry (only if we have an allocation site in the cell). 4896 // Fix kind and retry (only if we have an allocation site in the slot).
5280 __ incl(rdx); 4897 __ incl(rdx);
5281 __ movp(rcx, FieldOperand(rbx, Cell::kValueOffset)); 4898
5282 if (FLAG_debug_code) { 4899 if (FLAG_debug_code) {
5283 Handle<Map> allocation_site_map = 4900 Handle<Map> allocation_site_map =
5284 masm->isolate()->factory()->allocation_site_map(); 4901 masm->isolate()->factory()->allocation_site_map();
5285 __ Cmp(FieldOperand(rcx, 0), allocation_site_map); 4902 __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
5286 __ Assert(equal, kExpectedAllocationSiteInCell); 4903 __ Assert(equal, kExpectedAllocationSite);
5287 } 4904 }
5288 4905
5289 // Save the resulting elements kind in type info. We can't just store r3 4906 // Save the resulting elements kind in type info. We can't just store r3
5290 // in the AllocationSite::transition_info field because elements kind is 4907 // in the AllocationSite::transition_info field because elements kind is
5291 // restricted to a portion of the field...upper bits need to be left alone. 4908 // restricted to a portion of the field...upper bits need to be left alone.
5292 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 4909 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
5293 __ SmiAddConstant(FieldOperand(rcx, AllocationSite::kTransitionInfoOffset), 4910 __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset),
5294 Smi::FromInt(kFastElementsKindPackedToHoley)); 4911 Smi::FromInt(kFastElementsKindPackedToHoley));
5295 4912
5296 __ bind(&normal_sequence); 4913 __ bind(&normal_sequence);
5297 int last_index = GetSequenceIndexFromFastElementsKind( 4914 int last_index = GetSequenceIndexFromFastElementsKind(
5298 TERMINAL_FAST_ELEMENTS_KIND); 4915 TERMINAL_FAST_ELEMENTS_KIND);
5299 for (int i = 0; i <= last_index; ++i) { 4916 for (int i = 0; i <= last_index; ++i) {
5300 Label next; 4917 Label next;
5301 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 4918 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
5302 __ cmpl(rdx, Immediate(kind)); 4919 __ cmpl(rdx, Immediate(kind));
5303 __ j(not_equal, &next); 4920 __ j(not_equal, &next);
(...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after
5379 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode); 4996 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
5380 } else { 4997 } else {
5381 UNREACHABLE(); 4998 UNREACHABLE();
5382 } 4999 }
5383 } 5000 }
5384 5001
5385 5002
5386 void ArrayConstructorStub::Generate(MacroAssembler* masm) { 5003 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
5387 // ----------- S t a t e ------------- 5004 // ----------- S t a t e -------------
5388 // -- rax : argc 5005 // -- rax : argc
5389 // -- rbx : type info cell 5006 // -- rbx : feedback vector (fixed array or undefined)
5007 // -- rdx : slot index (if ebx is fixed array)
5390 // -- rdi : constructor 5008 // -- rdi : constructor
5391 // -- rsp[0] : return address 5009 // -- rsp[0] : return address
5392 // -- rsp[8] : last argument 5010 // -- rsp[8] : last argument
5393 // ----------------------------------- 5011 // -----------------------------------
5394 Handle<Object> undefined_sentinel( 5012 Handle<Object> undefined_sentinel(
5395 masm->isolate()->heap()->undefined_value(), 5013 masm->isolate()->heap()->undefined_value(),
5396 masm->isolate()); 5014 masm->isolate());
5397 5015
5398 if (FLAG_debug_code) { 5016 if (FLAG_debug_code) {
5399 // The array construct code is only set for the global and natives 5017 // The array construct code is only set for the global and natives
5400 // builtin Array functions which always have maps. 5018 // builtin Array functions which always have maps.
5401 5019
5402 // Initial map for the builtin Array function should be a map. 5020 // Initial map for the builtin Array function should be a map.
5403 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 5021 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
5404 // Will both indicate a NULL and a Smi. 5022 // Will both indicate a NULL and a Smi.
5405 STATIC_ASSERT(kSmiTag == 0); 5023 STATIC_ASSERT(kSmiTag == 0);
5406 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); 5024 Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
5407 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); 5025 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
5408 __ CmpObjectType(rcx, MAP_TYPE, rcx); 5026 __ CmpObjectType(rcx, MAP_TYPE, rcx);
5409 __ Check(equal, kUnexpectedInitialMapForArrayFunction); 5027 __ Check(equal, kUnexpectedInitialMapForArrayFunction);
5410 5028
5411 // We should either have undefined in rbx or a valid cell 5029 // We should either have undefined in rbx or a valid fixed array.
5412 Label okay_here; 5030 Label okay_here;
5413 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); 5031 Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map();
5414 __ Cmp(rbx, undefined_sentinel); 5032 __ Cmp(rbx, undefined_sentinel);
5415 __ j(equal, &okay_here); 5033 __ j(equal, &okay_here);
5416 __ Cmp(FieldOperand(rbx, 0), cell_map); 5034 __ Cmp(FieldOperand(rbx, 0), fixed_array_map);
5417 __ Assert(equal, kExpectedPropertyCellInRegisterRbx); 5035 __ Assert(equal, kExpectedFixedArrayInRegisterRbx);
5036
5037 // rdx should be a smi if we don't have undefined in rbx.
5038 __ AssertSmi(rdx);
5039
5418 __ bind(&okay_here); 5040 __ bind(&okay_here);
5419 } 5041 }
5420 5042
5421 Label no_info; 5043 Label no_info;
5422 // If the type cell is undefined, or contains anything other than an 5044 // If the feedback slot is undefined, or contains anything other than an
5423 // AllocationSite, call an array constructor that doesn't use AllocationSites. 5045 // AllocationSite, call an array constructor that doesn't use AllocationSites.
5424 __ Cmp(rbx, undefined_sentinel); 5046 __ Cmp(rbx, undefined_sentinel);
5425 __ j(equal, &no_info); 5047 __ j(equal, &no_info);
5426 __ movp(rdx, FieldOperand(rbx, Cell::kValueOffset)); 5048 __ SmiToInteger32(rdx, rdx);
5427 __ Cmp(FieldOperand(rdx, 0), 5049 __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
5050 FixedArray::kHeaderSize));
5051 __ Integer32ToSmi(rdx, rdx);
5052 __ Cmp(FieldOperand(rbx, 0),
5428 masm->isolate()->factory()->allocation_site_map()); 5053 masm->isolate()->factory()->allocation_site_map());
5429 __ j(not_equal, &no_info); 5054 __ j(not_equal, &no_info);
5430 5055
5431 // Only look at the lower 16 bits of the transition info. 5056 // Only look at the lower 16 bits of the transition info.
5432 __ movp(rdx, FieldOperand(rdx, AllocationSite::kTransitionInfoOffset)); 5057 __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
5433 __ SmiToInteger32(rdx, rdx); 5058 __ SmiToInteger32(rdx, rdx);
5434 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 5059 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
5435 __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask)); 5060 __ and_(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
5436 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); 5061 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
5437 5062
5438 __ bind(&no_info); 5063 __ bind(&no_info);
5439 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); 5064 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
5440 } 5065 }
5441 5066
5442 5067
(...skipping 30 matching lines...) Expand all
5473 5098
5474 __ bind(&not_one_case); 5099 __ bind(&not_one_case);
5475 InternalArrayNArgumentsConstructorStub stubN(kind); 5100 InternalArrayNArgumentsConstructorStub stubN(kind);
5476 __ TailCallStub(&stubN); 5101 __ TailCallStub(&stubN);
5477 } 5102 }
5478 5103
5479 5104
5480 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { 5105 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
5481 // ----------- S t a t e ------------- 5106 // ----------- S t a t e -------------
5482 // -- rax : argc 5107 // -- rax : argc
5483 // -- rbx : type info cell
5484 // -- rdi : constructor 5108 // -- rdi : constructor
5485 // -- rsp[0] : return address 5109 // -- rsp[0] : return address
5486 // -- rsp[8] : last argument 5110 // -- rsp[8] : last argument
5487 // ----------------------------------- 5111 // -----------------------------------
5488 5112
5489 if (FLAG_debug_code) { 5113 if (FLAG_debug_code) {
5490 // The array construct code is only set for the global and natives 5114 // The array construct code is only set for the global and natives
5491 // builtin Array functions which always have maps. 5115 // builtin Array functions which always have maps.
5492 5116
5493 // Initial map for the builtin Array function should be a map. 5117 // Initial map for the builtin Array function should be a map.
(...skipping 29 matching lines...) Expand all
5523 Label fast_elements_case; 5147 Label fast_elements_case;
5524 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); 5148 __ cmpl(rcx, Immediate(FAST_ELEMENTS));
5525 __ j(equal, &fast_elements_case); 5149 __ j(equal, &fast_elements_case);
5526 GenerateCase(masm, FAST_HOLEY_ELEMENTS); 5150 GenerateCase(masm, FAST_HOLEY_ELEMENTS);
5527 5151
5528 __ bind(&fast_elements_case); 5152 __ bind(&fast_elements_case);
5529 GenerateCase(masm, FAST_ELEMENTS); 5153 GenerateCase(masm, FAST_ELEMENTS);
5530 } 5154 }
5531 5155
5532 5156
// Emits the trampoline that invokes a C++ API function callback
// (v8::FunctionCallback).  It materializes a FunctionCallbackArguments
// frame on the stack, builds a FunctionCallbackInfo in non-GCed stack
// space, and tail-calls through CallApiFunctionAndReturn, which handles
// the exit frame, HandleScope and context restoration.
void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : callee
  //  -- rbx                 : call_data
  //  -- rcx                 : holder
  //  -- rdx                 : api_function_address
  //  -- rsi                 : context
  //  --
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[argc * 8]       : first argument
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // Symbolic names for the incoming register assignment described above.
  Register callee = rax;
  Register call_data = rbx;
  Register holder = rcx;
  Register api_function_address = rdx;
  Register return_address = rdi;
  Register context = rsi;

  // Stub parameters are baked into the minor key at stub-creation time.
  int argc = ArgumentBits::decode(bit_field_);
  bool is_store = IsStoreBits::decode(bit_field_);
  bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);

  typedef FunctionCallbackArguments FCA;

  // The push sequence below must produce exactly this layout (index 0 is
  // the last value pushed, i.e. closest to the top of the stack).
  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kArgsLength == 7);

  // Pop the return address so the FCA slots can be pushed beneath it.
  __ PopReturnAddressTo(return_address);

  // context save
  __ push(context);
  // load context from callee
  __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));

  // callee
  __ push(callee);

  // call data
  __ push(call_data);
  // call_data is dead after the push above, so reuse its register.  If
  // call_data was already undefined, scratch still holds undefined and
  // the LoadRoot can be skipped.
  Register scratch = call_data;
  if (!call_data_undefined) {
    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  }
  // return value
  __ push(scratch);
  // return value default
  __ push(scratch);
  // isolate
  __ Move(scratch,
          ExternalReference::isolate_address(masm->isolate()));
  __ push(scratch);
  // holder
  __ push(holder);

  // Remember the base of the implicit-args area just pushed.
  __ movp(scratch, rsp);
  // Push return address back on stack.
  __ PushReturnAddressFrom(return_address);

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.  Four slots: implicit_args_, values_,
  // length_ and is_construct_call_, filled in below.
  const int kApiStackSpace = 4;

  __ PrepareCallApiFunction(kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ movp(StackSpaceOperand(0), scratch);
  // values_ points at the last JS argument (arguments grow downwards
  // from there).
  __ addq(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
  __ movp(StackSpaceOperand(1), scratch);  // FunctionCallbackInfo::values_.
  __ Set(StackSpaceOperand(2), argc);  // FunctionCallbackInfo::length_.
  // FunctionCallbackInfo::is_construct_call_.
  __ Set(StackSpaceOperand(3), 0);

  // First two C ABI argument registers differ between Win64 and SysV.
#if defined(__MINGW64__) || defined(_WIN64)
  Register arguments_arg = rcx;
  Register callback_arg = rdx;
#else
  Register arguments_arg = rdi;
  Register callback_arg = rsi;
#endif

  // It's okay if api_function_address == callback_arg
  // but not arguments_arg
  ASSERT(!api_function_address.is(arguments_arg));

  // v8::InvocationCallback's argument.
  __ lea(arguments_arg, StackSpaceOperand(0));

  // Thunk used when the profiler needs to intercept the callback.
  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);

  // Accessor for FunctionCallbackInfo and first js arg.
  StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
                                       ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
      FCA::kArgsLength - FCA::kContextSaveIndex);
  // Stores return the first js argument
  Operand return_value_operand = args_from_rbp.GetArgumentOperand(
      is_store ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
  __ CallApiFunctionAndReturn(
      api_function_address,
      thunk_address,
      callback_arg,
      argc + FCA::kArgsLength + 1,
      return_value_operand,
      &context_restore_operand);
}
5272
5273
// Emits the trampoline that invokes a C++ accessor getter callback
// (v8::AccessorGetterCallback).  The PropertyCallbackArguments frame is
// assumed to already be on the stack (see the state comment); this stub
// only builds the v8::PropertyCallbackInfo view and dispatches through
// CallApiFunctionAndReturn.
void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                  : return address
  //  -- rsp[8]                  : name
  //  -- rsp[16 - kArgsLength*8] : PropertyCallbackArguments object
  //  -- ...
  //  -- r8                      : api_function_address
  // -----------------------------------

  // C ABI argument registers differ between Win64 and SysV.
#if defined(__MINGW64__) || defined(_WIN64)
  Register getter_arg = r8;
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#else
  Register getter_arg = rdx;
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif
  Register api_function_address = r8;
  Register scratch = rax;

  // v8::Arguments::values_ and handler for name.
  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  // First C argument: pointer to the name handle (just past the return
  // address on the stack).
  __ lea(name_arg, Operand(rsp, kPCOnStackSize));

  __ PrepareCallApiFunction(kArgStackSpace);
  // scratch = start of the PropertyCallbackArguments values, one slot
  // past the name.
  __ lea(scratch, Operand(name_arg, 1 * kPointerSize));

  // v8::PropertyAccessorInfo::args_.
  __ movp(StackSpaceOperand(0), scratch);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ lea(accessor_info_arg, StackSpaceOperand(0));

  // Thunk used when the profiler needs to intercept the callback.
  Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);

  // It's okay if api_function_address == getter_arg
  // but not accessor_info_arg or name_arg
  ASSERT(!api_function_address.is(accessor_info_arg) &&
         !api_function_address.is(name_arg));

  // The name handler is counted as an argument.
  StackArgumentsAccessor args(rbp, PropertyCallbackArguments::kArgsLength);
  Operand return_value_operand = args.GetArgumentOperand(
      PropertyCallbackArguments::kArgsLength - 1 -
      PropertyCallbackArguments::kReturnValueOffset);
  // No context restore needed for getters (NULL context_restore_operand).
  __ CallApiFunctionAndReturn(api_function_address,
                              thunk_address,
                              getter_arg,
                              kStackSpace,
                              return_value_operand,
                              NULL);
}
5332
5333
5533 #undef __ 5334 #undef __
5534 5335
5535 } } // namespace v8::internal 5336 } } // namespace v8::internal
5536 5337
5537 #endif // V8_TARGET_ARCH_X64 5338 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/code-stubs-x64.h ('k') | src/x64/codegen-x64.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698