Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(404)

Side by Side Diff: src/ia32/code-stubs-ia32.cc

Issue 185653004: Experimental parser: merge to r19637 (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/ia32/code-stubs-ia32.h ('k') | src/ia32/codegen-ia32.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
47 Isolate* isolate, 47 Isolate* isolate,
48 CodeStubInterfaceDescriptor* descriptor) { 48 CodeStubInterfaceDescriptor* descriptor) {
49 static Register registers[] = { ebx }; 49 static Register registers[] = { ebx };
50 descriptor->register_param_count_ = 1; 50 descriptor->register_param_count_ = 1;
51 descriptor->register_params_ = registers; 51 descriptor->register_params_ = registers;
52 descriptor->deoptimization_handler_ = 52 descriptor->deoptimization_handler_ =
53 Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry; 53 Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry;
54 } 54 }
55 55
56 56
57 void FastNewContextStub::InitializeInterfaceDescriptor(
58 Isolate* isolate,
59 CodeStubInterfaceDescriptor* descriptor) {
60 static Register registers[] = { edi };
61 descriptor->register_param_count_ = 1;
62 descriptor->register_params_ = registers;
63 descriptor->deoptimization_handler_ = NULL;
64 }
65
66
57 void ToNumberStub::InitializeInterfaceDescriptor( 67 void ToNumberStub::InitializeInterfaceDescriptor(
58 Isolate* isolate, 68 Isolate* isolate,
59 CodeStubInterfaceDescriptor* descriptor) { 69 CodeStubInterfaceDescriptor* descriptor) {
60 static Register registers[] = { eax }; 70 static Register registers[] = { eax };
61 descriptor->register_param_count_ = 1; 71 descriptor->register_param_count_ = 1;
62 descriptor->register_params_ = registers; 72 descriptor->register_params_ = registers;
63 descriptor->deoptimization_handler_ = NULL; 73 descriptor->deoptimization_handler_ = NULL;
64 } 74 }
65 75
66 76
(...skipping 26 matching lines...) Expand all
93 descriptor->register_param_count_ = 4; 103 descriptor->register_param_count_ = 4;
94 descriptor->register_params_ = registers; 104 descriptor->register_params_ = registers;
95 descriptor->deoptimization_handler_ = 105 descriptor->deoptimization_handler_ =
96 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry; 106 Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry;
97 } 107 }
98 108
99 109
100 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( 110 void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
101 Isolate* isolate, 111 Isolate* isolate,
102 CodeStubInterfaceDescriptor* descriptor) { 112 CodeStubInterfaceDescriptor* descriptor) {
103 static Register registers[] = { ebx }; 113 static Register registers[] = { ebx, edx };
104 descriptor->register_param_count_ = 1; 114 descriptor->register_param_count_ = 2;
105 descriptor->register_params_ = registers; 115 descriptor->register_params_ = registers;
106 descriptor->deoptimization_handler_ = NULL; 116 descriptor->deoptimization_handler_ = NULL;
107 } 117 }
108 118
109 119
110 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( 120 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
111 Isolate* isolate, 121 Isolate* isolate,
112 CodeStubInterfaceDescriptor* descriptor) { 122 CodeStubInterfaceDescriptor* descriptor) {
113 static Register registers[] = { edx, ecx }; 123 static Register registers[] = { edx, ecx };
114 descriptor->register_param_count_ = 2; 124 descriptor->register_param_count_ = 2;
115 descriptor->register_params_ = registers; 125 descriptor->register_params_ = registers;
116 descriptor->deoptimization_handler_ = 126 descriptor->deoptimization_handler_ =
117 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); 127 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
118 } 128 }
119 129
120 130
121 void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor( 131 void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor(
122 Isolate* isolate, 132 Isolate* isolate,
123 CodeStubInterfaceDescriptor* descriptor) { 133 CodeStubInterfaceDescriptor* descriptor) {
124 static Register registers[] = { edx, ecx }; 134 static Register registers[] = { edx, ecx };
125 descriptor->register_param_count_ = 2; 135 descriptor->register_param_count_ = 2;
126 descriptor->register_params_ = registers; 136 descriptor->register_params_ = registers;
127 descriptor->deoptimization_handler_ = 137 descriptor->deoptimization_handler_ =
128 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); 138 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
129 } 139 }
130 140
131 141
142 void RegExpConstructResultStub::InitializeInterfaceDescriptor(
143 Isolate* isolate,
144 CodeStubInterfaceDescriptor* descriptor) {
145 static Register registers[] = { ecx, ebx, eax };
146 descriptor->register_param_count_ = 3;
147 descriptor->register_params_ = registers;
148 descriptor->deoptimization_handler_ =
149 Runtime::FunctionForId(Runtime::kRegExpConstructResult)->entry;
150 }
151
152
132 void LoadFieldStub::InitializeInterfaceDescriptor( 153 void LoadFieldStub::InitializeInterfaceDescriptor(
133 Isolate* isolate, 154 Isolate* isolate,
134 CodeStubInterfaceDescriptor* descriptor) { 155 CodeStubInterfaceDescriptor* descriptor) {
135 static Register registers[] = { edx }; 156 static Register registers[] = { edx };
136 descriptor->register_param_count_ = 1;
137 descriptor->register_params_ = registers;
138 descriptor->deoptimization_handler_ = NULL;
139 }
140
141
142 void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
143 Isolate* isolate,
144 CodeStubInterfaceDescriptor* descriptor) {
145 static Register registers[] = { edx };
146 descriptor->register_param_count_ = 1; 157 descriptor->register_param_count_ = 1;
147 descriptor->register_params_ = registers; 158 descriptor->register_params_ = registers;
148 descriptor->deoptimization_handler_ = NULL; 159 descriptor->deoptimization_handler_ = NULL;
149 } 160 }
150 161
151 162
152 void KeyedArrayCallStub::InitializeInterfaceDescriptor( 163 void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
153 Isolate* isolate, 164 Isolate* isolate,
154 CodeStubInterfaceDescriptor* descriptor) { 165 CodeStubInterfaceDescriptor* descriptor) {
155 static Register registers[] = { ecx }; 166 static Register registers[] = { edx };
156 descriptor->register_param_count_ = 1; 167 descriptor->register_param_count_ = 1;
157 descriptor->register_params_ = registers; 168 descriptor->register_params_ = registers;
158 descriptor->continuation_type_ = TAIL_CALL_CONTINUATION; 169 descriptor->deoptimization_handler_ = NULL;
159 descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
160 descriptor->deoptimization_handler_ =
161 FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure);
162 } 170 }
163 171
164 172
165 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor( 173 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
166 Isolate* isolate, 174 Isolate* isolate,
167 CodeStubInterfaceDescriptor* descriptor) { 175 CodeStubInterfaceDescriptor* descriptor) {
168 static Register registers[] = { edx, ecx, eax }; 176 static Register registers[] = { edx, ecx, eax };
169 descriptor->register_param_count_ = 3; 177 descriptor->register_param_count_ = 3;
170 descriptor->register_params_ = registers; 178 descriptor->register_params_ = registers;
171 descriptor->deoptimization_handler_ = 179 descriptor->deoptimization_handler_ =
(...skipping 12 matching lines...) Expand all
184 } 192 }
185 193
186 194
187 static void InitializeArrayConstructorDescriptor( 195 static void InitializeArrayConstructorDescriptor(
188 Isolate* isolate, 196 Isolate* isolate,
189 CodeStubInterfaceDescriptor* descriptor, 197 CodeStubInterfaceDescriptor* descriptor,
190 int constant_stack_parameter_count) { 198 int constant_stack_parameter_count) {
191 // register state 199 // register state
192 // eax -- number of arguments 200 // eax -- number of arguments
193 // edi -- function 201 // edi -- function
194 // ebx -- type info cell with elements kind 202 // ebx -- allocation site with elements kind
195 static Register registers_variable_args[] = { edi, ebx, eax }; 203 static Register registers_variable_args[] = { edi, ebx, eax };
196 static Register registers_no_args[] = { edi, ebx }; 204 static Register registers_no_args[] = { edi, ebx };
197 205
198 if (constant_stack_parameter_count == 0) { 206 if (constant_stack_parameter_count == 0) {
199 descriptor->register_param_count_ = 2; 207 descriptor->register_param_count_ = 2;
200 descriptor->register_params_ = registers_no_args; 208 descriptor->register_params_ = registers_no_args;
201 } else { 209 } else {
202 // stack param count needs (constructor pointer, and single argument) 210 // stack param count needs (constructor pointer, and single argument)
203 descriptor->handler_arguments_mode_ = PASS_ARGUMENTS; 211 descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
204 descriptor->stack_parameter_count_ = eax; 212 descriptor->stack_parameter_count_ = eax;
(...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after
346 Isolate* isolate, 354 Isolate* isolate,
347 CodeStubInterfaceDescriptor* descriptor) { 355 CodeStubInterfaceDescriptor* descriptor) {
348 static Register registers[] = { ecx, edx, eax }; 356 static Register registers[] = { ecx, edx, eax };
349 descriptor->register_param_count_ = 3; 357 descriptor->register_param_count_ = 3;
350 descriptor->register_params_ = registers; 358 descriptor->register_params_ = registers;
351 descriptor->deoptimization_handler_ = 359 descriptor->deoptimization_handler_ =
352 FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite); 360 FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite);
353 } 361 }
354 362
355 363
356 void NewStringAddStub::InitializeInterfaceDescriptor( 364 void StringAddStub::InitializeInterfaceDescriptor(
357 Isolate* isolate, 365 Isolate* isolate,
358 CodeStubInterfaceDescriptor* descriptor) { 366 CodeStubInterfaceDescriptor* descriptor) {
359 static Register registers[] = { edx, eax }; 367 static Register registers[] = { edx, eax };
360 descriptor->register_param_count_ = 2; 368 descriptor->register_param_count_ = 2;
361 descriptor->register_params_ = registers; 369 descriptor->register_params_ = registers;
362 descriptor->deoptimization_handler_ = 370 descriptor->deoptimization_handler_ =
363 Runtime::FunctionForId(Runtime::kStringAdd)->entry; 371 Runtime::FunctionForId(Runtime::kStringAdd)->entry;
364 } 372 }
365 373
366 374
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
404 ecx, // name 412 ecx, // name
405 }; 413 };
406 static Representation representations[] = { 414 static Representation representations[] = {
407 Representation::Tagged(), // context 415 Representation::Tagged(), // context
408 Representation::Tagged(), // name 416 Representation::Tagged(), // name
409 }; 417 };
410 descriptor->register_param_count_ = 2; 418 descriptor->register_param_count_ = 2;
411 descriptor->register_params_ = registers; 419 descriptor->register_params_ = registers;
412 descriptor->param_representations_ = representations; 420 descriptor->param_representations_ = representations;
413 } 421 }
422 {
423 CallInterfaceDescriptor* descriptor =
424 isolate->call_descriptor(Isolate::CallHandler);
425 static Register registers[] = { esi, // context
426 edx, // receiver
427 };
428 static Representation representations[] = {
429 Representation::Tagged(), // context
430 Representation::Tagged(), // receiver
431 };
432 descriptor->register_param_count_ = 2;
433 descriptor->register_params_ = registers;
434 descriptor->param_representations_ = representations;
435 }
436 {
437 CallInterfaceDescriptor* descriptor =
438 isolate->call_descriptor(Isolate::ApiFunctionCall);
439 static Register registers[] = { eax, // callee
440 ebx, // call_data
441 ecx, // holder
442 edx, // api_function_address
443 esi, // context
444 };
445 static Representation representations[] = {
446 Representation::Tagged(), // callee
447 Representation::Tagged(), // call_data
448 Representation::Tagged(), // holder
449 Representation::External(), // api_function_address
450 Representation::Tagged(), // context
451 };
452 descriptor->register_param_count_ = 5;
453 descriptor->register_params_ = registers;
454 descriptor->param_representations_ = representations;
455 }
414 } 456 }
415 457
416 458
417 #define __ ACCESS_MASM(masm) 459 #define __ ACCESS_MASM(masm)
418 460
419 461
420 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { 462 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
421 // Update the static counter each time a new code stub is generated. 463 // Update the static counter each time a new code stub is generated.
422 Isolate* isolate = masm->isolate(); 464 Isolate* isolate = masm->isolate();
423 isolate->counters()->code_stubs()->Increment(); 465 isolate->counters()->code_stubs()->Increment();
(...skipping 10 matching lines...) Expand all
434 __ push(descriptor->register_params_[i]); 476 __ push(descriptor->register_params_[i]);
435 } 477 }
436 ExternalReference miss = descriptor->miss_handler(); 478 ExternalReference miss = descriptor->miss_handler();
437 __ CallExternalReference(miss, descriptor->register_param_count_); 479 __ CallExternalReference(miss, descriptor->register_param_count_);
438 } 480 }
439 481
440 __ ret(0); 482 __ ret(0);
441 } 483 }
442 484
443 485
444 void FastNewContextStub::Generate(MacroAssembler* masm) {
445 // Try to allocate the context in new space.
446 Label gc;
447 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
448 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize,
449 eax, ebx, ecx, &gc, TAG_OBJECT);
450
451 // Get the function from the stack.
452 __ mov(ecx, Operand(esp, 1 * kPointerSize));
453
454 // Set up the object header.
455 Factory* factory = masm->isolate()->factory();
456 __ mov(FieldOperand(eax, HeapObject::kMapOffset),
457 factory->function_context_map());
458 __ mov(FieldOperand(eax, Context::kLengthOffset),
459 Immediate(Smi::FromInt(length)));
460
461 // Set up the fixed slots.
462 __ Set(ebx, Immediate(0)); // Set to NULL.
463 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
464 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), esi);
465 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);
466
467 // Copy the global object from the previous context.
468 __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
469 __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), ebx);
470
471 // Initialize the rest of the slots to undefined.
472 __ mov(ebx, factory->undefined_value());
473 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
474 __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
475 }
476
477 // Return and remove the on-stack parameter.
478 __ mov(esi, eax);
479 __ ret(1 * kPointerSize);
480
481 // Need to collect. Call into runtime system.
482 __ bind(&gc);
483 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
484 }
485
486
487 void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
488 // Stack layout on entry:
489 //
490 // [esp + (1 * kPointerSize)]: function
491 // [esp + (2 * kPointerSize)]: serialized scope info
492
493 // Try to allocate the context in new space.
494 Label gc;
495 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
496 __ Allocate(FixedArray::SizeFor(length), eax, ebx, ecx, &gc, TAG_OBJECT);
497
498 // Get the function or sentinel from the stack.
499 __ mov(ecx, Operand(esp, 1 * kPointerSize));
500
501 // Get the serialized scope info from the stack.
502 __ mov(ebx, Operand(esp, 2 * kPointerSize));
503
504 // Set up the object header.
505 Factory* factory = masm->isolate()->factory();
506 __ mov(FieldOperand(eax, HeapObject::kMapOffset),
507 factory->block_context_map());
508 __ mov(FieldOperand(eax, Context::kLengthOffset),
509 Immediate(Smi::FromInt(length)));
510
511 // If this block context is nested in the native context we get a smi
512 // sentinel instead of a function. The block context should get the
513 // canonical empty function of the native context as its closure which
514 // we still have to look up.
515 Label after_sentinel;
516 __ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
517 if (FLAG_debug_code) {
518 __ cmp(ecx, 0);
519 __ Assert(equal, kExpected0AsASmiSentinel);
520 }
521 __ mov(ecx, GlobalObjectOperand());
522 __ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset));
523 __ mov(ecx, ContextOperand(ecx, Context::CLOSURE_INDEX));
524 __ bind(&after_sentinel);
525
526 // Set up the fixed slots.
527 __ mov(ContextOperand(eax, Context::CLOSURE_INDEX), ecx);
528 __ mov(ContextOperand(eax, Context::PREVIOUS_INDEX), esi);
529 __ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx);
530
531 // Copy the global object from the previous context.
532 __ mov(ebx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
533 __ mov(ContextOperand(eax, Context::GLOBAL_OBJECT_INDEX), ebx);
534
535 // Initialize the rest of the slots to the hole value.
536 if (slots_ == 1) {
537 __ mov(ContextOperand(eax, Context::MIN_CONTEXT_SLOTS),
538 factory->the_hole_value());
539 } else {
540 __ mov(ebx, factory->the_hole_value());
541 for (int i = 0; i < slots_; i++) {
542 __ mov(ContextOperand(eax, i + Context::MIN_CONTEXT_SLOTS), ebx);
543 }
544 }
545
546 // Return and remove the on-stack parameters.
547 __ mov(esi, eax);
548 __ ret(2 * kPointerSize);
549
550 // Need to collect. Call into runtime system.
551 __ bind(&gc);
552 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
553 }
554
555
556 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { 486 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
557 // We don't allow a GC during a store buffer overflow so there is no need to 487 // We don't allow a GC during a store buffer overflow so there is no need to
558 // store the registers in any particular way, but we do have to store and 488 // store the registers in any particular way, but we do have to store and
559 // restore them. 489 // restore them.
560 __ pushad(); 490 __ pushad();
561 if (save_doubles_ == kSaveFPRegs) { 491 if (save_doubles_ == kSaveFPRegs) {
562 CpuFeatureScope scope(masm, SSE2); 492 CpuFeatureScope scope(masm, SSE2);
563 __ sub(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters)); 493 __ sub(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
564 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { 494 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
565 XMMRegister reg = XMMRegister::from_code(i); 495 XMMRegister reg = XMMRegister::from_code(i);
(...skipping 1485 matching lines...) Expand 10 before | Expand all | Expand 10 after
2051 1981
2052 // (11) Sliced string. Replace subject with parent. Go to (5a). 1982 // (11) Sliced string. Replace subject with parent. Go to (5a).
2053 // Load offset into edi and replace subject string with parent. 1983 // Load offset into edi and replace subject string with parent.
2054 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset)); 1984 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
2055 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset)); 1985 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
2056 __ jmp(&check_underlying); // Go to (5a). 1986 __ jmp(&check_underlying); // Go to (5a).
2057 #endif // V8_INTERPRETED_REGEXP 1987 #endif // V8_INTERPRETED_REGEXP
2058 } 1988 }
2059 1989
2060 1990
2061 void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
2062 const int kMaxInlineLength = 100;
2063 Label slowcase;
2064 Label done;
2065 __ mov(ebx, Operand(esp, kPointerSize * 3));
2066 __ JumpIfNotSmi(ebx, &slowcase);
2067 __ cmp(ebx, Immediate(Smi::FromInt(kMaxInlineLength)));
2068 __ j(above, &slowcase);
2069 // Smi-tagging is equivalent to multiplying by 2.
2070 STATIC_ASSERT(kSmiTag == 0);
2071 STATIC_ASSERT(kSmiTagSize == 1);
2072 // Allocate RegExpResult followed by FixedArray with size in ebx.
2073 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
2074 // Elements: [Map][Length][..elements..]
2075 __ Allocate(JSRegExpResult::kSize + FixedArray::kHeaderSize,
2076 times_pointer_size,
2077 ebx, // In: Number of elements as a smi
2078 REGISTER_VALUE_IS_SMI,
2079 eax, // Out: Start of allocation (tagged).
2080 ecx, // Out: End of allocation.
2081 edx, // Scratch register
2082 &slowcase,
2083 TAG_OBJECT);
2084 // eax: Start of allocated area, object-tagged.
2085
2086 // Set JSArray map to global.regexp_result_map().
2087 // Set empty properties FixedArray.
2088 // Set elements to point to FixedArray allocated right after the JSArray.
2089 // Interleave operations for better latency.
2090 __ mov(edx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
2091 Factory* factory = masm->isolate()->factory();
2092 __ mov(ecx, Immediate(factory->empty_fixed_array()));
2093 __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
2094 __ mov(edx, FieldOperand(edx, GlobalObject::kNativeContextOffset));
2095 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
2096 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
2097 __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
2098 __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);
2099
2100 // Set input, index and length fields from arguments.
2101 __ mov(ecx, Operand(esp, kPointerSize * 1));
2102 __ mov(FieldOperand(eax, JSRegExpResult::kInputOffset), ecx);
2103 __ mov(ecx, Operand(esp, kPointerSize * 2));
2104 __ mov(FieldOperand(eax, JSRegExpResult::kIndexOffset), ecx);
2105 __ mov(ecx, Operand(esp, kPointerSize * 3));
2106 __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);
2107
2108 // Fill out the elements FixedArray.
2109 // eax: JSArray.
2110 // ebx: FixedArray.
2111 // ecx: Number of elements in array, as smi.
2112
2113 // Set map.
2114 __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
2115 Immediate(factory->fixed_array_map()));
2116 // Set length.
2117 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
2118 // Fill contents of fixed-array with undefined.
2119 __ SmiUntag(ecx);
2120 __ mov(edx, Immediate(factory->undefined_value()));
2121 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
2122 // Fill fixed array elements with undefined.
2123 // eax: JSArray.
2124 // ecx: Number of elements to fill.
2125 // ebx: Start of elements in FixedArray.
2126 // edx: undefined.
2127 Label loop;
2128 __ test(ecx, ecx);
2129 __ bind(&loop);
2130 __ j(less_equal, &done, Label::kNear); // Jump if ecx is negative or zero.
2131 __ sub(ecx, Immediate(1));
2132 __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
2133 __ jmp(&loop);
2134
2135 __ bind(&done);
2136 __ ret(3 * kPointerSize);
2137
2138 __ bind(&slowcase);
2139 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
2140 }
2141
2142
2143 static int NegativeComparisonResult(Condition cc) { 1991 static int NegativeComparisonResult(Condition cc) {
2144 ASSERT(cc != equal); 1992 ASSERT(cc != equal);
2145 ASSERT((cc == less) || (cc == less_equal) 1993 ASSERT((cc == less) || (cc == less_equal)
2146 || (cc == greater) || (cc == greater_equal)); 1994 || (cc == greater) || (cc == greater_equal));
2147 return (cc == greater || cc == greater_equal) ? LESS : GREATER; 1995 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
2148 } 1996 }
2149 1997
2150 1998
2151 static void CheckInputType(MacroAssembler* masm, 1999 static void CheckInputType(MacroAssembler* masm,
2152 Register input, 2000 Register input,
(...skipping 314 matching lines...) Expand 10 before | Expand all | Expand 10 after
2467 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) 2315 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
2468 // tagged as a small integer. 2316 // tagged as a small integer.
2469 __ InvokeBuiltin(builtin, JUMP_FUNCTION); 2317 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
2470 2318
2471 __ bind(&miss); 2319 __ bind(&miss);
2472 GenerateMiss(masm); 2320 GenerateMiss(masm);
2473 } 2321 }
2474 2322
2475 2323
2476 static void GenerateRecordCallTarget(MacroAssembler* masm) { 2324 static void GenerateRecordCallTarget(MacroAssembler* masm) {
2477 // Cache the called function in a global property cell. Cache states 2325 // Cache the called function in a feedback vector slot. Cache states
2478 // are uninitialized, monomorphic (indicated by a JSFunction), and 2326 // are uninitialized, monomorphic (indicated by a JSFunction), and
2479 // megamorphic. 2327 // megamorphic.
2480 // eax : number of arguments to the construct function 2328 // eax : number of arguments to the construct function
2481 // ebx : cache cell for call target 2329 // ebx : Feedback vector
2330 // edx : slot in feedback vector (Smi)
2482 // edi : the function to call 2331 // edi : the function to call
2483 Isolate* isolate = masm->isolate(); 2332 Isolate* isolate = masm->isolate();
2484 Label initialize, done, miss, megamorphic, not_array_function; 2333 Label initialize, done, miss, megamorphic, not_array_function;
2485 2334
2486 // Load the cache state into ecx. 2335 // Load the cache state into ecx.
2487 __ mov(ecx, FieldOperand(ebx, Cell::kValueOffset)); 2336 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
2337 FixedArray::kHeaderSize));
2488 2338
2489 // A monomorphic cache hit or an already megamorphic state: invoke the 2339 // A monomorphic cache hit or an already megamorphic state: invoke the
2490 // function without changing the state. 2340 // function without changing the state.
2491 __ cmp(ecx, edi); 2341 __ cmp(ecx, edi);
2492 __ j(equal, &done); 2342 __ j(equal, &done, Label::kFar);
2493 __ cmp(ecx, Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate))); 2343 __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2494 __ j(equal, &done); 2344 __ j(equal, &done, Label::kFar);
2495 2345
2496 // If we came here, we need to see if we are the array function. 2346 // If we came here, we need to see if we are the array function.
2497 // If we didn't have a matching function, and we didn't find the megamorph 2347 // If we didn't have a matching function, and we didn't find the megamorph
2498 // sentinel, then we have in the cell either some other function or an 2348 // sentinel, then we have in the slot either some other function or an
2499 // AllocationSite. Do a map check on the object in ecx. 2349 // AllocationSite. Do a map check on the object in ecx.
2500 Handle<Map> allocation_site_map = 2350 Handle<Map> allocation_site_map =
2501 masm->isolate()->factory()->allocation_site_map(); 2351 masm->isolate()->factory()->allocation_site_map();
2502 __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map)); 2352 __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
2503 __ j(not_equal, &miss); 2353 __ j(not_equal, &miss);
2504 2354
2505 // Load the global or builtins object from the current context
2506 __ LoadGlobalContext(ecx);
2507 // Make sure the function is the Array() function 2355 // Make sure the function is the Array() function
2508 __ cmp(edi, Operand(ecx, 2356 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
2509 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); 2357 __ cmp(edi, ecx);
2510 __ j(not_equal, &megamorphic); 2358 __ j(not_equal, &megamorphic);
2511 __ jmp(&done); 2359 __ jmp(&done, Label::kFar);
2512 2360
2513 __ bind(&miss); 2361 __ bind(&miss);
2514 2362
2515 // A monomorphic miss (i.e, here the cache is not uninitialized) goes 2363 // A monomorphic miss (i.e, here the cache is not uninitialized) goes
2516 // megamorphic. 2364 // megamorphic.
2517 __ cmp(ecx, Immediate(TypeFeedbackCells::UninitializedSentinel(isolate))); 2365 __ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
2518 __ j(equal, &initialize); 2366 __ j(equal, &initialize);
2519 // MegamorphicSentinel is an immortal immovable object (undefined) so no 2367 // MegamorphicSentinel is an immortal immovable object (undefined) so no
2520 // write-barrier is needed. 2368 // write-barrier is needed.
2521 __ bind(&megamorphic); 2369 __ bind(&megamorphic);
2522 __ mov(FieldOperand(ebx, Cell::kValueOffset), 2370 __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2523 Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate))); 2371 FixedArray::kHeaderSize),
2524 __ jmp(&done, Label::kNear); 2372 Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2373 __ jmp(&done, Label::kFar);
2525 2374
2526 // An uninitialized cache is patched with the function or sentinel to 2375 // An uninitialized cache is patched with the function or sentinel to
2527 // indicate the ElementsKind if function is the Array constructor. 2376 // indicate the ElementsKind if function is the Array constructor.
2528 __ bind(&initialize); 2377 __ bind(&initialize);
2529 __ LoadGlobalContext(ecx);
2530 // Make sure the function is the Array() function 2378 // Make sure the function is the Array() function
2531 __ cmp(edi, Operand(ecx, 2379 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
2532 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); 2380 __ cmp(edi, ecx);
2533 __ j(not_equal, &not_array_function); 2381 __ j(not_equal, &not_array_function);
2534 2382
2535 // The target function is the Array constructor, 2383 // The target function is the Array constructor,
2536 // Create an AllocationSite if we don't already have it, store it in the cell 2384 // Create an AllocationSite if we don't already have it, store it in the slot.
2537 { 2385 {
2538 FrameScope scope(masm, StackFrame::INTERNAL); 2386 FrameScope scope(masm, StackFrame::INTERNAL);
2539 2387
2540 // Arguments register must be smi-tagged to call out. 2388 // Arguments register must be smi-tagged to call out.
2541 __ SmiTag(eax); 2389 __ SmiTag(eax);
2542 __ push(eax); 2390 __ push(eax);
2543 __ push(edi); 2391 __ push(edi);
2392 __ push(edx);
2544 __ push(ebx); 2393 __ push(ebx);
2545 2394
2546 CreateAllocationSiteStub create_stub; 2395 CreateAllocationSiteStub create_stub;
2547 __ CallStub(&create_stub); 2396 __ CallStub(&create_stub);
2548 2397
2549 __ pop(ebx); 2398 __ pop(ebx);
2399 __ pop(edx);
2550 __ pop(edi); 2400 __ pop(edi);
2551 __ pop(eax); 2401 __ pop(eax);
2552 __ SmiUntag(eax); 2402 __ SmiUntag(eax);
2553 } 2403 }
2554 __ jmp(&done); 2404 __ jmp(&done);
2555 2405
2556 __ bind(&not_array_function); 2406 __ bind(&not_array_function);
2557 __ mov(FieldOperand(ebx, Cell::kValueOffset), edi); 2407 __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2558 // No need for a write barrier here - cells are rescanned. 2408 FixedArray::kHeaderSize),
2409 edi);
2410 // We won't need edx or ebx anymore, just save edi
2411 __ push(edi);
2412 __ push(ebx);
2413 __ push(edx);
2414 __ RecordWriteArray(ebx, edi, edx, kDontSaveFPRegs,
2415 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
2416 __ pop(edx);
2417 __ pop(ebx);
2418 __ pop(edi);
2559 2419
2560 __ bind(&done); 2420 __ bind(&done);
2561 } 2421 }
2562 2422
2563 2423
2564 void CallFunctionStub::Generate(MacroAssembler* masm) { 2424 void CallFunctionStub::Generate(MacroAssembler* masm) {
2565 // ebx : cache cell for call target 2425 // ebx : feedback vector
2426 // edx : (only if ebx is not undefined) slot in feedback vector (Smi)
2566 // edi : the function to call 2427 // edi : the function to call
2567 Isolate* isolate = masm->isolate(); 2428 Isolate* isolate = masm->isolate();
2568 Label slow, non_function; 2429 Label slow, non_function, wrap, cont;
2569 2430
2570 // Check that the function really is a JavaScript function. 2431 if (NeedsChecks()) {
2571 __ JumpIfSmi(edi, &non_function); 2432 // Check that the function really is a JavaScript function.
2433 __ JumpIfSmi(edi, &non_function);
2572 2434
2573 // Goto slow case if we do not have a function. 2435 // Goto slow case if we do not have a function.
2574 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); 2436 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2575 __ j(not_equal, &slow); 2437 __ j(not_equal, &slow);
2576 2438
2577 if (RecordCallTarget()) { 2439 if (RecordCallTarget()) {
2578 GenerateRecordCallTarget(masm); 2440 GenerateRecordCallTarget(masm);
2441 }
2579 } 2442 }
2580 2443
2581 // Fast-case: Just invoke the function. 2444 // Fast-case: Just invoke the function.
2582 ParameterCount actual(argc_); 2445 ParameterCount actual(argc_);
2583 2446
2447 if (CallAsMethod()) {
2448 if (NeedsChecks()) {
2449 // Do not transform the receiver for strict mode functions.
2450 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2451 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
2452 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
2453 __ j(not_equal, &cont);
2454
2455 // Do not transform the receiver for natives (shared already in ecx).
2456 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
2457 1 << SharedFunctionInfo::kNativeBitWithinByte);
2458 __ j(not_equal, &cont);
2459 }
2460
2461 // Load the receiver from the stack.
2462 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
2463
2464 if (NeedsChecks()) {
2465 __ JumpIfSmi(eax, &wrap);
2466
2467 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2468 __ j(below, &wrap);
2469 } else {
2470 __ jmp(&wrap);
2471 }
2472
2473 __ bind(&cont);
2474 }
2475
2584 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper()); 2476 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
2585 2477
2586 // Slow-case: Non-function called. 2478 if (NeedsChecks()) {
2587 __ bind(&slow); 2479 // Slow-case: Non-function called.
2588 if (RecordCallTarget()) { 2480 __ bind(&slow);
2589 // If there is a call target cache, mark it megamorphic in the 2481 if (RecordCallTarget()) {
2590 // non-function case. MegamorphicSentinel is an immortal immovable 2482 // If there is a call target cache, mark it megamorphic in the
2591 // object (undefined) so no write barrier is needed. 2483 // non-function case. MegamorphicSentinel is an immortal immovable
2592 __ mov(FieldOperand(ebx, Cell::kValueOffset), 2484 // object (undefined) so no write barrier is needed.
2593 Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate))); 2485 __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2594 } 2486 FixedArray::kHeaderSize),
2595 // Check for function proxy. 2487 Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2596 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); 2488 }
2597 __ j(not_equal, &non_function); 2489 // Check for function proxy.
2598 __ pop(ecx); 2490 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
2599 __ push(edi); // put proxy as additional argument under return address 2491 __ j(not_equal, &non_function);
2600 __ push(ecx); 2492 __ pop(ecx);
2601 __ Set(eax, Immediate(argc_ + 1)); 2493 __ push(edi); // put proxy as additional argument under return address
2602 __ Set(ebx, Immediate(0)); 2494 __ push(ecx);
2603 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY); 2495 __ Set(eax, Immediate(argc_ + 1));
2604 { 2496 __ Set(ebx, Immediate(0));
2497 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
2498 {
2499 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
2500 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2501 }
2502
2503 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2504 // of the original receiver from the call site).
2505 __ bind(&non_function);
2506 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
2507 __ Set(eax, Immediate(argc_));
2508 __ Set(ebx, Immediate(0));
2509 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
2605 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); 2510 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
2606 __ jmp(adaptor, RelocInfo::CODE_TARGET); 2511 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2607 } 2512 }
2608 2513
2609 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 2514 if (CallAsMethod()) {
2610 // of the original receiver from the call site). 2515 __ bind(&wrap);
2611 __ bind(&non_function); 2516 // Wrap the receiver and patch it back onto the stack.
2612 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); 2517 { FrameScope frame_scope(masm, StackFrame::INTERNAL);
2613 __ Set(eax, Immediate(argc_)); 2518 __ push(edi);
2614 __ Set(ebx, Immediate(0)); 2519 __ push(eax);
2615 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); 2520 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
2616 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); 2521 __ pop(edi);
2617 __ jmp(adaptor, RelocInfo::CODE_TARGET); 2522 }
2523 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
2524 __ jmp(&cont);
2525 }
2618 } 2526 }
2619 2527
2620 2528
2621 void CallConstructStub::Generate(MacroAssembler* masm) { 2529 void CallConstructStub::Generate(MacroAssembler* masm) {
2622 // eax : number of arguments 2530 // eax : number of arguments
2623 // ebx : cache cell for call target 2531 // ebx : feedback vector
2532 // edx : (only if ebx is not undefined) slot in feedback vector (Smi)
2624 // edi : constructor function 2533 // edi : constructor function
2625 Label slow, non_function_call; 2534 Label slow, non_function_call;
2626 2535
2627 // Check that function is not a smi. 2536 // Check that function is not a smi.
2628 __ JumpIfSmi(edi, &non_function_call); 2537 __ JumpIfSmi(edi, &non_function_call);
2629 // Check that function is a JSFunction. 2538 // Check that function is a JSFunction.
2630 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); 2539 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2631 __ j(not_equal, &slow); 2540 __ j(not_equal, &slow);
2632 2541
2633 if (RecordCallTarget()) { 2542 if (RecordCallTarget()) {
(...skipping 736 matching lines...) Expand 10 before | Expand all | Expand 10 after
3370 if (!result_.is(eax)) { 3279 if (!result_.is(eax)) {
3371 __ mov(result_, eax); 3280 __ mov(result_, eax);
3372 } 3281 }
3373 call_helper.AfterCall(masm); 3282 call_helper.AfterCall(masm);
3374 __ jmp(&exit_); 3283 __ jmp(&exit_);
3375 3284
3376 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); 3285 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
3377 } 3286 }
3378 3287
3379 3288
3380 void StringAddStub::Generate(MacroAssembler* masm) {
3381 Label call_runtime, call_builtin;
3382 Builtins::JavaScript builtin_id = Builtins::ADD;
3383
3384 // Load the two arguments.
3385 __ mov(eax, Operand(esp, 2 * kPointerSize)); // First argument.
3386 __ mov(edx, Operand(esp, 1 * kPointerSize)); // Second argument.
3387
3388 // Make sure that both arguments are strings if not known in advance.
3389 // Otherwise, at least one of the arguments is definitely a string,
3390 // and we convert the one that is not known to be a string.
3391 if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
3392 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
3393 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
3394 __ JumpIfSmi(eax, &call_runtime);
3395 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
3396 __ j(above_equal, &call_runtime);
3397
3398 // First argument is a a string, test second.
3399 __ JumpIfSmi(edx, &call_runtime);
3400 __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
3401 __ j(above_equal, &call_runtime);
3402 } else if ((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
3403 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == 0);
3404 GenerateConvertArgument(masm, 2 * kPointerSize, eax, ebx, ecx, edi,
3405 &call_builtin);
3406 builtin_id = Builtins::STRING_ADD_RIGHT;
3407 } else if ((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
3408 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == 0);
3409 GenerateConvertArgument(masm, 1 * kPointerSize, edx, ebx, ecx, edi,
3410 &call_builtin);
3411 builtin_id = Builtins::STRING_ADD_LEFT;
3412 }
3413
3414 // Both arguments are strings.
3415 // eax: first string
3416 // edx: second string
3417 // Check if either of the strings are empty. In that case return the other.
3418 Label second_not_zero_length, both_not_zero_length;
3419 __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
3420 STATIC_ASSERT(kSmiTag == 0);
3421 __ test(ecx, ecx);
3422 __ j(not_zero, &second_not_zero_length, Label::kNear);
3423 // Second string is empty, result is first string which is already in eax.
3424 Counters* counters = masm->isolate()->counters();
3425 __ IncrementCounter(counters->string_add_native(), 1);
3426 __ ret(2 * kPointerSize);
3427 __ bind(&second_not_zero_length);
3428 __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
3429 STATIC_ASSERT(kSmiTag == 0);
3430 __ test(ebx, ebx);
3431 __ j(not_zero, &both_not_zero_length, Label::kNear);
3432 // First string is empty, result is second string which is in edx.
3433 __ mov(eax, edx);
3434 __ IncrementCounter(counters->string_add_native(), 1);
3435 __ ret(2 * kPointerSize);
3436
3437 // Both strings are non-empty.
3438 // eax: first string
3439 // ebx: length of first string as a smi
3440 // ecx: length of second string as a smi
3441 // edx: second string
3442 // Look at the length of the result of adding the two strings.
3443 Label string_add_flat_result, longer_than_two;
3444 __ bind(&both_not_zero_length);
3445 __ add(ebx, ecx);
3446 STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
3447 // Handle exceptionally long strings in the runtime system.
3448 __ j(overflow, &call_runtime);
3449 // Use the string table when adding two one character strings, as it
3450 // helps later optimizations to return an internalized string here.
3451 __ cmp(ebx, Immediate(Smi::FromInt(2)));
3452 __ j(not_equal, &longer_than_two);
3453
3454 // Check that both strings are non-external ASCII strings.
3455 __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx, &call_runtime);
3456
3457 // Get the two characters forming the new string.
3458 __ movzx_b(ebx, FieldOperand(eax, SeqOneByteString::kHeaderSize));
3459 __ movzx_b(ecx, FieldOperand(edx, SeqOneByteString::kHeaderSize));
3460
3461 // Try to lookup two character string in string table. If it is not found
3462 // just allocate a new one.
3463 Label make_two_character_string, make_two_character_string_no_reload;
3464 StringHelper::GenerateTwoCharacterStringTableProbe(
3465 masm, ebx, ecx, eax, edx, edi,
3466 &make_two_character_string_no_reload, &make_two_character_string);
3467 __ IncrementCounter(counters->string_add_native(), 1);
3468 __ ret(2 * kPointerSize);
3469
3470 // Allocate a two character string.
3471 __ bind(&make_two_character_string);
3472 // Reload the arguments.
3473 __ mov(eax, Operand(esp, 2 * kPointerSize)); // First argument.
3474 __ mov(edx, Operand(esp, 1 * kPointerSize)); // Second argument.
3475 // Get the two characters forming the new string.
3476 __ movzx_b(ebx, FieldOperand(eax, SeqOneByteString::kHeaderSize));
3477 __ movzx_b(ecx, FieldOperand(edx, SeqOneByteString::kHeaderSize));
3478 __ bind(&make_two_character_string_no_reload);
3479 __ IncrementCounter(counters->string_add_make_two_char(), 1);
3480 __ AllocateAsciiString(eax, 2, edi, edx, &call_runtime);
3481 // Pack both characters in ebx.
3482 __ shl(ecx, kBitsPerByte);
3483 __ or_(ebx, ecx);
3484 // Set the characters in the new string.
3485 __ mov_w(FieldOperand(eax, SeqOneByteString::kHeaderSize), ebx);
3486 __ IncrementCounter(counters->string_add_native(), 1);
3487 __ ret(2 * kPointerSize);
3488
3489 __ bind(&longer_than_two);
3490 // Check if resulting string will be flat.
3491 __ cmp(ebx, Immediate(Smi::FromInt(ConsString::kMinLength)));
3492 __ j(below, &string_add_flat_result);
3493
3494 // If result is not supposed to be flat allocate a cons string object. If both
3495 // strings are ASCII the result is an ASCII cons string.
3496 Label non_ascii, allocated, ascii_data;
3497 __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
3498 __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
3499 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
3500 __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
3501 __ and_(ecx, edi);
3502 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
3503 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
3504 __ test(ecx, Immediate(kStringEncodingMask));
3505 __ j(zero, &non_ascii);
3506 __ bind(&ascii_data);
3507 // Allocate an ASCII cons string.
3508 __ AllocateAsciiConsString(ecx, edi, no_reg, &call_runtime);
3509 __ bind(&allocated);
3510 // Fill the fields of the cons string.
3511 __ AssertSmi(ebx);
3512 __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
3513 __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
3514 Immediate(String::kEmptyHashField));
3515
3516 Label skip_write_barrier, after_writing;
3517 ExternalReference high_promotion_mode = ExternalReference::
3518 new_space_high_promotion_mode_active_address(masm->isolate());
3519 __ test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
3520 __ j(zero, &skip_write_barrier);
3521
3522 __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
3523 __ RecordWriteField(ecx,
3524 ConsString::kFirstOffset,
3525 eax,
3526 ebx,
3527 kDontSaveFPRegs);
3528 __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
3529 __ RecordWriteField(ecx,
3530 ConsString::kSecondOffset,
3531 edx,
3532 ebx,
3533 kDontSaveFPRegs);
3534 __ jmp(&after_writing);
3535
3536 __ bind(&skip_write_barrier);
3537 __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
3538 __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
3539
3540 __ bind(&after_writing);
3541
3542 __ mov(eax, ecx);
3543 __ IncrementCounter(counters->string_add_native(), 1);
3544 __ ret(2 * kPointerSize);
3545 __ bind(&non_ascii);
3546 // At least one of the strings is two-byte. Check whether it happens
3547 // to contain only one byte characters.
3548 // ecx: first instance type AND second instance type.
3549 // edi: second instance type.
3550 __ test(ecx, Immediate(kOneByteDataHintMask));
3551 __ j(not_zero, &ascii_data);
3552 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
3553 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
3554 __ xor_(edi, ecx);
3555 STATIC_ASSERT(kOneByteStringTag != 0 && kOneByteDataHintTag != 0);
3556 __ and_(edi, kOneByteStringTag | kOneByteDataHintTag);
3557 __ cmp(edi, kOneByteStringTag | kOneByteDataHintTag);
3558 __ j(equal, &ascii_data);
3559 // Allocate a two byte cons string.
3560 __ AllocateTwoByteConsString(ecx, edi, no_reg, &call_runtime);
3561 __ jmp(&allocated);
3562
3563 // We cannot encounter sliced strings or cons strings here since:
3564 STATIC_ASSERT(SlicedString::kMinLength >= ConsString::kMinLength);
3565 // Handle creating a flat result from either external or sequential strings.
3566 // Locate the first characters' locations.
3567 // eax: first string
3568 // ebx: length of resulting flat string as a smi
3569 // edx: second string
3570 Label first_prepared, second_prepared;
3571 Label first_is_sequential, second_is_sequential;
3572 __ bind(&string_add_flat_result);
3573 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
3574 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
3575 // ecx: instance type of first string
3576 STATIC_ASSERT(kSeqStringTag == 0);
3577 __ test_b(ecx, kStringRepresentationMask);
3578 __ j(zero, &first_is_sequential, Label::kNear);
3579 // Rule out short external string and load string resource.
3580 STATIC_ASSERT(kShortExternalStringTag != 0);
3581 __ test_b(ecx, kShortExternalStringMask);
3582 __ j(not_zero, &call_runtime);
3583 __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
3584 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
3585 __ jmp(&first_prepared, Label::kNear);
3586 __ bind(&first_is_sequential);
3587 __ add(eax, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3588 __ bind(&first_prepared);
3589
3590 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
3591 __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
3592 // Check whether both strings have same encoding.
3593 // edi: instance type of second string
3594 __ xor_(ecx, edi);
3595 __ test_b(ecx, kStringEncodingMask);
3596 __ j(not_zero, &call_runtime);
3597 STATIC_ASSERT(kSeqStringTag == 0);
3598 __ test_b(edi, kStringRepresentationMask);
3599 __ j(zero, &second_is_sequential, Label::kNear);
3600 // Rule out short external string and load string resource.
3601 STATIC_ASSERT(kShortExternalStringTag != 0);
3602 __ test_b(edi, kShortExternalStringMask);
3603 __ j(not_zero, &call_runtime);
3604 __ mov(edx, FieldOperand(edx, ExternalString::kResourceDataOffset));
3605 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
3606 __ jmp(&second_prepared, Label::kNear);
3607 __ bind(&second_is_sequential);
3608 __ add(edx, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3609 __ bind(&second_prepared);
3610
3611 // Push the addresses of both strings' first characters onto the stack.
3612 __ push(edx);
3613 __ push(eax);
3614
3615 Label non_ascii_string_add_flat_result, call_runtime_drop_two;
3616 // edi: instance type of second string
3617 // First string and second string have the same encoding.
3618 STATIC_ASSERT(kTwoByteStringTag == 0);
3619 __ test_b(edi, kStringEncodingMask);
3620 __ j(zero, &non_ascii_string_add_flat_result);
3621
3622 // Both strings are ASCII strings.
3623 // ebx: length of resulting flat string as a smi
3624 __ SmiUntag(ebx);
3625 __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &call_runtime_drop_two);
3626 // eax: result string
3627 __ mov(ecx, eax);
3628 // Locate first character of result.
3629 __ add(ecx, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3630 // Load first argument's length and first character location. Account for
3631 // values currently on the stack when fetching arguments from it.
3632 __ mov(edx, Operand(esp, 4 * kPointerSize));
3633 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
3634 __ SmiUntag(edi);
3635 __ pop(edx);
3636 // eax: result string
3637 // ecx: first character of result
3638 // edx: first char of first argument
3639 // edi: length of first argument
3640 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
3641 // Load second argument's length and first character location. Account for
3642 // values currently on the stack when fetching arguments from it.
3643 __ mov(edx, Operand(esp, 2 * kPointerSize));
3644 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
3645 __ SmiUntag(edi);
3646 __ pop(edx);
3647 // eax: result string
3648 // ecx: next character of result
3649 // edx: first char of second argument
3650 // edi: length of second argument
3651 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
3652 __ IncrementCounter(counters->string_add_native(), 1);
3653 __ ret(2 * kPointerSize);
3654
3655 // Handle creating a flat two byte result.
3656 // eax: first string - known to be two byte
3657 // ebx: length of resulting flat string as a smi
3658 // edx: second string
3659 __ bind(&non_ascii_string_add_flat_result);
3660 // Both strings are two byte strings.
3661 __ SmiUntag(ebx);
3662 __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &call_runtime_drop_two);
3663 // eax: result string
3664 __ mov(ecx, eax);
3665 // Locate first character of result.
3666 __ add(ecx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3667 // Load second argument's length and first character location. Account for
3668 // values currently on the stack when fetching arguments from it.
3669 __ mov(edx, Operand(esp, 4 * kPointerSize));
3670 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
3671 __ SmiUntag(edi);
3672 __ pop(edx);
3673 // eax: result string
3674 // ecx: first character of result
3675 // edx: first char of first argument
3676 // edi: length of first argument
3677 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
3678 // Load second argument's length and first character location. Account for
3679 // values currently on the stack when fetching arguments from it.
3680 __ mov(edx, Operand(esp, 2 * kPointerSize));
3681 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
3682 __ SmiUntag(edi);
3683 __ pop(edx);
3684 // eax: result string
3685 // ecx: next character of result
3686 // edx: first char of second argument
3687 // edi: length of second argument
3688 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
3689 __ IncrementCounter(counters->string_add_native(), 1);
3690 __ ret(2 * kPointerSize);
3691
3692 // Recover stack pointer before jumping to runtime.
3693 __ bind(&call_runtime_drop_two);
3694 __ Drop(2);
3695 // Just jump to runtime to add the two strings.
3696 __ bind(&call_runtime);
3697 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
3698
3699 if (call_builtin.is_linked()) {
3700 __ bind(&call_builtin);
3701 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
3702 }
3703 }
3704
3705
3706 void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
3707 __ push(eax);
3708 __ push(edx);
3709 }
3710
3711
3712 void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm,
3713 Register temp) {
3714 __ pop(temp);
3715 __ pop(edx);
3716 __ pop(eax);
3717 __ push(temp);
3718 }
3719
3720
3721 void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
3722 int stack_offset,
3723 Register arg,
3724 Register scratch1,
3725 Register scratch2,
3726 Register scratch3,
3727 Label* slow) {
3728 // First check if the argument is already a string.
3729 Label not_string, done;
3730 __ JumpIfSmi(arg, &not_string);
3731 __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
3732 __ j(below, &done);
3733
3734 // Check the number to string cache.
3735 __ bind(&not_string);
3736 // Puts the cached result into scratch1.
3737 __ LookupNumberStringCache(arg, scratch1, scratch2, scratch3, slow);
3738 __ mov(arg, scratch1);
3739 __ mov(Operand(esp, stack_offset), arg);
3740 __ bind(&done);
3741 }
3742
3743
3744 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
3745 Register dest,
3746 Register src,
3747 Register count,
3748 Register scratch,
3749 bool ascii) {
3750 Label loop;
3751 __ bind(&loop);
3752 // This loop just copies one character at a time, as it is only used for very
3753 // short strings.
3754 if (ascii) {
3755 __ mov_b(scratch, Operand(src, 0));
3756 __ mov_b(Operand(dest, 0), scratch);
3757 __ add(src, Immediate(1));
3758 __ add(dest, Immediate(1));
3759 } else {
3760 __ mov_w(scratch, Operand(src, 0));
3761 __ mov_w(Operand(dest, 0), scratch);
3762 __ add(src, Immediate(2));
3763 __ add(dest, Immediate(2));
3764 }
3765 __ sub(count, Immediate(1));
3766 __ j(not_zero, &loop);
3767 }
3768
3769
3770 void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm, 3289 void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
3771 Register dest, 3290 Register dest,
3772 Register src, 3291 Register src,
3773 Register count, 3292 Register count,
3774 Register scratch, 3293 Register scratch,
3775 bool ascii) { 3294 bool ascii) {
3776 // Copy characters using rep movs of doublewords. 3295 // Copy characters using rep movs of doublewords.
3777 // The destination is aligned on a 4 byte boundary because we are 3296 // The destination is aligned on a 4 byte boundary because we are
3778 // copying to the beginning of a newly allocated string. 3297 // copying to the beginning of a newly allocated string.
3779 ASSERT(dest.is(edi)); // rep movs destination 3298 ASSERT(dest.is(edi)); // rep movs destination
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
3820 __ mov_b(Operand(dest, 0), scratch); 3339 __ mov_b(Operand(dest, 0), scratch);
3821 __ add(src, Immediate(1)); 3340 __ add(src, Immediate(1));
3822 __ add(dest, Immediate(1)); 3341 __ add(dest, Immediate(1));
3823 __ sub(count, Immediate(1)); 3342 __ sub(count, Immediate(1));
3824 __ j(not_zero, &loop); 3343 __ j(not_zero, &loop);
3825 3344
3826 __ bind(&done); 3345 __ bind(&done);
3827 } 3346 }
3828 3347
3829 3348
3830 void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
3831 Register c1,
3832 Register c2,
3833 Register scratch1,
3834 Register scratch2,
3835 Register scratch3,
3836 Label* not_probed,
3837 Label* not_found) {
3838 // Register scratch3 is the general scratch register in this function.
3839 Register scratch = scratch3;
3840
3841 // Make sure that both characters are not digits as such strings has a
3842 // different hash algorithm. Don't try to look for these in the string table.
3843 Label not_array_index;
3844 __ mov(scratch, c1);
3845 __ sub(scratch, Immediate(static_cast<int>('0')));
3846 __ cmp(scratch, Immediate(static_cast<int>('9' - '0')));
3847 __ j(above, &not_array_index, Label::kNear);
3848 __ mov(scratch, c2);
3849 __ sub(scratch, Immediate(static_cast<int>('0')));
3850 __ cmp(scratch, Immediate(static_cast<int>('9' - '0')));
3851 __ j(below_equal, not_probed);
3852
3853 __ bind(&not_array_index);
3854 // Calculate the two character string hash.
3855 Register hash = scratch1;
3856 GenerateHashInit(masm, hash, c1, scratch);
3857 GenerateHashAddCharacter(masm, hash, c2, scratch);
3858 GenerateHashGetHash(masm, hash, scratch);
3859
3860 // Collect the two characters in a register.
3861 Register chars = c1;
3862 __ shl(c2, kBitsPerByte);
3863 __ or_(chars, c2);
3864
3865 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
3866 // hash: hash of two character string.
3867
3868 // Load the string table.
3869 Register string_table = c2;
3870 __ LoadRoot(string_table, Heap::kStringTableRootIndex);
3871
3872 // Calculate capacity mask from the string table capacity.
3873 Register mask = scratch2;
3874 __ mov(mask, FieldOperand(string_table, StringTable::kCapacityOffset));
3875 __ SmiUntag(mask);
3876 __ sub(mask, Immediate(1));
3877
3878 // Registers
3879 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
3880 // hash: hash of two character string
3881 // string_table: string table
3882 // mask: capacity mask
3883 // scratch: -
3884
3885 // Perform a number of probes in the string table.
3886 static const int kProbes = 4;
3887 Label found_in_string_table;
3888 Label next_probe[kProbes], next_probe_pop_mask[kProbes];
3889 Register candidate = scratch; // Scratch register contains candidate.
3890 for (int i = 0; i < kProbes; i++) {
3891 // Calculate entry in string table.
3892 __ mov(scratch, hash);
3893 if (i > 0) {
3894 __ add(scratch, Immediate(StringTable::GetProbeOffset(i)));
3895 }
3896 __ and_(scratch, mask);
3897
3898 // Load the entry from the string table.
3899 STATIC_ASSERT(StringTable::kEntrySize == 1);
3900 __ mov(candidate,
3901 FieldOperand(string_table,
3902 scratch,
3903 times_pointer_size,
3904 StringTable::kElementsStartOffset));
3905
3906 // If entry is undefined no string with this hash can be found.
3907 Factory* factory = masm->isolate()->factory();
3908 __ cmp(candidate, factory->undefined_value());
3909 __ j(equal, not_found);
3910 __ cmp(candidate, factory->the_hole_value());
3911 __ j(equal, &next_probe[i]);
3912
3913 // If length is not 2 the string is not a candidate.
3914 __ cmp(FieldOperand(candidate, String::kLengthOffset),
3915 Immediate(Smi::FromInt(2)));
3916 __ j(not_equal, &next_probe[i]);
3917
3918 // As we are out of registers save the mask on the stack and use that
3919 // register as a temporary.
3920 __ push(mask);
3921 Register temp = mask;
3922
3923 // Check that the candidate is a non-external ASCII string.
3924 __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
3925 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
3926 __ JumpIfInstanceTypeIsNotSequentialAscii(
3927 temp, temp, &next_probe_pop_mask[i]);
3928
3929 // Check if the two characters match.
3930 __ mov(temp, FieldOperand(candidate, SeqOneByteString::kHeaderSize));
3931 __ and_(temp, 0x0000ffff);
3932 __ cmp(chars, temp);
3933 __ j(equal, &found_in_string_table);
3934 __ bind(&next_probe_pop_mask[i]);
3935 __ pop(mask);
3936 __ bind(&next_probe[i]);
3937 }
3938
3939 // No matching 2 character string found by probing.
3940 __ jmp(not_found);
3941
3942 // Scratch register contains result when we fall through to here.
3943 Register result = candidate;
3944 __ bind(&found_in_string_table);
3945 __ pop(mask); // Pop saved mask from the stack.
3946 if (!result.is(eax)) {
3947 __ mov(eax, result);
3948 }
3949 }
3950
3951
3952 void StringHelper::GenerateHashInit(MacroAssembler* masm, 3349 void StringHelper::GenerateHashInit(MacroAssembler* masm,
3953 Register hash, 3350 Register hash,
3954 Register character, 3351 Register character,
3955 Register scratch) { 3352 Register scratch) {
3956 // hash = (seed + character) + ((seed + character) << 10); 3353 // hash = (seed + character) + ((seed + character) << 10);
3957 if (Serializer::enabled()) { 3354 if (Serializer::enabled()) {
3958 __ LoadRoot(scratch, Heap::kHashSeedRootIndex); 3355 __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
3959 __ SmiUntag(scratch); 3356 __ SmiUntag(scratch);
3960 __ add(scratch, character); 3357 __ add(scratch, character);
3961 __ mov(hash, scratch); 3358 __ mov(hash, scratch);
(...skipping 444 matching lines...) Expand 10 before | Expand all | Expand 10 after
4406 __ push(ecx); 3803 __ push(ecx);
4407 GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi); 3804 GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);
4408 3805
4409 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) 3806 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
4410 // tagged as a small integer. 3807 // tagged as a small integer.
4411 __ bind(&runtime); 3808 __ bind(&runtime);
4412 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 3809 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
4413 } 3810 }
4414 3811
4415 3812
3813 void ArrayPushStub::Generate(MacroAssembler* masm) {
       // Fast-path code for Array.prototype.push, specialized per elements
       // kind. Register contract (as used below — confirm against callers):
       //   edx : receiver JSArray
       //   esp : return address followed by the pushed argument(s)
       // Every case the fast path cannot handle tail-calls the
       // Builtins::c_ArrayPush C builtin.
3814   int argc = arguments_count();
3815 
3816   if (argc == 0) {
3817     // Noop, return the length.
3818     __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
3819     __ ret((argc + 1) * kPointerSize);
3820     return;
3821   }
3822 
3823   Isolate* isolate = masm->isolate();
3824 
3825   if (argc != 1) {
         // Only single-argument pushes are inlined; delegate the rest.
3826     __ TailCallExternalReference(
3827         ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3828     return;
3829   }
3830 
3831   Label call_builtin, attempt_to_grow_elements, with_write_barrier;
3832 
3833   // Get the elements array of the object.
3834   __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
3835 
3836   if (IsFastSmiOrObjectElementsKind(elements_kind())) {
3837     // Check that the elements are in fast mode and writable.
3838     __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
3839            isolate->factory()->fixed_array_map());
3840     __ j(not_equal, &call_builtin);
3841   }
3842 
3843   // Get the array's length into eax and calculate new length.
3844   __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
3845   STATIC_ASSERT(kSmiTagSize == 1);
3846   STATIC_ASSERT(kSmiTag == 0);
3847   __ add(eax, Immediate(Smi::FromInt(argc)));
3848 
3849   // Get the elements' length into ecx.
3850   __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset));
3851 
3852   // Check if we could survive without allocation.
3853   __ cmp(eax, ecx);
3854 
3855   if (IsFastSmiOrObjectElementsKind(elements_kind())) {
3856     __ j(greater, &attempt_to_grow_elements);
3857 
3858     // Check if value is a smi.
3859     __ mov(ecx, Operand(esp, argc * kPointerSize));
3860     __ JumpIfNotSmi(ecx, &with_write_barrier);
3861 
3862     // Store the value.
3863     __ mov(FieldOperand(edi, eax, times_half_pointer_size,
3864                         FixedArray::kHeaderSize - argc * kPointerSize),
3865            ecx);
3866   } else {
         // Double elements: in-place growth is not attempted inline.
3867     __ j(greater, &call_builtin);
3868 
3869     __ mov(ecx, Operand(esp, argc * kPointerSize));
3870     __ StoreNumberToDoubleElements(
3871         ecx, edi, eax, ecx, xmm0, &call_builtin, true, argc * kDoubleSize);
3872   }
3873 
3874   // Save new length.
3875   __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
3876   __ ret((argc + 1) * kPointerSize);
3877 
3878   if (IsFastDoubleElementsKind(elements_kind())) {
         // For double arrays the write-barrier and grow paths below are
         // unused; emit the builtin fallback and stop here.
3879     __ bind(&call_builtin);
3880     __ TailCallExternalReference(
3881         ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3882     return;
3883   }
3884 
3885   __ bind(&with_write_barrier);
3886 
3887   if (IsFastSmiElementsKind(elements_kind())) {
3888     if (FLAG_trace_elements_transitions) __ jmp(&call_builtin);
3889 
         // Heap numbers would need a smi->double transition; punt to builtin.
3890     __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
3891            isolate->factory()->heap_number_map());
3892     __ j(equal, &call_builtin);
3893 
3894     ElementsKind target_kind = IsHoleyElementsKind(elements_kind())
3895         ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
3896     __ mov(ebx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
3897     __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset));
3898     __ mov(ebx, ContextOperand(ebx, Context::JS_ARRAY_MAPS_INDEX));
3899     const int header_size = FixedArrayBase::kHeaderSize;
3900     // Verify that the object can be transitioned in place.
3901     const int origin_offset = header_size + elements_kind() * kPointerSize;
3902     __ mov(edi, FieldOperand(ebx, origin_offset));
3903     __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset));
3904     __ j(not_equal, &call_builtin);
3905 
3906     const int target_offset = header_size + target_kind * kPointerSize;
3907     __ mov(ebx, FieldOperand(ebx, target_offset));
3908     ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
3909         masm, DONT_TRACK_ALLOCATION_SITE, NULL);
3910     // Restore edi used as a scratch register for the write barrier used while
3911     // setting the map.
3912     __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
3913   }
3914 
3915   // Save new length.
3916   __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
3917 
3918   // Store the value.
3919   __ lea(edx, FieldOperand(edi, eax, times_half_pointer_size,
3920                            FixedArray::kHeaderSize - argc * kPointerSize));
3921   __ mov(Operand(edx, 0), ecx);
3922 
3923   __ RecordWrite(edi, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
3924                  OMIT_SMI_CHECK);
3925 
3926   __ ret((argc + 1) * kPointerSize);
3927 
3928   __ bind(&attempt_to_grow_elements);
3929   if (!FLAG_inline_new) {
3930     __ bind(&call_builtin);
3931     __ TailCallExternalReference(
3932         ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
3933     return;
3934   }
3935 
3936   __ mov(ebx, Operand(esp, argc * kPointerSize));
3937   // Growing elements that are SMI-only requires special handling in case the
3938   // new element is non-Smi. For now, delegate to the builtin.
3939   if (IsFastSmiElementsKind(elements_kind())) {
3940     __ JumpIfNotSmi(ebx, &call_builtin);
3941   }
3942 
3943   // We could be lucky and the elements array could be at the top of new-space.
3944   // In this case we can just grow it in place by moving the allocation pointer
3945   // up.
3946   ExternalReference new_space_allocation_top =
3947       ExternalReference::new_space_allocation_top_address(isolate);
3948   ExternalReference new_space_allocation_limit =
3949       ExternalReference::new_space_allocation_limit_address(isolate);
3950 
3951   const int kAllocationDelta = 4;
3952   ASSERT(kAllocationDelta >= argc);
3953   // Load top.
3954   __ mov(ecx, Operand::StaticVariable(new_space_allocation_top));
3955 
3956   // Check if it's the end of elements.
3957   __ lea(edx, FieldOperand(edi, eax, times_half_pointer_size,
3958                            FixedArray::kHeaderSize - argc * kPointerSize));
3959   __ cmp(edx, ecx);
3960   __ j(not_equal, &call_builtin);
3961   __ add(ecx, Immediate(kAllocationDelta * kPointerSize));
3962   __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
3963   __ j(above, &call_builtin);
3964 
3965   // We fit and could grow elements.
3966   __ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
3967 
3968   // Push the argument...
3969   __ mov(Operand(edx, 0), ebx);
3970   // ... and fill the rest with holes.
3971   for (int i = 1; i < kAllocationDelta; i++) {
3972     __ mov(Operand(edx, i * kPointerSize),
3973            isolate->factory()->the_hole_value());
3974   }
3975 
3976   if (IsFastObjectElementsKind(elements_kind())) {
3977     // We know the elements array is in new space so we don't need the
3978     // remembered set, but we just pushed a value onto it so we may have to tell
3979     // the incremental marker to rescan the object that we just grew. We don't
3980     // need to worry about the holes because they are in old space and already
3981     // marked black.
3982     __ RecordWrite(edi, edx, ebx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
3983   }
3984 
3985   // Restore receiver to edx as finish sequence assumes it's here.
3986   __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
3987 
3988   // Increment element's and array's sizes.
3989   __ add(FieldOperand(edi, FixedArray::kLengthOffset),
3990          Immediate(Smi::FromInt(kAllocationDelta)));
3991 
3992   // NOTE: This only happens in new-space, where we don't care about the
3993   // black-byte-count on pages. Otherwise we should update that too if the
3994   // object is black.
3995 
3996   __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
3997   __ ret((argc + 1) * kPointerSize);
3998 
3999   __ bind(&call_builtin);
4000   __ TailCallExternalReference(
4001       ExternalReference(Builtins::c_ArrayPush, isolate), argc + 1, 1);
4002 }
4003
4004
4416 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { 4005 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
4417 // ----------- S t a t e ------------- 4006 // ----------- S t a t e -------------
4418 // -- edx : left 4007 // -- edx : left
4419 // -- eax : right 4008 // -- eax : right
4420 // -- esp[0] : return address 4009 // -- esp[0] : return address
4421 // ----------------------------------- 4010 // -----------------------------------
4422 Isolate* isolate = masm->isolate(); 4011 Isolate* isolate = masm->isolate();
4423 4012
4424 // Load ecx with the allocation site. We stick an undefined dummy value here 4013 // Load ecx with the allocation site. We stick an undefined dummy value here
4425 // and replace it with the real allocation site later when we instantiate this 4014 // and replace it with the real allocation site later when we instantiate this
(...skipping 911 matching lines...) Expand 10 before | Expand all | Expand 10 after
5337 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); 4926 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
5338 __ pop(ecx); 4927 __ pop(ecx);
5339 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE 4928 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
5340 ? kPointerSize 4929 ? kPointerSize
5341 : 0; 4930 : 0;
5342 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset)); 4931 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
5343 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack. 4932 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack.
5344 } 4933 }
5345 4934
5346 4935
5347 void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
       // Calls into the runtime through CEntryStub, then tail-calls (via
       // JUMP_FUNCTION) the function object the runtime returned in eax,
       // forwarding the caller's stack parameters from the stub-failure
       // trampoline frame.
5348   CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
5349   __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
       // edi is the callee register InvokeFunction expects.
5350   __ mov(edi, eax);
5351   int parameter_count_offset =
5352       StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
5353   __ mov(eax, MemOperand(ebp, parameter_count_offset));
5354   // The parameter count above includes the receiver for the arguments passed to
5355   // the deoptimization handler. Subtract the receiver for the parameter count
5356   // for the call.
5357   __ sub(eax, Immediate(1));
5358   masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
5359   ParameterCount argument_count(eax);
5360   __ InvokeFunction(edi, argument_count, JUMP_FUNCTION, NullCallWrapper());
5361 }
5362
5363
5364 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { 4936 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
5365 if (masm->isolate()->function_entry_hook() != NULL) { 4937 if (masm->isolate()->function_entry_hook() != NULL) {
5366 ProfileEntryHookStub stub; 4938 ProfileEntryHookStub stub;
5367 masm->CallStub(&stub); 4939 masm->CallStub(&stub);
5368 } 4940 }
5369 } 4941 }
5370 4942
5371 4943
5372 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { 4944 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
5373 // Save volatile registers. 4945 // Save volatile registers.
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after
5424 // If we reached this point there is a problem. 4996 // If we reached this point there is a problem.
5425 __ Abort(kUnexpectedElementsKindInArrayConstructor); 4997 __ Abort(kUnexpectedElementsKindInArrayConstructor);
5426 } else { 4998 } else {
5427 UNREACHABLE(); 4999 UNREACHABLE();
5428 } 5000 }
5429 } 5001 }
5430 5002
5431 5003
5432 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, 5004 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
5433 AllocationSiteOverrideMode mode) { 5005 AllocationSiteOverrideMode mode) {
5434 // ebx - type info cell (if mode != DISABLE_ALLOCATION_SITES) 5006 // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
5435 // edx - kind (if mode != DISABLE_ALLOCATION_SITES) 5007 // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
5436 // eax - number of arguments 5008 // eax - number of arguments
5437 // edi - constructor? 5009 // edi - constructor?
5438 // esp[0] - return address 5010 // esp[0] - return address
5439 // esp[4] - last argument 5011 // esp[4] - last argument
5440 Label normal_sequence; 5012 Label normal_sequence;
5441 if (mode == DONT_OVERRIDE) { 5013 if (mode == DONT_OVERRIDE) {
5442 ASSERT(FAST_SMI_ELEMENTS == 0); 5014 ASSERT(FAST_SMI_ELEMENTS == 0);
5443 ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); 5015 ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
5444 ASSERT(FAST_ELEMENTS == 2); 5016 ASSERT(FAST_ELEMENTS == 2);
(...skipping 20 matching lines...) Expand all
5465 __ TailCallStub(&stub_holey); 5037 __ TailCallStub(&stub_holey);
5466 5038
5467 __ bind(&normal_sequence); 5039 __ bind(&normal_sequence);
5468 ArraySingleArgumentConstructorStub stub(initial, 5040 ArraySingleArgumentConstructorStub stub(initial,
5469 DISABLE_ALLOCATION_SITES); 5041 DISABLE_ALLOCATION_SITES);
5470 __ TailCallStub(&stub); 5042 __ TailCallStub(&stub);
5471 } else if (mode == DONT_OVERRIDE) { 5043 } else if (mode == DONT_OVERRIDE) {
5472 // We are going to create a holey array, but our kind is non-holey. 5044 // We are going to create a holey array, but our kind is non-holey.
5473 // Fix kind and retry. 5045 // Fix kind and retry.
5474 __ inc(edx); 5046 __ inc(edx);
5475 __ mov(ecx, FieldOperand(ebx, Cell::kValueOffset)); 5047
5476 if (FLAG_debug_code) { 5048 if (FLAG_debug_code) {
5477 Handle<Map> allocation_site_map = 5049 Handle<Map> allocation_site_map =
5478 masm->isolate()->factory()->allocation_site_map(); 5050 masm->isolate()->factory()->allocation_site_map();
5479 __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map)); 5051 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
5480 __ Assert(equal, kExpectedAllocationSiteInCell); 5052 __ Assert(equal, kExpectedAllocationSite);
5481 } 5053 }
5482 5054
5483 // Save the resulting elements kind in type info. We can't just store r3 5055 // Save the resulting elements kind in type info. We can't just store r3
5484 // in the AllocationSite::transition_info field because elements kind is 5056 // in the AllocationSite::transition_info field because elements kind is
5485 // restricted to a portion of the field...upper bits need to be left alone. 5057 // restricted to a portion of the field...upper bits need to be left alone.
5486 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 5058 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
5487 __ add(FieldOperand(ecx, AllocationSite::kTransitionInfoOffset), 5059 __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset),
5488 Immediate(Smi::FromInt(kFastElementsKindPackedToHoley))); 5060 Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
5489 5061
5490 __ bind(&normal_sequence); 5062 __ bind(&normal_sequence);
5491 int last_index = GetSequenceIndexFromFastElementsKind( 5063 int last_index = GetSequenceIndexFromFastElementsKind(
5492 TERMINAL_FAST_ELEMENTS_KIND); 5064 TERMINAL_FAST_ELEMENTS_KIND);
5493 for (int i = 0; i <= last_index; ++i) { 5065 for (int i = 0; i <= last_index; ++i) {
5494 Label next; 5066 Label next;
5495 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 5067 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
5496 __ cmp(edx, kind); 5068 __ cmp(edx, kind);
5497 __ j(not_equal, &next); 5069 __ j(not_equal, &next);
(...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after
5573 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode); 5145 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
5574 } else { 5146 } else {
5575 UNREACHABLE(); 5147 UNREACHABLE();
5576 } 5148 }
5577 } 5149 }
5578 5150
5579 5151
5580 void ArrayConstructorStub::Generate(MacroAssembler* masm) { 5152 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
5581 // ----------- S t a t e ------------- 5153 // ----------- S t a t e -------------
5582 // -- eax : argc (only if argument_count_ == ANY) 5154 // -- eax : argc (only if argument_count_ == ANY)
5583 // -- ebx : type info cell 5155 // -- ebx : feedback vector (fixed array or undefined)
5156 // -- edx : slot index (if ebx is fixed array)
5584 // -- edi : constructor 5157 // -- edi : constructor
5585 // -- esp[0] : return address 5158 // -- esp[0] : return address
5586 // -- esp[4] : last argument 5159 // -- esp[4] : last argument
5587 // ----------------------------------- 5160 // -----------------------------------
5588 Handle<Object> undefined_sentinel( 5161 Handle<Object> undefined_sentinel(
5589 masm->isolate()->heap()->undefined_value(), 5162 masm->isolate()->heap()->undefined_value(),
5590 masm->isolate()); 5163 masm->isolate());
5591 5164
5592 if (FLAG_debug_code) { 5165 if (FLAG_debug_code) {
5593 // The array construct code is only set for the global and natives 5166 // The array construct code is only set for the global and natives
5594 // builtin Array functions which always have maps. 5167 // builtin Array functions which always have maps.
5595 5168
5596 // Initial map for the builtin Array function should be a map. 5169 // Initial map for the builtin Array function should be a map.
5597 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset)); 5170 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
5598 // Will both indicate a NULL and a Smi. 5171 // Will both indicate a NULL and a Smi.
5599 __ test(ecx, Immediate(kSmiTagMask)); 5172 __ test(ecx, Immediate(kSmiTagMask));
5600 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction); 5173 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
5601 __ CmpObjectType(ecx, MAP_TYPE, ecx); 5174 __ CmpObjectType(ecx, MAP_TYPE, ecx);
5602 __ Assert(equal, kUnexpectedInitialMapForArrayFunction); 5175 __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
5603 5176
5604 // We should either have undefined in ebx or a valid cell 5177 // We should either have undefined in ebx or a valid fixed array.
5605 Label okay_here; 5178 Label okay_here;
5606 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); 5179 Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map();
5607 __ cmp(ebx, Immediate(undefined_sentinel)); 5180 __ cmp(ebx, Immediate(undefined_sentinel));
5608 __ j(equal, &okay_here); 5181 __ j(equal, &okay_here);
5609 __ cmp(FieldOperand(ebx, 0), Immediate(cell_map)); 5182 __ cmp(FieldOperand(ebx, 0), Immediate(fixed_array_map));
5610 __ Assert(equal, kExpectedPropertyCellInRegisterEbx); 5183 __ Assert(equal, kExpectedFixedArrayInRegisterEbx);
5184
5185 // edx should be a smi if we don't have undefined in ebx.
5186 __ AssertSmi(edx);
5187
5611 __ bind(&okay_here); 5188 __ bind(&okay_here);
5612 } 5189 }
5613 5190
5614 Label no_info; 5191 Label no_info;
5615 // If the type cell is undefined, or contains anything other than an 5192 // If the feedback vector is undefined, or contains anything other than an
5616 // AllocationSite, call an array constructor that doesn't use AllocationSites. 5193 // AllocationSite, call an array constructor that doesn't use AllocationSites.
5617 __ cmp(ebx, Immediate(undefined_sentinel)); 5194 __ cmp(ebx, Immediate(undefined_sentinel));
5618 __ j(equal, &no_info); 5195 __ j(equal, &no_info);
5619 __ mov(edx, FieldOperand(ebx, Cell::kValueOffset)); 5196 __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
5620 __ cmp(FieldOperand(edx, 0), Immediate( 5197 FixedArray::kHeaderSize));
5198 __ cmp(FieldOperand(ebx, 0), Immediate(
5621 masm->isolate()->factory()->allocation_site_map())); 5199 masm->isolate()->factory()->allocation_site_map()));
5622 __ j(not_equal, &no_info); 5200 __ j(not_equal, &no_info);
5623 5201
5624 // Only look at the lower 16 bits of the transition info. 5202 // Only look at the lower 16 bits of the transition info.
5625 __ mov(edx, FieldOperand(edx, AllocationSite::kTransitionInfoOffset)); 5203 __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
5626 __ SmiUntag(edx); 5204 __ SmiUntag(edx);
5627 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 5205 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
5628 __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask)); 5206 __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
5629 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); 5207 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
5630 5208
5631 __ bind(&no_info); 5209 __ bind(&no_info);
5632 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); 5210 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
5633 } 5211 }
5634 5212
5635 5213
(...skipping 29 matching lines...) Expand all
5665 5243
5666 __ bind(&not_one_case); 5244 __ bind(&not_one_case);
5667 InternalArrayNArgumentsConstructorStub stubN(kind); 5245 InternalArrayNArgumentsConstructorStub stubN(kind);
5668 __ TailCallStub(&stubN); 5246 __ TailCallStub(&stubN);
5669 } 5247 }
5670 5248
5671 5249
5672 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { 5250 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
5673 // ----------- S t a t e ------------- 5251 // ----------- S t a t e -------------
5674 // -- eax : argc 5252 // -- eax : argc
5675 // -- ebx : type info cell
5676 // -- edi : constructor 5253 // -- edi : constructor
5677 // -- esp[0] : return address 5254 // -- esp[0] : return address
5678 // -- esp[4] : last argument 5255 // -- esp[4] : last argument
5679 // ----------------------------------- 5256 // -----------------------------------
5680 5257
5681 if (FLAG_debug_code) { 5258 if (FLAG_debug_code) {
5682 // The array construct code is only set for the global and natives 5259 // The array construct code is only set for the global and natives
5683 // builtin Array functions which always have maps. 5260 // builtin Array functions which always have maps.
5684 5261
5685 // Initial map for the builtin Array function should be a map. 5262 // Initial map for the builtin Array function should be a map.
(...skipping 28 matching lines...) Expand all
5714 Label fast_elements_case; 5291 Label fast_elements_case;
5715 __ cmp(ecx, Immediate(FAST_ELEMENTS)); 5292 __ cmp(ecx, Immediate(FAST_ELEMENTS));
5716 __ j(equal, &fast_elements_case); 5293 __ j(equal, &fast_elements_case);
5717 GenerateCase(masm, FAST_HOLEY_ELEMENTS); 5294 GenerateCase(masm, FAST_HOLEY_ELEMENTS);
5718 5295
5719 __ bind(&fast_elements_case); 5296 __ bind(&fast_elements_case);
5720 GenerateCase(masm, FAST_ELEMENTS); 5297 GenerateCase(masm, FAST_ELEMENTS);
5721 } 5298 }
5722 5299
5723 5300
5301 void CallApiFunctionStub::Generate(MacroAssembler* masm) {
5302   // ----------- S t a t e -------------
5303   // -- eax : callee
5304   // -- ebx : call_data
5305   // -- ecx : holder
5306   // -- edx : api_function_address
5307   // -- esi : context
5308   // --
5309   // -- esp[0] : return address
5310   // -- esp[4] : last argument
5311   // -- ...
5312   // -- esp[argc * 4] : first argument
5313   // -- esp[(argc + 1) * 4] : receiver
5314   // -----------------------------------
       // Builds a FunctionCallbackArguments frame on the stack, fills in a
       // FunctionCallbackInfo structure in the C argument area, and calls the
       // API function (optionally through the profiler thunk) via
       // CallApiFunctionAndReturn.
5315 
5316   Register callee = eax;
5317   Register call_data = ebx;
5318   Register holder = ecx;
5319   Register api_function_address = edx;
5320   Register return_address = edi;
5321   Register context = esi;
5322 
5323   int argc = ArgumentBits::decode(bit_field_);
5324   bool is_store = IsStoreBits::decode(bit_field_);
5325   bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);
5326 
5327   typedef FunctionCallbackArguments FCA;
5328 
       // The push sequence below must produce exactly this FCA stack layout.
5329   STATIC_ASSERT(FCA::kContextSaveIndex == 6);
5330   STATIC_ASSERT(FCA::kCalleeIndex == 5);
5331   STATIC_ASSERT(FCA::kDataIndex == 4);
5332   STATIC_ASSERT(FCA::kReturnValueOffset == 3);
5333   STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
5334   STATIC_ASSERT(FCA::kIsolateIndex == 1);
5335   STATIC_ASSERT(FCA::kHolderIndex == 0);
5336   STATIC_ASSERT(FCA::kArgsLength == 7);
5337 
5338   Isolate* isolate = masm->isolate();
5339 
5340   __ pop(return_address);
5341 
5342   // context save
5343   __ push(context);
5344   // load context from callee
5345   __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));
5346 
5347   // callee
5348   __ push(callee);
5349 
5350   // call data
5351   __ push(call_data);
5352 
5353   Register scratch = call_data;
5354   if (!call_data_undefined) {
5355     // return value
5356     __ push(Immediate(isolate->factory()->undefined_value()));
5357     // return value default
5358     __ push(Immediate(isolate->factory()->undefined_value()));
5359   } else {
       // NOTE(review): here the call_data register presumably already holds
       // undefined (call_data_undefined is set), so it is reused for both
       // return-value slots — confirm against the stub's callers.
5360     // return value
5361     __ push(scratch);
5362     // return value default
5363     __ push(scratch);
5364   }
5365   // isolate
5366   __ push(Immediate(reinterpret_cast<int>(isolate)));
5367   // holder
5368   __ push(holder);
5369 
       // scratch now points at the holder slot, i.e. FCA index 0.
5370   __ mov(scratch, esp);
5371 
5372   // return address
5373   __ push(return_address);
5374 
5375   // API function gets reference to the v8::Arguments. If CPU profiler
5376   // is enabled wrapper function will be called and we need to pass
5377   // address of the callback as additional parameter, always allocate
5378   // space for it.
5379   const int kApiArgc = 1 + 1;
5380 
5381   // Allocate the v8::Arguments structure in the arguments' space since
5382   // it's not controlled by GC.
5383   const int kApiStackSpace = 4;
5384 
5385   __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);
5386 
5387   // FunctionCallbackInfo::implicit_args_.
5388   __ mov(ApiParameterOperand(2), scratch);
5389   __ add(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
5390   // FunctionCallbackInfo::values_.
5391   __ mov(ApiParameterOperand(3), scratch);
5392   // FunctionCallbackInfo::length_.
5393   __ Set(ApiParameterOperand(4), Immediate(argc));
5394   // FunctionCallbackInfo::is_construct_call_.
5395   __ Set(ApiParameterOperand(5), Immediate(0));
5396 
5397   // v8::InvocationCallback's argument.
5398   __ lea(scratch, ApiParameterOperand(2));
5399   __ mov(ApiParameterOperand(0), scratch);
5400 
5401   Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
5402 
5403   Operand context_restore_operand(ebp,
5404       (2 + FCA::kContextSaveIndex) * kPointerSize);
5405   // Stores return the first js argument.
5406   int return_value_offset = 0;
5407   if (is_store) {
5408     return_value_offset = 2 + FCA::kArgsLength;
5409   } else {
5410     return_value_offset = 2 + FCA::kReturnValueOffset;
5411   }
5412   Operand return_value_operand(ebp, return_value_offset * kPointerSize);
5413   __ CallApiFunctionAndReturn(api_function_address,
5414       thunk_address,
5415       ApiParameterOperand(1),
5416       argc + FCA::kArgsLength + 1,
5417       return_value_operand,
5418       &context_restore_operand);
5419 }
5420
5421
5422 void CallApiGetterStub::Generate(MacroAssembler* masm) {
5423   // ----------- S t a t e -------------
5424   // -- esp[0] : return address
5425   // -- esp[4] : name
5426   // -- esp[8 - kArgsLength*4] : PropertyCallbackArguments object
5427   // -- ...
5428   // -- edx : api_function_address
5429   // -----------------------------------
       // Calls an API accessor getter: passes the address of the property name
       // and a pointer to the PropertyCallbackArguments values, optionally
       // routing through the profiler thunk.
5430 
5431   // Stack space covers the v8::Arguments::values_ array, a handle for the
5432   // name, and a pointer to the values (treated as a smi by the GC).
5433   const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2;
5434   // Allocate space for an optional callback address parameter in case
5435   // the CPU profiler is active.
5436   const int kApiArgc = 2 + 1;
5437 
5438   Register api_function_address = edx;
5439   Register scratch = ebx;
5440 
5441   // load address of name
5442   __ lea(scratch, Operand(esp, 1 * kPointerSize));
5443 
5444   __ PrepareCallApiFunction(kApiArgc);
5445   __ mov(ApiParameterOperand(0), scratch);  // name.
5446   __ add(scratch, Immediate(kPointerSize));
5447   __ mov(ApiParameterOperand(1), scratch);  // arguments pointer.
5448 
5449   Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
5450 
       // NOTE(review): ebp + 7 * kPointerSize is the return-value slot; the
       // hard-coded 7 should correspond to the PropertyCallbackArguments
       // layout — confirm against that class's index constants.
5451   __ CallApiFunctionAndReturn(api_function_address,
5452       thunk_address,
5453       ApiParameterOperand(2),
5454       kStackSpace,
5455       Operand(ebp, 7 * kPointerSize),
5456       NULL);
5457 }
5458
5459
5724 #undef __ 5460 #undef __
5725 5461
5726 } } // namespace v8::internal 5462 } } // namespace v8::internal
5727 5463
5728 #endif // V8_TARGET_ARCH_IA32 5464 #endif // V8_TARGET_ARCH_IA32
OLDNEW
« no previous file with comments | « src/ia32/code-stubs-ia32.h ('k') | src/ia32/codegen-ia32.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698