Chromium Code Reviews

Side by Side Diff: src/mips/builtins-mips.cc

Issue 16453002: Removed flag optimize-constructed-arrays. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Comment fixes Created 7 years, 6 months ago
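This patch set removes the FLAG_optimize_constructed_arrays branches from the MIPS Array builtins: Generate_InternalArrayCode and Generate_ArrayCode now always tail-call InternalArrayConstructorStub and ArrayConstructorStub, and the hand-written fast paths they previously fell back on (AllocateEmptyJSArray, AllocateJSArray, ArrayNativeCode and Generate_CommonArrayConstructCode) are deleted from this file.

For context, below is a rough host-C++ illustration, not V8 source and not part of this CL, of the three construction cases the deleted ArrayNativeCode fast path distinguished. The name kMaxFastLength and the std::vector/std::optional types are stand-ins chosen for this sketch.

#include <cstdint>
#include <optional>
#include <vector>

// Stand-in for JSObject::kInitialMaxFastElementArray (assumed value).
constexpr int64_t kMaxFastLength = 0x10000;

// Returns std::nullopt where the assembly bailed out to the generic array code.
std::optional<std::vector<int64_t>> ConstructArray(const std::vector<int64_t>& args) {
  if (args.empty()) {
    // new Array(): empty JSArray with a small preallocated, hole-filled backing store.
    return std::vector<int64_t>{};
  }
  if (args.size() == 1) {
    // new Array(len): len must be a non-negative smi below the fast-allocation
    // limit, otherwise the builtin jumps to the generic code.
    int64_t len = args[0];
    if (len < 0 || len >= kMaxFastLength) return std::nullopt;
    return std::vector<int64_t>(static_cast<size_t>(len));  // hole-filled elements
  }
  // new Array(e0, e1, ...): allocate length argc and copy the arguments in
  // (the assembly copies backwards from the top of the stack).
  return std::vector<int64_t>(args.begin(), args.end());
}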
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 90 matching lines...)
101 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 101 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
102 __ lw(result, 102 __ lw(result,
103 FieldMemOperand(result, GlobalObject::kNativeContextOffset)); 103 FieldMemOperand(result, GlobalObject::kNativeContextOffset));
104 // Load the Array function from the native context. 104 // Load the Array function from the native context.
105 __ lw(result, 105 __ lw(result,
106 MemOperand(result, 106 MemOperand(result,
107 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); 107 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
108 } 108 }
109 109
110 110
111 // Allocate an empty JSArray. The allocated array is put into the result
112 // register. An elements backing store is allocated with size initial_capacity
113 // and filled with the hole values.
114 static void AllocateEmptyJSArray(MacroAssembler* masm,
115 Register array_function,
116 Register result,
117 Register scratch1,
118 Register scratch2,
119 Register scratch3,
120 Label* gc_required) {
121 const int initial_capacity = JSArray::kPreallocatedArrayElements;
122 STATIC_ASSERT(initial_capacity >= 0);
123 __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);
124
125 // Allocate the JSArray object together with space for a fixed array with the
126 // requested elements.
127 int size = JSArray::kSize;
128 if (initial_capacity > 0) {
129 size += FixedArray::SizeFor(initial_capacity);
130 }
131 __ Allocate(size, result, scratch2, scratch3, gc_required, TAG_OBJECT);
132
133 // Allocated the JSArray. Now initialize the fields except for the elements
134 // array.
135 // result: JSObject
136 // scratch1: initial map
137 // scratch2: start of next object
138 __ sw(scratch1, FieldMemOperand(result, JSObject::kMapOffset));
139 __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
140 __ sw(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset));
141 // Field JSArray::kElementsOffset is initialized later.
142 __ mov(scratch3, zero_reg);
143 __ sw(scratch3, FieldMemOperand(result, JSArray::kLengthOffset));
144
145 if (initial_capacity == 0) {
146 __ sw(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));
147 return;
148 }
149
150 // Calculate the location of the elements array and set elements array member
151 // of the JSArray.
152 // result: JSObject
153 // scratch2: start of next object
154 __ Addu(scratch1, result, Operand(JSArray::kSize));
155 __ sw(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));
156
157 // Clear the heap tag on the elements array.
158 __ And(scratch1, scratch1, Operand(~kHeapObjectTagMask));
159
160 // Initialize the FixedArray and fill it with holes. FixedArray length is
161 // stored as a smi.
162 // result: JSObject
163 // scratch1: elements array (untagged)
164 // scratch2: start of next object
165 __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
166 STATIC_ASSERT(0 * kPointerSize == FixedArray::kMapOffset);
167 __ sw(scratch3, MemOperand(scratch1));
168 __ Addu(scratch1, scratch1, kPointerSize);
169 __ li(scratch3, Operand(Smi::FromInt(initial_capacity)));
170 STATIC_ASSERT(1 * kPointerSize == FixedArray::kLengthOffset);
171 __ sw(scratch3, MemOperand(scratch1));
172 __ Addu(scratch1, scratch1, kPointerSize);
173
174 // Fill the FixedArray with the hole value. Inline the code if short.
175 STATIC_ASSERT(2 * kPointerSize == FixedArray::kHeaderSize);
176 __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
177 static const int kLoopUnfoldLimit = 4;
178 if (initial_capacity <= kLoopUnfoldLimit) {
179 for (int i = 0; i < initial_capacity; i++) {
180 __ sw(scratch3, MemOperand(scratch1, i * kPointerSize));
181 }
182 } else {
183 Label loop, entry;
184 __ Addu(scratch2, scratch1, Operand(initial_capacity * kPointerSize));
185 __ Branch(&entry);
186 __ bind(&loop);
187 __ sw(scratch3, MemOperand(scratch1));
188 __ Addu(scratch1, scratch1, kPointerSize);
189 __ bind(&entry);
190 __ Branch(&loop, lt, scratch1, Operand(scratch2));
191 }
192 }
193
194
195 // Allocate a JSArray with the number of elements stored in a register. The
196 // register array_function holds the built-in Array function and the register
197 // array_size holds the size of the array as a smi. The allocated array is put
198 // into the result register and beginning and end of the FixedArray elements
199 // storage is put into registers elements_array_storage and elements_array_end
200 // (see below for when that is not the case). If the parameter fill_with_holes
201 // is true the allocated elements backing store is filled with the hole values
202 // otherwise it is left uninitialized. When the backing store is filled the
203 // register elements_array_storage is scratched.
204 static void AllocateJSArray(MacroAssembler* masm,
205 Register array_function, // Array function.
206 Register array_size, // As a smi, cannot be 0.
207 Register result,
208 Register elements_array_storage,
209 Register elements_array_end,
210 Register scratch1,
211 Register scratch2,
212 bool fill_with_hole,
213 Label* gc_required) {
214 // Load the initial map from the array function.
215 __ LoadInitialArrayMap(array_function, scratch2,
216 elements_array_storage, fill_with_hole);
217
218 if (FLAG_debug_code) { // Assert that array size is not zero.
219 __ Assert(
220 ne, "array size is unexpectedly 0", array_size, Operand(zero_reg));
221 }
222
223 // Allocate the JSArray object together with space for a FixedArray with the
224 // requested number of elements.
225 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
226 __ li(elements_array_end,
227 (JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize);
228 __ sra(scratch1, array_size, kSmiTagSize);
229 __ Addu(elements_array_end, elements_array_end, scratch1);
230 __ Allocate(elements_array_end,
231 result,
232 scratch1,
233 scratch2,
234 gc_required,
235 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
236
237 // Allocated the JSArray. Now initialize the fields except for the elements
238 // array.
239 // result: JSObject
240 // elements_array_storage: initial map
241 // array_size: size of array (smi)
242 __ sw(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
243 __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
244 __ sw(elements_array_storage,
245 FieldMemOperand(result, JSArray::kPropertiesOffset));
246 // Field JSArray::kElementsOffset is initialized later.
247 __ sw(array_size, FieldMemOperand(result, JSArray::kLengthOffset));
248
249 // Calculate the location of the elements array and set elements array member
250 // of the JSArray.
251 // result: JSObject
252 // array_size: size of array (smi)
253 __ Addu(elements_array_storage, result, Operand(JSArray::kSize));
254 __ sw(elements_array_storage,
255 FieldMemOperand(result, JSArray::kElementsOffset));
256
257 // Clear the heap tag on the elements array.
258 __ And(elements_array_storage,
259 elements_array_storage,
260 Operand(~kHeapObjectTagMask));
261 // Initialize the fixed array and fill it with holes. FixedArray length is
262 // stored as a smi.
263 // result: JSObject
264 // elements_array_storage: elements array (untagged)
265 // array_size: size of array (smi)
266 __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
267 ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
268 __ sw(scratch1, MemOperand(elements_array_storage));
269 __ Addu(elements_array_storage, elements_array_storage, kPointerSize);
270
271 // Length of the FixedArray is the number of pre-allocated elements if
272 // the actual JSArray has length 0 and the size of the JSArray for non-empty
273 // JSArrays. The length of a FixedArray is stored as a smi.
274 STATIC_ASSERT(kSmiTag == 0);
275
276 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
277 __ sw(array_size, MemOperand(elements_array_storage));
278 __ Addu(elements_array_storage, elements_array_storage, kPointerSize);
279
280 // Calculate elements array and elements array end.
281 // result: JSObject
282 // elements_array_storage: elements array element storage
283 // array_size: smi-tagged size of elements array
284 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
285 __ sll(elements_array_end, array_size, kPointerSizeLog2 - kSmiTagSize);
286 __ Addu(elements_array_end, elements_array_storage, elements_array_end);
287
288 // Fill the allocated FixedArray with the hole value if requested.
289 // result: JSObject
290 // elements_array_storage: elements array element storage
291 // elements_array_end: start of next object
292 if (fill_with_hole) {
293 Label loop, entry;
294 __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
295 __ Branch(&entry);
296 __ bind(&loop);
297 __ sw(scratch1, MemOperand(elements_array_storage));
298 __ Addu(elements_array_storage, elements_array_storage, kPointerSize);
299
300 __ bind(&entry);
301 __ Branch(&loop, lt, elements_array_storage, Operand(elements_array_end));
302 }
303 }
304
305
306 // Create a new array for the built-in Array function. This function allocates
307 // the JSArray object and the FixedArray elements array and initializes these.
308 // If the Array cannot be constructed in native code the runtime is called. This
309 // function assumes the following state:
310 // a0: argc
311 // a1: constructor (built-in Array function)
312 // ra: return address
313 // sp[0]: last argument
314 // This function is used for both construct and normal calls of Array. The only
315 // difference between handling a construct call and a normal call is that for a
316 // construct call the constructor function in a1 needs to be preserved for
317 // entering the generic code. In both cases argc in a0 needs to be preserved.
318 // Both registers are preserved by this code so no need to differentiate between
319 // construct call and normal call.
320 void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code) {
321 Counters* counters = masm->isolate()->counters();
322 Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
323 has_non_smi_element, finish, cant_transition_map, not_double;
324
325 // Check for array construction with zero arguments or one.
326 __ Branch(&argc_one_or_more, ne, a0, Operand(zero_reg));
327 // Handle construction of an empty array.
328 __ bind(&empty_array);
329 AllocateEmptyJSArray(masm,
330 a1,
331 a2,
332 a3,
333 t0,
334 t1,
335 call_generic_code);
336 __ IncrementCounter(counters->array_function_native(), 1, a3, t0);
337 // Set up return value, remove receiver from stack and return.
338 __ Addu(sp, sp, Operand(kPointerSize));
339 __ Ret(USE_DELAY_SLOT);
340 __ mov(v0, a2);
341
342 // Check for one argument. Bail out if argument is not smi or if it is
343 // negative.
344 __ bind(&argc_one_or_more);
345 __ Branch(&argc_two_or_more, ne, a0, Operand(1));
346
347 STATIC_ASSERT(kSmiTag == 0);
348 __ lw(a2, MemOperand(sp)); // Get the argument from the stack.
349 __ Branch(&not_empty_array, ne, a2, Operand(zero_reg));
350 __ Drop(1); // Adjust stack.
351 __ mov(a0, zero_reg); // Treat this as a call with argc of zero.
352 __ Branch(&empty_array);
353
354 __ bind(&not_empty_array);
355 __ And(a3, a2, Operand(kIntptrSignBit | kSmiTagMask));
356 __ Branch(call_generic_code, eq, a3, Operand(zero_reg));
357
358 // Handle construction of an empty array of a certain size. Bail out if size
359 // is too large to actually allocate an elements array.
360 STATIC_ASSERT(kSmiTag == 0);
361 __ Branch(call_generic_code, Ugreater_equal, a2,
362 Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
363
364 // a0: argc
365 // a1: constructor
366 // a2: array_size (smi)
367 // sp[0]: argument
368 AllocateJSArray(masm,
369 a1,
370 a2,
371 a3,
372 t0,
373 t1,
374 t2,
375 t3,
376 true,
377 call_generic_code);
378 __ IncrementCounter(counters->array_function_native(), 1, a2, t0);
379
380 // Set up return value, remove receiver and argument from stack and return.
381 __ Addu(sp, sp, Operand(2 * kPointerSize));
382 __ Ret(USE_DELAY_SLOT);
383 __ mov(v0, a3);
384
385 // Handle construction of an array from a list of arguments.
386 __ bind(&argc_two_or_more);
387 __ sll(a2, a0, kSmiTagSize); // Convert argc to a smi.
388
389 // a0: argc
390 // a1: constructor
391 // a2: array_size (smi)
392 // sp[0]: last argument
393 AllocateJSArray(masm,
394 a1,
395 a2,
396 a3,
397 t0,
398 t1,
399 t2,
400 t3,
401 false,
402 call_generic_code);
403 __ IncrementCounter(counters->array_function_native(), 1, a2, t2);
404
405 // Fill arguments as array elements. Copy from the top of the stack (last
406 // element) to the array backing store filling it backwards. Note:
407 // elements_array_end points after the backing store.
408 // a0: argc
409 // a3: JSArray
410 // t0: elements_array storage start (untagged)
411 // t1: elements_array_end (untagged)
412 // sp[0]: last argument
413
414 Label loop, entry;
415 __ Branch(USE_DELAY_SLOT, &entry);
416 __ mov(t3, sp);
417 __ bind(&loop);
418 __ lw(a2, MemOperand(t3));
419 if (FLAG_smi_only_arrays) {
420 __ JumpIfNotSmi(a2, &has_non_smi_element);
421 }
422 __ Addu(t3, t3, kPointerSize);
423 __ Addu(t1, t1, -kPointerSize);
424 __ sw(a2, MemOperand(t1));
425 __ bind(&entry);
426 __ Branch(&loop, lt, t0, Operand(t1));
427
428 __ bind(&finish);
429 __ mov(sp, t3);
430
431 // Remove caller arguments and receiver from the stack, setup return value and
432 // return.
433 // a0: argc
434 // a3: JSArray
435 // sp[0]: receiver
436 __ Addu(sp, sp, Operand(kPointerSize));
437 __ Ret(USE_DELAY_SLOT);
438 __ mov(v0, a3);
439
440 __ bind(&has_non_smi_element);
441 // Double values are handled by the runtime.
442 __ CheckMap(
443 a2, t5, Heap::kHeapNumberMapRootIndex, &not_double, DONT_DO_SMI_CHECK);
444 __ bind(&cant_transition_map);
445 __ UndoAllocationInNewSpace(a3, t0);
446 __ Branch(call_generic_code);
447
448 __ bind(&not_double);
449 // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
450 // a3: JSArray
451 __ lw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
452 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
453 FAST_ELEMENTS,
454 a2,
455 t5,
456 &cant_transition_map);
457 __ sw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
458 __ RecordWriteField(a3,
459 HeapObject::kMapOffset,
460 a2,
461 t5,
462 kRAHasNotBeenSaved,
463 kDontSaveFPRegs,
464 EMIT_REMEMBERED_SET,
465 OMIT_SMI_CHECK);
466 Label loop2;
467 __ bind(&loop2);
468 __ lw(a2, MemOperand(t3));
469 __ Addu(t3, t3, kPointerSize);
470 __ Subu(t1, t1, kPointerSize);
471 __ sw(a2, MemOperand(t1));
472 __ Branch(&loop2, lt, t0, Operand(t1));
473 __ Branch(&finish);
474 }
475
476
477 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { 111 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
478 // ----------- S t a t e ------------- 112 // ----------- S t a t e -------------
479 // -- a0 : number of arguments 113 // -- a0 : number of arguments
480 // -- ra : return address 114 // -- ra : return address
481 // -- sp[...]: constructor arguments 115 // -- sp[...]: constructor arguments
482 // ----------------------------------- 116 // -----------------------------------
483 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; 117 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
484 118
485 // Get the InternalArray function. 119 // Get the InternalArray function.
486 GenerateLoadInternalArrayFunction(masm, a1); 120 GenerateLoadInternalArrayFunction(masm, a1);
487 121
488 if (FLAG_debug_code) { 122 if (FLAG_debug_code) {
489 // Initial map for the builtin InternalArray functions should be maps. 123 // Initial map for the builtin InternalArray functions should be maps.
490 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); 124 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
491 __ And(t0, a2, Operand(kSmiTagMask)); 125 __ And(t0, a2, Operand(kSmiTagMask));
492 __ Assert(ne, "Unexpected initial map for InternalArray function", 126 __ Assert(ne, "Unexpected initial map for InternalArray function",
493 t0, Operand(zero_reg)); 127 t0, Operand(zero_reg));
494 __ GetObjectType(a2, a3, t0); 128 __ GetObjectType(a2, a3, t0);
495 __ Assert(eq, "Unexpected initial map for InternalArray function", 129 __ Assert(eq, "Unexpected initial map for InternalArray function",
496 t0, Operand(MAP_TYPE)); 130 t0, Operand(MAP_TYPE));
497 } 131 }
498 132
499 // Run the native code for the InternalArray function called as a normal 133 // Run the native code for the InternalArray function called as a normal
500 // function. 134 // function.
501 if (FLAG_optimize_constructed_arrays) { 135 // Tail call a stub.
502 // Tail call a stub. 136 InternalArrayConstructorStub stub(masm->isolate());
503 InternalArrayConstructorStub stub(masm->isolate()); 137 __ TailCallStub(&stub);
504 __ TailCallStub(&stub);
505 } else {
506 ArrayNativeCode(masm, &generic_array_code);
507
508 // Jump to the generic array code if the specialized code cannot handle the
509 // construction.
510 __ bind(&generic_array_code);
511 Handle<Code> array_code =
512 masm->isolate()->builtins()->InternalArrayCodeGeneric();
513 __ Jump(array_code, RelocInfo::CODE_TARGET);
514 }
515 } 138 }
516 139
517 140
518 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { 141 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
519 // ----------- S t a t e ------------- 142 // ----------- S t a t e -------------
520 // -- a0 : number of arguments 143 // -- a0 : number of arguments
521 // -- ra : return address 144 // -- ra : return address
522 // -- sp[...]: constructor arguments 145 // -- sp[...]: constructor arguments
523 // ----------------------------------- 146 // -----------------------------------
524 Label generic_array_code; 147 Label generic_array_code;
525 148
526 // Get the Array function. 149 // Get the Array function.
527 GenerateLoadArrayFunction(masm, a1); 150 GenerateLoadArrayFunction(masm, a1);
528 151
529 if (FLAG_debug_code) { 152 if (FLAG_debug_code) {
530 // Initial map for the builtin Array functions should be maps. 153 // Initial map for the builtin Array functions should be maps.
531 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); 154 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
532 __ And(t0, a2, Operand(kSmiTagMask)); 155 __ And(t0, a2, Operand(kSmiTagMask));
533 __ Assert(ne, "Unexpected initial map for Array function (1)", 156 __ Assert(ne, "Unexpected initial map for Array function (1)",
534 t0, Operand(zero_reg)); 157 t0, Operand(zero_reg));
535 __ GetObjectType(a2, a3, t0); 158 __ GetObjectType(a2, a3, t0);
536 __ Assert(eq, "Unexpected initial map for Array function (2)", 159 __ Assert(eq, "Unexpected initial map for Array function (2)",
537 t0, Operand(MAP_TYPE)); 160 t0, Operand(MAP_TYPE));
538 } 161 }
539 162
540 // Run the native code for the Array function called as a normal function. 163 // Run the native code for the Array function called as a normal function.
541 if (FLAG_optimize_constructed_arrays) { 164 // Tail call a stub.
542 // Tail call a stub. 165 Handle<Object> undefined_sentinel(
543 Handle<Object> undefined_sentinel( 166 masm->isolate()->heap()->undefined_value(),
544 masm->isolate()->heap()->undefined_value(), 167 masm->isolate());
545 masm->isolate()); 168 __ li(a2, Operand(undefined_sentinel));
546 __ li(a2, Operand(undefined_sentinel)); 169 ArrayConstructorStub stub(masm->isolate());
547 ArrayConstructorStub stub(masm->isolate()); 170 __ TailCallStub(&stub);
548 __ TailCallStub(&stub);
549 } else {
550 ArrayNativeCode(masm, &generic_array_code);
551
552 // Jump to the generic array code if the specialized code cannot handle
553 // the construction.
554 __ bind(&generic_array_code);
555 Handle<Code> array_code =
556 masm->isolate()->builtins()->ArrayCodeGeneric();
557 __ Jump(array_code, RelocInfo::CODE_TARGET);
558 }
559 } 171 }
560 172
561 173
562 void Builtins::Generate_CommonArrayConstructCode(MacroAssembler* masm) {
563 // ----------- S t a t e -------------
564 // -- a0 : number of arguments
565 // -- a1 : constructor function
566 // -- a2 : type info cell
567 // -- ra : return address
568 // -- sp[...]: constructor arguments
569 // -----------------------------------
570
571 if (FLAG_debug_code) {
572 // The array construct code is only set for the builtin and internal
573 // Array functions which always have a map.
574 // Initial map for the builtin Array function should be a map.
575 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
576 __ And(t0, a3, Operand(kSmiTagMask));
577 __ Assert(ne, "Unexpected initial map for Array function (3)",
578 t0, Operand(zero_reg));
579 __ GetObjectType(a3, a3, t0);
580 __ Assert(eq, "Unexpected initial map for Array function (4)",
581 t0, Operand(MAP_TYPE));
582 }
583 Label generic_constructor;
584 // Run the native code for the Array function called as a constructor.
585 ArrayNativeCode(masm, &generic_constructor);
586
587 // Jump to the generic construct code in case the specialized code cannot
588 // handle the construction.
589 __ bind(&generic_constructor);
590 Handle<Code> generic_construct_stub =
591 masm->isolate()->builtins()->JSConstructStubGeneric();
592 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
593 }
594
595
596 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { 174 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
597 // ----------- S t a t e ------------- 175 // ----------- S t a t e -------------
598 // -- a0 : number of arguments 176 // -- a0 : number of arguments
599 // -- a1 : constructor function 177 // -- a1 : constructor function
600 // -- ra : return address 178 // -- ra : return address
601 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) 179 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
602 // -- sp[argc * 4] : receiver 180 // -- sp[argc * 4] : receiver
603 // ----------------------------------- 181 // -----------------------------------
604 Counters* counters = masm->isolate()->counters(); 182 Counters* counters = masm->isolate()->counters();
605 __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3); 183 __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);
(...skipping 1331 matching lines...)
1937 __ bind(&dont_adapt_arguments); 1515 __ bind(&dont_adapt_arguments);
1938 __ Jump(a3); 1516 __ Jump(a3);
1939 } 1517 }
1940 1518
1941 1519
1942 #undef __ 1520 #undef __
1943 1521
1944 } } // namespace v8::internal 1522 } } // namespace v8::internal
1945 1523
1946 #endif // V8_TARGET_ARCH_MIPS 1524 #endif // V8_TARGET_ARCH_MIPS