OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 86 matching lines...)
97 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 97 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
98 __ ldr(result, | 98 __ ldr(result, |
99 FieldMemOperand(result, GlobalObject::kNativeContextOffset)); | 99 FieldMemOperand(result, GlobalObject::kNativeContextOffset)); |
100 // Load the Array function from the native context. | 100 // Load the Array function from the native context. |
101 __ ldr(result, | 101 __ ldr(result, |
102 MemOperand(result, | 102 MemOperand(result, |
103 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); | 103 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX))); |
104 } | 104 } |
105 | 105 |
106 | 106 |
107 // Allocate an empty JSArray. The allocated array is put into the result | |
108 // register. An elements backing store is allocated with size initial_capacity | |
109 // and filled with the hole values. | |
110 static void AllocateEmptyJSArray(MacroAssembler* masm, | |
111 Register array_function, | |
112 Register result, | |
113 Register scratch1, | |
114 Register scratch2, | |
115 Register scratch3, | |
116 Label* gc_required) { | |
117 const int initial_capacity = JSArray::kPreallocatedArrayElements; | |
118 STATIC_ASSERT(initial_capacity >= 0); | |
119 __ LoadInitialArrayMap(array_function, scratch2, scratch1, false); | |
120 | |
121 // Allocate the JSArray object together with space for a fixed array with the | |
122 // requested elements. | |
123 int size = JSArray::kSize; | |
124 if (initial_capacity > 0) { | |
125 size += FixedArray::SizeFor(initial_capacity); | |
126 } | |
127 __ Allocate(size, result, scratch2, scratch3, gc_required, TAG_OBJECT); | |
128 | |
129 // Allocated the JSArray. Now initialize the fields except for the elements | |
130 // array. | |
131 // result: JSObject | |
132 // scratch1: initial map | |
133 // scratch2: start of next object | |
134 __ str(scratch1, FieldMemOperand(result, JSObject::kMapOffset)); | |
135 __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex); | |
136 __ str(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset)); | |
137 // Field JSArray::kElementsOffset is initialized later. | |
138 __ mov(scratch3, Operand::Zero()); | |
139 __ str(scratch3, FieldMemOperand(result, JSArray::kLengthOffset)); | |
140 | |
141 if (initial_capacity == 0) { | |
142 __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset)); | |
143 return; | |
144 } | |
145 | |
146 // Calculate the location of the elements array and set elements array member | |
147 // of the JSArray. | |
148 // result: JSObject | |
149 // scratch2: start of next object | |
150 __ add(scratch1, result, Operand(JSArray::kSize)); | |
151 __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset)); | |
152 | |
153 // Clear the heap tag on the elements array. | |
154 __ sub(scratch1, scratch1, Operand(kHeapObjectTag)); | |
155 | |
156 // Initialize the FixedArray and fill it with holes. FixedArray length is | |
157 // stored as a smi. | |
158 // result: JSObject | |
159 // scratch1: elements array (untagged) | |
160 // scratch2: start of next object | |
161 __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex); | |
162 STATIC_ASSERT(0 * kPointerSize == FixedArray::kMapOffset); | |
163 __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex)); | |
164 __ mov(scratch3, Operand(Smi::FromInt(initial_capacity))); | |
165 STATIC_ASSERT(1 * kPointerSize == FixedArray::kLengthOffset); | |
166 __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex)); | |
167 | |
168 // Fill the FixedArray with the hole value. Inline the code if short. | |
169 STATIC_ASSERT(2 * kPointerSize == FixedArray::kHeaderSize); | |
170 __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex); | |
171 static const int kLoopUnfoldLimit = 4; | |
172 if (initial_capacity <= kLoopUnfoldLimit) { | |
173 for (int i = 0; i < initial_capacity; i++) { | |
174 __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex)); | |
175 } | |
176 } else { | |
177 Label loop, entry; | |
178 __ add(scratch2, scratch1, Operand(initial_capacity * kPointerSize)); | |
179 __ b(&entry); | |
180 __ bind(&loop); | |
181 __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex)); | |
182 __ bind(&entry); | |
183 __ cmp(scratch1, scratch2); | |
184 __ b(lt, &loop); | |
185 } | |
186 } | |
187 | |
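For orientation, here is a rough, self-contained C++ model of what the deleted AllocateEmptyJSArray emits: one bump-pointer allocation covering the JSArray header plus, when initial_capacity > 0, a FixedArray header and initial_capacity slots, each slot set to the hole. All types, slot indices, and constants below are illustrative stand-ins chosen to mirror the code above, not the real V8 definitions.

#include <cstddef>
#include <cstdint>
#include <vector>

using Word = uintptr_t;

constexpr int kJSArrayHeaderWords = 4;    // map, properties, elements, length
constexpr int kFixedArrayHeaderWords = 2; // map, length
constexpr Word kTheHole = 0xdeadbeef;     // stand-in for the hole sentinel

struct BumpHeap {
  std::vector<Word> space;
  // Toy bump allocation; the real Allocate() also checks the new-space limit
  // and branches to gc_required on failure. Pointers returned here are only
  // valid until the next Allocate() call (the vector may move).
  Word* Allocate(std::size_t words) {
    std::size_t old_top = space.size();
    space.resize(old_top + words);
    return space.data() + old_top;
  }
};

// Mirrors the store sequence above: map, properties, length, then either the
// canonical empty FixedArray or an inline backing store filled with holes.
Word* AllocateEmptyJSArrayModel(BumpHeap* heap, int initial_capacity,
                                Word initial_map, Word empty_fixed_array,
                                Word fixed_array_map) {
  std::size_t words = kJSArrayHeaderWords;
  if (initial_capacity > 0)
    words += kFixedArrayHeaderWords + initial_capacity;
  Word* array = heap->Allocate(words);

  array[0] = initial_map;        // JSObject::kMapOffset
  array[1] = empty_fixed_array;  // JSArray::kPropertiesOffset
  array[3] = 0;                  // JSArray::kLengthOffset, smi zero

  if (initial_capacity == 0) {
    array[2] = empty_fixed_array;  // elements: the shared empty FixedArray
    return array;
  }

  Word* elements = array + kJSArrayHeaderWords;  // allocated in the same chunk
  array[2] = reinterpret_cast<Word>(elements);   // JSArray::kElementsOffset
  elements[0] = fixed_array_map;
  elements[1] = static_cast<Word>(initial_capacity) << 1;  // length as a smi
  // The generated code unrolls this loop when initial_capacity <= 4.
  for (int i = 0; i < initial_capacity; ++i)
    elements[kFixedArrayHeaderWords + i] = kTheHole;
  return array;
}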
188 // Allocate a JSArray with the number of elements stored in a register. The | |
189 // register array_function holds the built-in Array function and the register | |
190 // array_size holds the size of the array as a smi. The allocated array is put | |
191 // into the result register, and the beginning and end of the FixedArray elements | 
192 // storage are put into registers elements_array_storage and elements_array_end | 
193 // (see below for when that is not the case). If the parameter fill_with_hole | 
194 // is true, the allocated elements backing store is filled with the hole value; | 
195 // otherwise it is left uninitialized. When the backing store is filled, the | 
196 // register elements_array_storage is scratched. | |
197 static void AllocateJSArray(MacroAssembler* masm, | |
198 Register array_function, // Array function. | |
199 Register array_size, // As a smi, cannot be 0. | |
200 Register result, | |
201 Register elements_array_storage, | |
202 Register elements_array_end, | |
203 Register scratch1, | |
204 Register scratch2, | |
205 bool fill_with_hole, | |
206 Label* gc_required) { | |
207 // Load the initial map from the array function. | |
208 __ LoadInitialArrayMap(array_function, scratch2, | |
209 elements_array_storage, fill_with_hole); | |
210 | |
211 if (FLAG_debug_code) { // Assert that array size is not zero. | |
212 __ tst(array_size, array_size); | |
213 __ Assert(ne, "array size is unexpectedly 0"); | |
214 } | |
215 | |
216 // Allocate the JSArray object together with space for a FixedArray with the | |
217 // requested number of elements. | |
218 __ mov(elements_array_end, | |
219 Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize)); | |
220 __ add(elements_array_end, elements_array_end, Operand::SmiUntag(array_size)); | |
221 __ Allocate(elements_array_end, | |
222 result, | |
223 scratch1, | |
224 scratch2, | |
225 gc_required, | |
226 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)); | |
227 | |
228 // Allocated the JSArray. Now initialize the fields except for the elements | |
229 // array. | |
230 // result: JSObject | |
231 // elements_array_storage: initial map | |
232 // array_size: size of array (smi) | |
233 __ str(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset)); | |
234 __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex); | |
235 __ str(elements_array_storage, | |
236 FieldMemOperand(result, JSArray::kPropertiesOffset)); | |
237 // Field JSArray::kElementsOffset is initialized later. | |
238 __ str(array_size, FieldMemOperand(result, JSArray::kLengthOffset)); | |
239 | |
240 // Calculate the location of the elements array and set elements array member | |
241 // of the JSArray. | |
242 // result: JSObject | |
243 // array_size: size of array (smi) | |
244 __ add(elements_array_storage, result, Operand(JSArray::kSize)); | |
245 __ str(elements_array_storage, | |
246 FieldMemOperand(result, JSArray::kElementsOffset)); | |
247 | |
248 // Clear the heap tag on the elements array. | |
249 __ sub(elements_array_storage, | |
250 elements_array_storage, | |
251 Operand(kHeapObjectTag)); | |
252 // Initialize the fixed array and fill it with holes. FixedArray length is | |
253 // stored as a smi. | |
254 // result: JSObject | |
255 // elements_array_storage: elements array (untagged) | |
256 // array_size: size of array (smi) | |
257 __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex); | |
258 ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset); | |
259 __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex)); | |
260 ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset); | |
261 __ str(array_size, | |
262 MemOperand(elements_array_storage, kPointerSize, PostIndex)); | |
263 | |
264 // Calculate elements array and elements array end. | |
265 // result: JSObject | |
266 // elements_array_storage: elements array element storage | |
267 // array_size: smi-tagged size of elements array | |
268 __ add(elements_array_end, | |
269 elements_array_storage, | |
270 Operand::PointerOffsetFromSmiKey(array_size)); | |
271 | |
272 // Fill the allocated FixedArray with the hole value if requested. | |
273 // result: JSObject | |
274 // elements_array_storage: elements array element storage | |
275 // elements_array_end: start of next object | |
276 if (fill_with_hole) { | |
277 Label loop, entry; | |
278 __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex); | |
279 __ jmp(&entry); | |
280 __ bind(&loop); | |
281 __ str(scratch1, | |
282 MemOperand(elements_array_storage, kPointerSize, PostIndex)); | |
283 __ bind(&entry); | |
284 __ cmp(elements_array_storage, elements_array_end); | |
285 __ b(lt, &loop); | |
286 } | |
287 } | |
288 | |
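The part of the deleted AllocateJSArray that is easiest to misread is the smi arithmetic: the requested length arrives smi-tagged, feeds the allocation request in words (hence SIZE_IN_WORDS), and later becomes the byte offset of elements_array_end. A small sketch of that arithmetic, using stand-in constants that mirror the code above rather than the real V8 definitions:

#include <cassert>
#include <cstdint>

constexpr int kSmiTagSize = 1;   // 31-bit payload, low tag bit is 0
constexpr int kPointerSize = 4;  // 32-bit ARM, as in this file
constexpr int kJSArraySize = 4 * kPointerSize;           // map, properties, elements, length
constexpr int kFixedArrayHeaderSize = 2 * kPointerSize;  // map, length

inline int32_t SmiTag(int32_t value) { return value << kSmiTagSize; }
inline int32_t SmiUntag(int32_t smi) { return smi >> kSmiTagSize; }

// Word count passed to Allocate(..., SIZE_IN_WORDS) above: the fixed header
// words plus one word per element.
inline int32_t AllocationSizeInWords(int32_t array_size_smi) {
  assert(array_size_smi != 0);  // the generated code asserts this in debug mode
  return (kJSArraySize + kFixedArrayHeaderSize) / kPointerSize +
         SmiUntag(array_size_smi);
}

// Operand::PointerOffsetFromSmiKey(array_size): the byte distance from the
// start of the element storage to elements_array_end.
inline int32_t PointerOffsetFromSmiKey(int32_t smi) {
  return SmiUntag(smi) * kPointerSize;
}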
289 // Create a new array for the built-in Array function. This function allocates | |
290 // the JSArray object and the FixedArray elements array and initializes them. | 
291 // If the Array cannot be constructed in native code, the runtime is called. This | 
292 // function assumes the following state: | |
293 // r0: argc | |
294 // r1: constructor (built-in Array function) | |
295 // lr: return address | |
296 // sp[0]: last argument | |
297 // This function is used for both construct and normal calls of Array. The only | |
298 // difference between handling a construct call and a normal call is that for a | |
299 // construct call the constructor function in r1 needs to be preserved for | |
300 // entering the generic code. In both cases argc in r0 needs to be preserved. | |
301 // Both registers are preserved by this code, so there is no need to | 
302 // differentiate between a construct call and a normal call. | 
303 void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code) { | |
304 Counters* counters = masm->isolate()->counters(); | |
305 Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array, | |
306 has_non_smi_element, finish, cant_transition_map, not_double; | |
307 | |
308 // Check for array construction with zero arguments or one. | |
309 __ cmp(r0, Operand::Zero()); | |
310 __ b(ne, &argc_one_or_more); | |
311 | |
312 // Handle construction of an empty array. | |
313 __ bind(&empty_array); | |
314 AllocateEmptyJSArray(masm, | |
315 r1, | |
316 r2, | |
317 r3, | |
318 r4, | |
319 r5, | |
320 call_generic_code); | |
321 __ IncrementCounter(counters->array_function_native(), 1, r3, r4); | |
322 // Set up return value, remove receiver from stack and return. | |
323 __ mov(r0, r2); | |
324 __ add(sp, sp, Operand(kPointerSize)); | |
325 __ Jump(lr); | |
326 | |
327 // Check for one argument. Bail out if the argument is not a smi or if it is | 
328 // negative. | |
329 __ bind(&argc_one_or_more); | |
330 __ cmp(r0, Operand(1)); | |
331 __ b(ne, &argc_two_or_more); | |
332 __ ldr(r2, MemOperand(sp)); // Get the argument from the stack. | |
333 __ tst(r2, r2); | |
334 __ b(ne, ¬_empty_array); | |
335 __ Drop(1); // Adjust stack. | |
336 __ mov(r0, Operand::Zero()); // Treat this as a call with argc of zero. | |
337 __ b(&empty_array); | |
338 | |
339 __ bind(¬_empty_array); | |
340 STATIC_ASSERT(kSmiTag == 0); | |
341 __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC); | |
342 __ b(ne, call_generic_code); | |
343 | |
344 // Handle construction of an empty array of a certain size. Bail out if size | |
345 // is too large to actually allocate an elements array. | |
346 STATIC_ASSERT(kSmiTag == 0); | |
347 __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize)); | |
348 __ b(ge, call_generic_code); | |
349 | |
350 // r0: argc | |
351 // r1: constructor | |
352 // r2: array_size (smi) | |
353 // sp[0]: argument | |
354 AllocateJSArray(masm, | |
355 r1, | |
356 r2, | |
357 r3, | |
358 r4, | |
359 r5, | |
360 r6, | |
361 r7, | |
362 true, | |
363 call_generic_code); | |
364 __ IncrementCounter(counters->array_function_native(), 1, r2, r4); | |
365 // Set up return value, remove receiver and argument from stack and return. | |
366 __ mov(r0, r3); | |
367 __ add(sp, sp, Operand(2 * kPointerSize)); | |
368 __ Jump(lr); | |
369 | |
370 // Handle construction of an array from a list of arguments. | |
371 __ bind(&argc_two_or_more); | |
372 __ SmiTag(r2, r0); | |
373 | |
374 // r0: argc | |
375 // r1: constructor | |
376 // r2: array_size (smi) | |
377 // sp[0]: last argument | |
378 AllocateJSArray(masm, | |
379 r1, | |
380 r2, | |
381 r3, | |
382 r4, | |
383 r5, | |
384 r6, | |
385 r7, | |
386 false, | |
387 call_generic_code); | |
388 __ IncrementCounter(counters->array_function_native(), 1, r2, r6); | |
389 | |
390 // Fill arguments as array elements. Copy from the top of the stack (last | |
391 // element) to the array backing store filling it backwards. Note: | |
392 // elements_array_end points after the backing store therefore PreIndex is | |
393 // used when filling the backing store. | |
394 // r0: argc | |
395 // r3: JSArray | |
396 // r4: elements_array storage start (untagged) | |
397 // r5: elements_array_end (untagged) | |
398 // sp[0]: last argument | |
399 Label loop, entry; | |
400 __ mov(r7, sp); | |
401 __ jmp(&entry); | |
402 __ bind(&loop); | |
403 __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex)); | |
404 if (FLAG_smi_only_arrays) { | |
405 __ JumpIfNotSmi(r2, &has_non_smi_element); | |
406 } | |
407 __ str(r2, MemOperand(r5, -kPointerSize, PreIndex)); | |
408 __ bind(&entry); | |
409 __ cmp(r4, r5); | |
410 __ b(lt, &loop); | |
411 | |
412 __ bind(&finish); | |
413 __ mov(sp, r7); | |
414 | |
415 // Remove caller arguments and receiver from the stack, set up return value and | 
416 // return. | |
417 // r0: argc | |
418 // r3: JSArray | |
419 // sp[0]: receiver | |
420 __ add(sp, sp, Operand(kPointerSize)); | |
421 __ mov(r0, r3); | |
422 __ Jump(lr); | |
423 | |
424 __ bind(&has_non_smi_element); | |
425 // Double values are handled by the runtime. | |
426 __ CheckMap( | |
427 r2, r9, Heap::kHeapNumberMapRootIndex, ¬_double, DONT_DO_SMI_CHECK); | |
428 __ bind(&cant_transition_map); | |
429 __ UndoAllocationInNewSpace(r3, r4); | |
430 __ b(call_generic_code); | |
431 | |
432 __ bind(¬_double); | |
433 // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS. | |
434 // r3: JSArray | |
435 __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset)); | |
436 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, | |
437 FAST_ELEMENTS, | |
438 r2, | |
439 r9, | |
440 &cant_transition_map); | |
441 __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset)); | |
442 __ RecordWriteField(r3, | |
443 HeapObject::kMapOffset, | |
444 r2, | |
445 r9, | |
446 kLRHasNotBeenSaved, | |
447 kDontSaveFPRegs, | |
448 EMIT_REMEMBERED_SET, | |
449 OMIT_SMI_CHECK); | |
450 Label loop2; | |
451 __ sub(r7, r7, Operand(kPointerSize)); | |
452 __ bind(&loop2); | |
453 __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex)); | |
454 __ str(r2, MemOperand(r5, -kPointerSize, PreIndex)); | |
455 __ cmp(r4, r5); | |
456 __ b(lt, &loop2); | |
457 __ b(&finish); | |
458 } | |
459 | |
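Taken as a whole, the deleted ArrayNativeCode is a three-way dispatch on argc with bailouts to the generic builtin. A compilable sketch of just that classification; the names, the smi test, and the size limit are illustrative stand-ins for the real V8 constants:

#include <cstdint>
#include <vector>

enum class ArrayCallPath {
  kEmptyArray,           // argc == 0, or Array(0): preallocated empty array
  kHoleFilledArray,      // Array(n) with a small positive smi n
  kFilledFromArguments,  // Array(a, b, ...): elements copied from the stack
  kGenericCode           // anything else falls back to the generic builtin
};

constexpr int64_t kInitialMaxFastElementArray = 100000;  // illustrative limit

inline bool IsSmi(int64_t v) {
  // The generated code tests the smi tag bit and the sign bit in one AND;
  // here we just ask whether the value is a non-negative 31-bit integer.
  return v >= 0 && v < (int64_t{1} << 31);
}

ArrayCallPath ClassifyArrayCall(const std::vector<int64_t>& args) {
  if (args.empty()) return ArrayCallPath::kEmptyArray;
  if (args.size() == 1) {
    int64_t len = args[0];
    if (len == 0) return ArrayCallPath::kEmptyArray;
    if (!IsSmi(len) || len >= kInitialMaxFastElementArray)
      return ArrayCallPath::kGenericCode;
    return ArrayCallPath::kHoleFilledArray;  // backing store filled with holes
  }
  // Two or more arguments: allocate an uninitialized backing store and copy
  // the arguments from the stack; a non-smi element triggers a map transition
  // (or, for doubles, a bailout that undoes the allocation).
  return ArrayCallPath::kFilledFromArguments;
}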
460 | |
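The argument-copy loop in ArrayNativeCode above (and its loop2 variant after the map transition) reads the stack upward with PostIndex loads while writing the backing store downward with PreIndex stores, so the last argument on the stack lands in the last element slot. A plain C++ rendering of that traversal, with illustrative types:

#include <cstddef>
#include <cstdint>

// stack points at the last argument (sp[0] above); elements_end points one
// word past the last element slot (r5 above).
void CopyArgumentsBackwards(const uint32_t* stack, std::size_t argc,
                            uint32_t* elements_end) {
  const uint32_t* src = stack;   // r7: walks the stack upward (PostIndex)
  uint32_t* dst = elements_end;  // r5: walks the elements downward (PreIndex)
  for (std::size_t i = 0; i < argc; ++i) {
    uint32_t value = *src++;     // ldr r2, [r7], #4
    *--dst = value;              // str r2, [r5, #-4]!
  }
}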
461 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { | 107 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { |
462 // ----------- S t a t e ------------- | 108 // ----------- S t a t e ------------- |
463 // -- r0 : number of arguments | 109 // -- r0 : number of arguments |
464 // -- lr : return address | 110 // -- lr : return address |
465 // -- sp[...]: constructor arguments | 111 // -- sp[...]: constructor arguments |
466 // ----------------------------------- | 112 // ----------------------------------- |
467 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; | 113 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; |
468 | 114 |
469 // Get the InternalArray function. | 115 // Get the InternalArray function. |
470 GenerateLoadInternalArrayFunction(masm, r1); | 116 GenerateLoadInternalArrayFunction(masm, r1); |
471 | 117 |
472 if (FLAG_debug_code) { | 118 if (FLAG_debug_code) { |
473 // Initial map for the builtin InternalArray functions should be maps. | 119 // Initial map for the builtin InternalArray functions should be maps. |
474 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); | 120 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); |
475 __ SmiTst(r2); | 121 __ SmiTst(r2); |
476 __ Assert(ne, "Unexpected initial map for InternalArray function"); | 122 __ Assert(ne, "Unexpected initial map for InternalArray function"); |
477 __ CompareObjectType(r2, r3, r4, MAP_TYPE); | 123 __ CompareObjectType(r2, r3, r4, MAP_TYPE); |
478 __ Assert(eq, "Unexpected initial map for InternalArray function"); | 124 __ Assert(eq, "Unexpected initial map for InternalArray function"); |
479 } | 125 } |
480 | 126 |
481 // Run the native code for the InternalArray function called as a normal | 127 // Run the native code for the InternalArray function called as a normal |
482 // function. | 128 // function. |
483 if (FLAG_optimize_constructed_arrays) { | 129 // tail call a stub |
484 // tail call a stub | 130 InternalArrayConstructorStub stub(masm->isolate()); |
485 InternalArrayConstructorStub stub(masm->isolate()); | 131 __ TailCallStub(&stub); |
486 __ TailCallStub(&stub); | |
487 } else { | |
488 ArrayNativeCode(masm, &generic_array_code); | |
489 | |
490 // Jump to the generic array code if the specialized code cannot handle the | |
491 // construction. | |
492 __ bind(&generic_array_code); | |
493 Handle<Code> array_code = | |
494 masm->isolate()->builtins()->InternalArrayCodeGeneric(); | |
495 __ Jump(array_code, RelocInfo::CODE_TARGET); | |
496 } | |
497 } | 132 } |
498 | 133 |
499 | 134 |
500 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { | 135 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { |
501 // ----------- S t a t e ------------- | 136 // ----------- S t a t e ------------- |
502 // -- r0 : number of arguments | 137 // -- r0 : number of arguments |
503 // -- lr : return address | 138 // -- lr : return address |
504 // -- sp[...]: constructor arguments | 139 // -- sp[...]: constructor arguments |
505 // ----------------------------------- | 140 // ----------------------------------- |
506 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; | 141 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; |
507 | 142 |
508 // Get the Array function. | 143 // Get the Array function. |
509 GenerateLoadArrayFunction(masm, r1); | 144 GenerateLoadArrayFunction(masm, r1); |
510 | 145 |
511 if (FLAG_debug_code) { | 146 if (FLAG_debug_code) { |
512 // Initial map for the builtin Array functions should be maps. | 147 // Initial map for the builtin Array functions should be maps. |
513 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); | 148 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); |
514 __ SmiTst(r2); | 149 __ SmiTst(r2); |
515 __ Assert(ne, "Unexpected initial map for Array function"); | 150 __ Assert(ne, "Unexpected initial map for Array function"); |
516 __ CompareObjectType(r2, r3, r4, MAP_TYPE); | 151 __ CompareObjectType(r2, r3, r4, MAP_TYPE); |
517 __ Assert(eq, "Unexpected initial map for Array function"); | 152 __ Assert(eq, "Unexpected initial map for Array function"); |
518 } | 153 } |
519 | 154 |
520 // Run the native code for the Array function called as a normal function. | 155 // Run the native code for the Array function called as a normal function. |
521 if (FLAG_optimize_constructed_arrays) { | 156 // tail call a stub |
522 // tail call a stub | 157 Handle<Object> undefined_sentinel( |
523 Handle<Object> undefined_sentinel( | 158 masm->isolate()->heap()->undefined_value(), |
524 masm->isolate()->heap()->undefined_value(), | 159 masm->isolate()); |
525 masm->isolate()); | 160 __ mov(r2, Operand(undefined_sentinel)); |
526 __ mov(r2, Operand(undefined_sentinel)); | 161 ArrayConstructorStub stub(masm->isolate()); |
527 ArrayConstructorStub stub(masm->isolate()); | 162 __ TailCallStub(&stub); |
528 __ TailCallStub(&stub); | |
529 } else { | |
530 ArrayNativeCode(masm, &generic_array_code); | |
531 | |
532 // Jump to the generic array code if the specialized code cannot handle | |
533 // the construction. | |
534 __ bind(&generic_array_code); | |
535 Handle<Code> array_code = | |
536 masm->isolate()->builtins()->ArrayCodeGeneric(); | |
537 __ Jump(array_code, RelocInfo::CODE_TARGET); | |
538 } | |
539 } | 163 } |
540 | 164 |
541 | 165 |
542 void Builtins::Generate_CommonArrayConstructCode(MacroAssembler* masm) { | |
543 // ----------- S t a t e ------------- | |
544 // -- r0 : number of arguments | |
545 // -- r1 : constructor function | |
546 // -- r2 : type info cell | |
547 // -- lr : return address | |
548 // -- sp[...]: constructor arguments | |
549 // ----------------------------------- | |
550 | |
551 if (FLAG_debug_code) { | |
552 // The array construct code is only set for the builtin and internal | |
553 // Array functions which always have a map. | |
554 // Initial map for the builtin Array function should be a map. | |
555 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); | |
556 __ SmiTst(r3); | |
557 __ Assert(ne, "Unexpected initial map for Array function"); | |
558 __ CompareObjectType(r3, r3, r4, MAP_TYPE); | |
559 __ Assert(eq, "Unexpected initial map for Array function"); | |
560 } | |
561 Label generic_constructor; | |
562 // Run the native code for the Array function called as a constructor. | |
563 ArrayNativeCode(masm, &generic_constructor); | |
564 | |
565 // Jump to the generic construct code in case the specialized code cannot | |
566 // handle the construction. | |
567 __ bind(&generic_constructor); | |
568 Handle<Code> generic_construct_stub = | |
569 masm->isolate()->builtins()->JSConstructStubGeneric(); | |
570 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); | |
571 } | |
572 | |
573 | |
574 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { | 166 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { |
575 // ----------- S t a t e ------------- | 167 // ----------- S t a t e ------------- |
576 // -- r0 : number of arguments | 168 // -- r0 : number of arguments |
577 // -- r1 : constructor function | 169 // -- r1 : constructor function |
578 // -- lr : return address | 170 // -- lr : return address |
579 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) | 171 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based) |
580 // -- sp[argc * 4] : receiver | 172 // -- sp[argc * 4] : receiver |
581 // ----------------------------------- | 173 // ----------------------------------- |
582 Counters* counters = masm->isolate()->counters(); | 174 Counters* counters = masm->isolate()->counters(); |
583 __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3); | 175 __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3); |
(...skipping 1299 matching lines...)
1883 __ bind(&dont_adapt_arguments); | 1475 __ bind(&dont_adapt_arguments); |
1884 __ Jump(r3); | 1476 __ Jump(r3); |
1885 } | 1477 } |
1886 | 1478 |
1887 | 1479 |
1888 #undef __ | 1480 #undef __ |
1889 | 1481 |
1890 } } // namespace v8::internal | 1482 } } // namespace v8::internal |
1891 | 1483 |
1892 #endif // V8_TARGET_ARCH_ARM | 1484 #endif // V8_TARGET_ARCH_ARM |