OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 293 matching lines...) | |
304 // Need to collect. Call into runtime system. | 304 // Need to collect. Call into runtime system. |
305 __ bind(&gc); | 305 __ bind(&gc); |
306 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | 306 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); |
307 } | 307 } |
308 | 308 |
309 | 309 |
310 static void GenerateFastCloneShallowArrayCommon( | 310 static void GenerateFastCloneShallowArrayCommon( |
311 MacroAssembler* masm, | 311 MacroAssembler* masm, |
312 int length, | 312 int length, |
313 FastCloneShallowArrayStub::Mode mode, | 313 FastCloneShallowArrayStub::Mode mode, |
314 bool want_allocation_info, | |
danno 2012/12/26 10:32:01
Instead of passing a bool, create an enum in the he…
mvstanton 2013/01/03 14:40:43
Done... I made a separate enum. I'd like to coalesc…
| |
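A minimal sketch of what the reviewer is asking for, under assumed names (AllocationInfoMode and its two values are illustrative; the enum the patch actually adds lives in the stub's header and may be named differently). The bool parameter becomes a self-documenting enum, so call sites read TRACK_ALLOCATION_INFO rather than a bare true:

    // Hypothetical enum, placed in the header as the reviewer suggests:
    enum AllocationInfoMode {
      DONT_TRACK_ALLOCATION_INFO,
      TRACK_ALLOCATION_INFO
    };

    // The file-local helper in code-stubs-ia32.cc would then take the enum:
    static void GenerateFastCloneShallowArrayCommon(
        MacroAssembler* masm,
        int length,
        FastCloneShallowArrayStub::Mode mode,
        AllocationInfoMode allocation_info_mode,  // was: bool want_allocation_info
        Label* fail);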
314 Label* fail) { | 315 Label* fail) { |
315 // Registers on entry: | 316 // Registers on entry: |
316 // | 317 // |
317 // ecx: boilerplate literal array. | 318 // ecx: boilerplate literal array. |
318 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); | 319 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); |
319 | 320 |
320 // All sizes here are multiples of kPointerSize. | 321 // All sizes here are multiples of kPointerSize. |
321 int elements_size = 0; | 322 int elements_size = 0; |
322 if (length > 0) { | 323 if (length > 0) { |
323 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | 324 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
324 ? FixedDoubleArray::SizeFor(length) | 325 ? FixedDoubleArray::SizeFor(length) |
325 : FixedArray::SizeFor(length); | 326 : FixedArray::SizeFor(length); |
326 } | 327 } |
327 int size = JSArray::kSize + elements_size; | 328 int size = JSArray::kSize; |
329 int allocation_info_start = size; | |
330 if (want_allocation_info) { | |
331 size += AllocationSiteInfo::kSize; | |
332 } | |
333 size += elements_size; | |
328 | 334 |
329 // Allocate both the JS array and the elements array in one big | 335 // Allocate both the JS array and the elements array in one big |
330 // allocation. This avoids multiple limit checks. | 336 // allocation. This avoids multiple limit checks. |
331 __ AllocateInNewSpace(size, eax, ebx, edx, fail, TAG_OBJECT); | 337 __ AllocateInNewSpace(size, eax, ebx, edx, fail, TAG_OBJECT); |
332 | 338 |
339 if (want_allocation_info) { | |
340 __ mov(FieldOperand(eax, allocation_info_start), | |
341 Immediate(Handle<Map>(masm->isolate()->heap()-> | |
342 allocation_site_info_map()))); | |
343 __ mov(FieldOperand(eax, allocation_info_start + kPointerSize), ecx); | |
344 // type_info_cell); | |
danno 2012/12/26 10:32:01
nit: delete comment
mvstanton 2013/01/03 14:40:43
Done.
| |
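For orientation, a rough sketch (not code from the patch) of the size arithmetic the new want_allocation_info path performs, using stand-in constants for the real JSArray::kSize and AllocationSiteInfo::kSize on ia32:

    // Illustrative only: mirrors the size computation above. On ia32,
    // kPointerSize is 4; the constants are stand-ins for the real values.
    int CloneAllocationSize(int elements_size, bool want_allocation_info) {
      const int kPointerSize = 4;
      const int kJSArraySize = 4 * kPointerSize;             // stand-in for JSArray::kSize
      const int kAllocationSiteInfoSize = 2 * kPointerSize;  // map word + payload word
      int size = kJSArraySize;                               // JSArray header first
      if (want_allocation_info) size += kAllocationSiteInfoSize;  // site info, if requested
      return size + elements_size;                           // elements come last
    }

Because the AllocationSiteInfo block sits between the JSArray header and the elements, the elements pointer set up further down is offset by AllocationSiteInfo::kSize whenever want_allocation_info is true.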
345 } | |
346 | |
333 // Copy the JS array part. | 347 // Copy the JS array part. |
334 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | 348 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { |
335 if ((i != JSArray::kElementsOffset) || (length == 0)) { | 349 if ((i != JSArray::kElementsOffset) || (length == 0)) { |
336 __ mov(ebx, FieldOperand(ecx, i)); | 350 __ mov(ebx, FieldOperand(ecx, i)); |
337 __ mov(FieldOperand(eax, i), ebx); | 351 __ mov(FieldOperand(eax, i), ebx); |
338 } | 352 } |
339 } | 353 } |
340 | 354 |
341 if (length > 0) { | 355 if (length > 0) { |
342 // Get hold of the elements array of the boilerplate and setup the | 356 // Get hold of the elements array of the boilerplate and setup the |
343 // elements pointer in the resulting object. | 357 // elements pointer in the resulting object. |
344 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset)); | 358 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset)); |
345 __ lea(edx, Operand(eax, JSArray::kSize)); | 359 if (want_allocation_info) { |
360 __ lea(edx, Operand(eax, JSArray::kSize + AllocationSiteInfo::kSize)); | |
361 } else { | |
362 __ lea(edx, Operand(eax, JSArray::kSize)); | |
363 } | |
346 __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx); | 364 __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx); |
347 | 365 |
348 // Copy the elements array. | 366 // Copy the elements array. |
349 if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) { | 367 if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) { |
350 for (int i = 0; i < elements_size; i += kPointerSize) { | 368 for (int i = 0; i < elements_size; i += kPointerSize) { |
351 __ mov(ebx, FieldOperand(ecx, i)); | 369 __ mov(ebx, FieldOperand(ecx, i)); |
352 __ mov(FieldOperand(edx, i), ebx); | 370 __ mov(FieldOperand(edx, i), ebx); |
353 } | 371 } |
354 } else { | 372 } else { |
355 ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS); | 373 ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS); |
(...skipping 29 matching lines...) |
385 STATIC_ASSERT(kSmiTag == 0); | 403 STATIC_ASSERT(kSmiTag == 0); |
386 __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size, | 404 __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size, |
387 FixedArray::kHeaderSize)); | 405 FixedArray::kHeaderSize)); |
388 Factory* factory = masm->isolate()->factory(); | 406 Factory* factory = masm->isolate()->factory(); |
389 __ cmp(ecx, factory->undefined_value()); | 407 __ cmp(ecx, factory->undefined_value()); |
390 Label slow_case; | 408 Label slow_case; |
391 __ j(equal, &slow_case); | 409 __ j(equal, &slow_case); |
392 | 410 |
393 FastCloneShallowArrayStub::Mode mode = mode_; | 411 FastCloneShallowArrayStub::Mode mode = mode_; |
394 // ecx is boilerplate object. | 412 // ecx is boilerplate object. |
413 bool want_allocation_info = false; | |
414 if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_INFO) { | |
415 mode = CLONE_ANY_ELEMENTS; | |
416 want_allocation_info = true; | |
417 } | |
418 | |
395 if (mode == CLONE_ANY_ELEMENTS) { | 419 if (mode == CLONE_ANY_ELEMENTS) { |
396 Label double_elements, check_fast_elements; | 420 Label double_elements, check_fast_elements; |
397 __ mov(ebx, FieldOperand(ecx, JSArray::kElementsOffset)); | 421 __ mov(ebx, FieldOperand(ecx, JSArray::kElementsOffset)); |
398 __ CheckMap(ebx, factory->fixed_cow_array_map(), | 422 __ CheckMap(ebx, factory->fixed_cow_array_map(), |
399 &check_fast_elements, DONT_DO_SMI_CHECK); | 423 &check_fast_elements, DONT_DO_SMI_CHECK); |
400 GenerateFastCloneShallowArrayCommon(masm, 0, | 424 GenerateFastCloneShallowArrayCommon(masm, 0, |
401 COPY_ON_WRITE_ELEMENTS, &slow_case); | 425 COPY_ON_WRITE_ELEMENTS, |
426 want_allocation_info, | |
427 &slow_case); | |
402 __ ret(3 * kPointerSize); | 428 __ ret(3 * kPointerSize); |
403 | 429 |
404 __ bind(&check_fast_elements); | 430 __ bind(&check_fast_elements); |
405 __ CheckMap(ebx, factory->fixed_array_map(), | 431 __ CheckMap(ebx, factory->fixed_array_map(), |
406 &double_elements, DONT_DO_SMI_CHECK); | 432 &double_elements, DONT_DO_SMI_CHECK); |
407 GenerateFastCloneShallowArrayCommon(masm, length_, | 433 GenerateFastCloneShallowArrayCommon(masm, length_, |
408 CLONE_ELEMENTS, &slow_case); | 434 CLONE_ELEMENTS, |
435 want_allocation_info, | |
436 &slow_case); | |
409 __ ret(3 * kPointerSize); | 437 __ ret(3 * kPointerSize); |
410 | 438 |
411 __ bind(&double_elements); | 439 __ bind(&double_elements); |
412 mode = CLONE_DOUBLE_ELEMENTS; | 440 mode = CLONE_DOUBLE_ELEMENTS; |
413 // Fall through to generate the code to handle double elements. | 441 // Fall through to generate the code to handle double elements. |
414 } | 442 } |
415 | 443 |
416 if (FLAG_debug_code) { | 444 if (FLAG_debug_code) { |
417 const char* message; | 445 const char* message; |
418 Handle<Map> expected_map; | 446 Handle<Map> expected_map; |
419 if (mode == CLONE_ELEMENTS) { | 447 if (mode == CLONE_ELEMENTS) { |
420 message = "Expected (writable) fixed array"; | 448 message = "Expected (writable) fixed array"; |
421 expected_map = factory->fixed_array_map(); | 449 expected_map = factory->fixed_array_map(); |
422 } else if (mode == CLONE_DOUBLE_ELEMENTS) { | 450 } else if (mode == CLONE_DOUBLE_ELEMENTS) { |
423 message = "Expected (writable) fixed double array"; | 451 message = "Expected (writable) fixed double array"; |
424 expected_map = factory->fixed_double_array_map(); | 452 expected_map = factory->fixed_double_array_map(); |
425 } else { | 453 } else { |
426 ASSERT(mode == COPY_ON_WRITE_ELEMENTS); | 454 ASSERT(mode == COPY_ON_WRITE_ELEMENTS); |
427 message = "Expected copy-on-write fixed array"; | 455 message = "Expected copy-on-write fixed array"; |
428 expected_map = factory->fixed_cow_array_map(); | 456 expected_map = factory->fixed_cow_array_map(); |
429 } | 457 } |
430 __ push(ecx); | 458 __ push(ecx); |
431 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset)); | 459 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset)); |
432 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map); | 460 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map); |
433 __ Assert(equal, message); | 461 __ Assert(equal, message); |
434 __ pop(ecx); | 462 __ pop(ecx); |
435 } | 463 } |
436 | 464 |
437 GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case); | 465 GenerateFastCloneShallowArrayCommon(masm, length_, mode, |
466 want_allocation_info, &slow_case); | |
438 // Return and remove the on-stack parameters. | 467 // Return and remove the on-stack parameters. |
439 __ ret(3 * kPointerSize); | 468 __ ret(3 * kPointerSize); |
440 | 469 |
441 __ bind(&slow_case); | 470 __ bind(&slow_case); |
442 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 471 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
443 } | 472 } |
444 | 473 |
445 | 474 |
446 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) { | 475 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) { |
447 // Stack layout on entry: | 476 // Stack layout on entry: |
(...skipping 7001 matching lines...) | |
7449 // Restore ecx. | 7478 // Restore ecx. |
7450 __ pop(ecx); | 7479 __ pop(ecx); |
7451 __ ret(0); | 7480 __ ret(0); |
7452 } | 7481 } |
7453 | 7482 |
7454 #undef __ | 7483 #undef __ |
7455 | 7484 |
7456 } } // namespace v8::internal | 7485 } } // namespace v8::internal |
7457 | 7486 |
7458 #endif // V8_TARGET_ARCH_IA32 | 7487 #endif // V8_TARGET_ARCH_IA32 |