Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 312 matching lines...) | |
| 323 __ mov(cp, r0); | 323 __ mov(cp, r0); |
| 324 __ add(sp, sp, Operand(2 * kPointerSize)); | 324 __ add(sp, sp, Operand(2 * kPointerSize)); |
| 325 __ Ret(); | 325 __ Ret(); |
| 326 | 326 |
| 327 // Need to collect. Call into runtime system. | 327 // Need to collect. Call into runtime system. |
| 328 __ bind(&gc); | 328 __ bind(&gc); |
| 329 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | 329 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); |
| 330 } | 330 } |
| 331 | 331 |
| 332 | 332 |
| 333 int FastCloneShallowArrayStub::MaximumClonedLength() { | |
| 334 return CpuFeatures::IsSupported(VFP2) ? 128 : 8; | |

danno 2012/10/29 15:29:24: By increasing this to 128, V8 will generate 128 ve…

| 335 } | |
| 336 | |
| 337 | |
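
The truncated comment above appears to concern code size: FastCloneShallowArrayStub is keyed on the clone length, so if one specialized stub can be generated per length up to MaximumClonedLength(), raising the cap from 8 to 128 allows many more stub instances to exist. A minimal C++ sketch of that scaling effect, using a hypothetical stub cache and key (these names are illustrative, not from the patch):

```cpp
// Hypothetical sketch (not V8 source) of how the cap bounds the number of
// specialized clone stubs: one cache entry per (mode, length) pair.
#include <cstdio>
#include <map>

enum Mode { CLONE_ELEMENTS, CLONE_DOUBLE_ELEMENTS, COPY_ON_WRITE_ELEMENTS };

struct StubKey {
  Mode mode;
  int length;
  bool operator<(const StubKey& other) const {
    if (mode != other.mode) return mode < other.mode;
    return length < other.length;
  }
};

std::map<StubKey, int> stub_cache;  // stand-in for the compiled-code cache
int next_stub_id = 0;

int GetOrGenerateStub(Mode mode, int length) {
  StubKey key{mode, length};
  auto it = stub_cache.find(key);
  if (it != stub_cache.end()) return it->second;
  // Stand-in for GenerateFastCloneShallowArrayCommon(masm, length, mode, ...).
  stub_cache[key] = next_stub_id;
  return next_stub_id++;
}

int main() {
  const int kMaximumClonedLength = 128;  // 8 without VFP2 in this patch
  for (int len = 0; len <= kMaximumClonedLength; ++len) {
    GetOrGenerateStub(CLONE_ELEMENTS, len);
  }
  // One elements mode alone already yields 129 distinct stubs at the higher cap.
  std::printf("stubs generated: %zu\n", stub_cache.size());
  return 0;
}
```

At the higher cap a single elements mode alone can account for 129 cache entries, which is presumably the growth the reviewer is flagging.
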
| 333 static void GenerateFastCloneShallowArrayCommon( | 338 static void GenerateFastCloneShallowArrayCommon( |
| 334 MacroAssembler* masm, | 339 MacroAssembler* masm, |
| 335 int length, | 340 int length, |
| 336 FastCloneShallowArrayStub::Mode mode, | 341 FastCloneShallowArrayStub::Mode mode, |
| 337 Label* fail) { | 342 Label* fail) { |
| 338 // Registers on entry: | 343 // Registers on entry: |
| 339 // | 344 // |
| 340 // r3: boilerplate literal array. | 345 // r3: boilerplate literal array. |
| 341 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); | 346 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); |
| 342 | 347 |
| 343 // All sizes here are multiples of kPointerSize. | 348 // All sizes here are multiples of kPointerSize. |
| 344 int elements_size = 0; | 349 int elements_size = 0; |
| 345 if (length > 0) { | 350 if (length > 0) { |
| 346 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | 351 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
| 347 ? FixedDoubleArray::SizeFor(length) | 352 ? FixedDoubleArray::SizeFor(length) |
| 348 : FixedArray::SizeFor(length); | 353 : FixedArray::SizeFor(length); |
| 349 } | 354 } |
| 350 int size = JSArray::kSize + elements_size; | 355 int elements_offset = JSArray::kSize; |
| 356 int size = elements_offset + elements_size; | |
| 351 | 357 |
| 352 // Allocate both the JS array and the elements array in one big | 358 // Allocate both the JS array and the elements array in one big |
| 353 // allocation. This avoids multiple limit checks. | 359 // allocation. This avoids multiple limit checks. |
| 354 __ AllocateInNewSpace(size, | 360 __ AllocateInNewSpace(size, |
| 355 r0, | 361 r0, |
| 356 r1, | 362 r1, |
| 357 r2, | 363 r2, |
| 358 fail, | 364 fail, |
| 359 TAG_OBJECT); | 365 NO_ALLOCATION_FLAGS); |
| 360 | 366 |
| 361 // Copy the JS array part. | 367 // Copy the JS array part. |
| 362 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | 368 __ sub(r3, r3, Operand(kHeapObjectTag)); |
| 363 if ((i != JSArray::kElementsOffset) || (length == 0)) { | 369 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize); |
| 364 __ ldr(r1, FieldMemOperand(r3, i)); | 370 RegList temps = r4.bit() | r5.bit() | r6.bit() | r7.bit(); |
| 365 __ str(r1, FieldMemOperand(r0, i)); | 371 __ ldm(ia_w, r3, temps); |
| 372 if (length > 0) { | |
| 373 // Set the pointer to the elements array if it is non-empty. r0 is untagged, | |
| 374 // so we need to include the tag here. | |
| 375 STATIC_ASSERT(JSArray::kElementsOffset == 2 * kPointerSize); | |
| 376 __ add(r6, r0, Operand(elements_offset + kHeapObjectTag)); | |
| 377 } | |
| 378 __ stm(ia_w, r0, temps); | |
| 379 | |
| 380 // Copy the elements from the boilerplate array, if needed. At this point, | |
| 381 // r0 points to the beginning of elements. | |
| 382 int restore_size = JSArray::kSize - kHeapObjectTag; | |
| 383 if (length > 0) { | |
| 384 // Load the elements array from the boilerplate. | |
| 385 __ ldr(r3, MemOperand(r3, JSArray::kElementsOffset - JSArray::kSize)); | |
| 386 ASSERT((elements_size % kPointerSize) == 0); | |
| 387 if (CpuFeatures::IsSupported(VFP2)) { | |
| 388 CpuFeatures::Scope scope(VFP2); | |
| 389 SwVfpRegister first_reg = s0; | |
| 390 SwVfpRegister last_reg = s15; | |
| 391 __ sub(r3, r3, Operand(kHeapObjectTag)); | |
| 392 __ VFPCopyFields(r0, r3, elements_size / kPointerSize, | |
| 393 first_reg, last_reg); | |
| 394 restore_size += elements_size; | |
| 395 } else { | |
| 396 __ CopyFields(r6, r3, r1.bit(), elements_size / kPointerSize); | |
| 366 } | 397 } |
| 367 } | 398 } |
| 368 | 399 |
| 369 if (length > 0) { | 400 // At this point, r0 points to the end of the copied region. Bring it back to |
| 370 // Get hold of the elements array of the boilerplate and setup the | 401 // the beginning and tag it. |
| 371 // elements pointer in the resulting object. | 402 __ sub(r0, r0, Operand(restore_size)); |
| 372 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); | |
| 373 __ add(r2, r0, Operand(JSArray::kSize)); | |
| 374 __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset)); | |
| 375 | |
| 376 // Copy the elements array. | |
| 377 ASSERT((elements_size % kPointerSize) == 0); | |
| 378 __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize); | |
| 379 } | |
| 380 } | 403 } |
| 381 | 404 |
| 382 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | 405 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { |
| 383 // Stack layout on entry: | 406 // Stack layout on entry: |
| 384 // | 407 // |
| 385 // [sp]: constant elements. | 408 // [sp]: constant elements. |
| 386 // [sp + kPointerSize]: literal index. | 409 // [sp + kPointerSize]: literal index. |
| 387 // [sp + (2 * kPointerSize)]: literals array. | 410 // [sp + (2 * kPointerSize)]: literals array. |
| 388 | 411 |
| 389 // Load boilerplate object into r3 and check if we need to create a | 412 // Load boilerplate object into r3 and check if we need to create a |
| (...skipping 59 matching lines...) | |
| 449 | 472 |
| 450 // Return and remove the on-stack parameters. | 473 // Return and remove the on-stack parameters. |
| 451 __ add(sp, sp, Operand(3 * kPointerSize)); | 474 __ add(sp, sp, Operand(3 * kPointerSize)); |
| 452 __ Ret(); | 475 __ Ret(); |
| 453 | 476 |
| 454 __ bind(&slow_case); | 477 __ bind(&slow_case); |
| 455 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 478 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
| 456 } | 479 } |
| 457 | 480 |
| 458 | 481 |
| 482 int FastCloneShallowObjectStub::MaximumClonedProperties() { | |
| 483 return CpuFeatures::IsSupported(VFP2) ? 96 : 6; | |

danno 2012/10/29 15:29:24: Similarly here, please keep at constant 6, unless…

| 484 } | |
| 485 | |
| 486 | |
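
For the new copy paths above (the ldm/stm header copy and the VFPCopyFields calls), the repeated sub/add of kHeapObjectTag is V8's pointer tagging at work: the stub strips the tag to get a raw address for bulk copying, then re-tags the freshly allocated object before returning it. A small standalone sketch of that arithmetic, with hypothetical helper names and a plain word loop standing in for the assembler copy:

```cpp
// Minimal sketch (assumed simplification, not V8 internals as-is): heap
// pointers carry a low tag bit, so raw block copies strip it first and the
// result is re-tagged before being handed back.
#include <cstddef>
#include <cstdint>

constexpr uintptr_t kHeapObjectTag = 1;         // low bit set on heap pointers
constexpr size_t kPointerSize = sizeof(void*);  // 4 on 32-bit ARM

inline void* Untag(uintptr_t tagged) {
  return reinterpret_cast<void*>(tagged - kHeapObjectTag);
}

inline uintptr_t Tag(const void* raw) {
  return reinterpret_cast<uintptr_t>(raw) + kHeapObjectTag;
}

// Word-by-word copy standing in for the ldm/stm or VFPCopyFields bulk copy.
uintptr_t CloneObject(uintptr_t boilerplate_tagged, void* allocation,
                      size_t size_in_bytes) {
  const uintptr_t* src =
      static_cast<const uintptr_t*>(Untag(boilerplate_tagged));
  uintptr_t* dst = static_cast<uintptr_t*>(allocation);
  for (size_t i = 0; i < size_in_bytes / kPointerSize; ++i) dst[i] = src[i];
  return Tag(allocation);  // the stub returns a tagged pointer in r0
}
```

The restore_size bookkeeping in the array stub serves the same purpose: after the post-incrementing stores, subtracting restore_size both rewinds r0 to the allocation start and reapplies the tag in one instruction.
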
| 459 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) { | 487 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) { |
| 460 // Stack layout on entry: | 488 // Stack layout on entry: |
| 461 // | 489 // |
| 462 // [sp]: object literal flags. | 490 // [sp]: object literal flags. |
| 463 // [sp + kPointerSize]: constant properties. | 491 // [sp + kPointerSize]: constant properties. |
| 464 // [sp + (2 * kPointerSize)]: literal index. | 492 // [sp + (2 * kPointerSize)]: literal index. |
| 465 // [sp + (3 * kPointerSize)]: literals array. | 493 // [sp + (3 * kPointerSize)]: literals array. |
| 466 | 494 |
| 467 // Load boilerplate object into r3 and check if we need to create a | 495 // Load boilerplate object into r3 and check if we need to create a |
| 468 // boilerplate. | 496 // boilerplate. |
| 469 Label slow_case; | 497 Label slow_case; |
| 470 __ ldr(r3, MemOperand(sp, 3 * kPointerSize)); | 498 __ ldr(r3, MemOperand(sp, 3 * kPointerSize)); |
| 471 __ ldr(r0, MemOperand(sp, 2 * kPointerSize)); | 499 __ ldr(r0, MemOperand(sp, 2 * kPointerSize)); |
| 472 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 500 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 473 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); | 501 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 474 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); | 502 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); |
| 475 __ b(eq, &slow_case); | 503 __ b(eq, &slow_case); |
| 476 | 504 |
| 477 // Check that the boilerplate contains only fast properties and we can | 505 // Check that the boilerplate contains only fast properties and we can |
| 478 // statically determine the instance size. | 506 // statically determine the instance size. |
| 479 int size = JSObject::kHeaderSize + length_ * kPointerSize; | 507 int size = JSObject::kHeaderSize + length_ * kPointerSize; |
| 480 __ ldr(r0, FieldMemOperand(r3, HeapObject::kMapOffset)); | 508 __ ldr(r0, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 481 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceSizeOffset)); | 509 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceSizeOffset)); |
| 482 __ cmp(r0, Operand(size >> kPointerSizeLog2)); | 510 __ cmp(r0, Operand(size >> kPointerSizeLog2)); |
| 483 __ b(ne, &slow_case); | 511 __ b(ne, &slow_case); |
| 484 | 512 |
| 485 // Allocate the JS object and copy header together with all in-object | 513 // Allocate the JS object and copy header together with all in-object |
| 486 // properties from the boilerplate. | 514 // properties from the boilerplate. |
| 487 __ AllocateInNewSpace(size, r0, r1, r2, &slow_case, TAG_OBJECT); | 515 if (CpuFeatures::IsSupported(VFP2)) { |
| 488 for (int i = 0; i < size; i += kPointerSize) { | 516 CpuFeatures::Scope scope(VFP2); |
| 489 __ ldr(r1, FieldMemOperand(r3, i)); | 517 __ AllocateInNewSpace(size, r0, r1, r2, &slow_case, NO_ALLOCATION_FLAGS); |
| 490 __ str(r1, FieldMemOperand(r0, i)); | 518 SwVfpRegister first_reg = s0; |
| 519 SwVfpRegister last_reg = s15; | |
| 520 __ sub(r3, r3, Operand(kHeapObjectTag)); | |
| 521 __ VFPCopyFields(r0, r3, size / kPointerSize, first_reg, last_reg); | |
| 522 __ sub(r0, r0, Operand(size - kHeapObjectTag)); | |
| 523 } else { | |
| 524 __ AllocateInNewSpace(size, r0, r1, r2, &slow_case, TAG_OBJECT); | |
| 525 for (int i = 0; i < size; i += kPointerSize) { | |
| 526 __ ldr(r1, FieldMemOperand(r3, i)); | |
| 527 __ str(r1, FieldMemOperand(r0, i)); | |
| 528 } | |
| 491 } | 529 } |
| 492 | 530 |
| 493 // Return and remove the on-stack parameters. | 531 // Return and remove the on-stack parameters. |
| 494 __ add(sp, sp, Operand(4 * kPointerSize)); | 532 __ add(sp, sp, Operand(4 * kPointerSize)); |
| 495 __ Ret(); | 533 __ Ret(); |
| 496 | 534 |
| 497 __ bind(&slow_case); | 535 __ bind(&slow_case); |
| 498 __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1); | 536 __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1); |
| 499 } | 537 } |
| 500 | 538 |
| (...skipping 7112 matching lines...) | |
| 7613 | 7651 |
| 7614 __ Pop(lr, r5, r1); | 7652 __ Pop(lr, r5, r1); |
| 7615 __ Ret(); | 7653 __ Ret(); |
| 7616 } | 7654 } |
| 7617 | 7655 |
| 7618 #undef __ | 7656 #undef __ |
| 7619 | 7657 |
| 7620 } } // namespace v8::internal | 7658 } } // namespace v8::internal |
| 7621 | 7659 |
| 7622 #endif // V8_TARGET_ARCH_ARM | 7660 #endif // V8_TARGET_ARCH_ARM |
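
Finally, both stubs rely on the pattern called out in the comment "Allocate both the JS array and the elements array in one big allocation. This avoids multiple limit checks.": with a bump-pointer allocator, one size covering the header plus the elements needs only a single comparison against the new-space limit, and the elements pointer falls out of the layout as allocation start plus JSArray::kSize. A rough C++ sketch under those assumptions (hypothetical types, not V8's allocator API):

```cpp
// Hypothetical sketch of why one combined allocation needs only a single
// limit check: a bump-pointer allocator over new space.
#include <cstddef>
#include <cstdint>

struct NewSpace {
  uint8_t* top;    // next free byte
  uint8_t* limit;  // end of the current allocation area
};

// One limit check covers the JSArray header and the elements store together.
void* AllocateRaw(NewSpace* space, size_t size_in_bytes) {
  if (space->top + size_in_bytes > space->limit) return nullptr;  // needs GC
  void* result = space->top;
  space->top += size_in_bytes;
  return result;
}

// With a single contiguous allocation, the elements backing store starts
// immediately after the JSArray header.
struct CloneLayout {
  void* js_array;   // start of the JSArray header
  void* elements;   // start of the elements backing store
};

CloneLayout AllocateClone(NewSpace* space, size_t header_size,
                          size_t elements_size) {
  uint8_t* raw =
      static_cast<uint8_t*>(AllocateRaw(space, header_size + elements_size));
  return {raw, raw ? raw + header_size : nullptr};
}
```

In the stub this is exactly the add of elements_offset (plus the heap tag) into the elements slot before the header words are stored.
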