| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 305 matching lines...) |
| 316 // Need to collect. Call into runtime system. | 316 // Need to collect. Call into runtime system. |
| 317 __ bind(&gc); | 317 __ bind(&gc); |
| 318 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | 318 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); |
| 319 } | 319 } |
| 320 | 320 |
| 321 | 321 |
| 322 static void GenerateFastCloneShallowArrayCommon( | 322 static void GenerateFastCloneShallowArrayCommon( |
| 323 MacroAssembler* masm, | 323 MacroAssembler* masm, |
| 324 int length, | 324 int length, |
| 325 FastCloneShallowArrayStub::Mode mode, | 325 FastCloneShallowArrayStub::Mode mode, |
| 326 FastCloneShallowArrayStub::AllocationInfoMode allocation_info_mode, |
| 326 Label* fail) { | 327 Label* fail) { |
| 327 // Registers on entry: | 328 // Registers on entry: |
| 328 // | 329 // |
| 329 // ecx: boilerplate literal array. | 330 // ecx: boilerplate literal array. |
| 330 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); | 331 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); |
| 331 | 332 |
| 332 // All sizes here are multiples of kPointerSize. | 333 // All sizes here are multiples of kPointerSize. |
| 333 int elements_size = 0; | 334 int elements_size = 0; |
| 334 if (length > 0) { | 335 if (length > 0) { |
| 335 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | 336 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
| 336 ? FixedDoubleArray::SizeFor(length) | 337 ? FixedDoubleArray::SizeFor(length) |
| 337 : FixedArray::SizeFor(length); | 338 : FixedArray::SizeFor(length); |
| 338 } | 339 } |
| 339 int size = JSArray::kSize + elements_size; | 340 int size = JSArray::kSize; |
| 341 int allocation_info_start = size; |
| 342 if (allocation_info_mode == |
| 343 FastCloneShallowArrayStub::TRACK_ALLOCATION_INFO) { |
| 344 size += AllocationSiteInfo::kSize; |
| 345 } |
| 346 size += elements_size; |
| 340 | 347 |
| 341 // Allocate both the JS array and the elements array in one big | 348 // Allocate both the JS array and the elements array in one big |
| 342 // allocation. This avoids multiple limit checks. | 349 // allocation. This avoids multiple limit checks. |
| 343 AllocationFlags flags = TAG_OBJECT; | 350 AllocationFlags flags = TAG_OBJECT; |
| 344 if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) { | 351 if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) { |
| 345 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags); | 352 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags); |
| 346 } | 353 } |
| 347 __ AllocateInNewSpace(size, eax, ebx, edx, fail, flags); | 354 __ AllocateInNewSpace(size, eax, ebx, edx, fail, flags); |
| 348 | 355 |
| 356 if (allocation_info_mode == |
| 357 FastCloneShallowArrayStub::TRACK_ALLOCATION_INFO) { |
| 358 __ mov(FieldOperand(eax, allocation_info_start), |
| 359 Immediate(Handle<Map>(masm->isolate()->heap()-> |
| 360 allocation_site_info_map()))); |
| 361 __ mov(FieldOperand(eax, allocation_info_start + kPointerSize), ecx); |
| 362 } |
| 363 |
| 349 // Copy the JS array part. | 364 // Copy the JS array part. |
| 350 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | 365 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { |
| 351 if ((i != JSArray::kElementsOffset) || (length == 0)) { | 366 if ((i != JSArray::kElementsOffset) || (length == 0)) { |
| 352 __ mov(ebx, FieldOperand(ecx, i)); | 367 __ mov(ebx, FieldOperand(ecx, i)); |
| 353 __ mov(FieldOperand(eax, i), ebx); | 368 __ mov(FieldOperand(eax, i), ebx); |
| 354 } | 369 } |
| 355 } | 370 } |
| 356 | 371 |
| 357 if (length > 0) { | 372 if (length > 0) { |
| 358 // Get hold of the elements array of the boilerplate and set up the | 373 // Get hold of the elements array of the boilerplate and set up the |
| 359 // elements pointer in the resulting object. | 374 // elements pointer in the resulting object. |
| 360 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset)); | 375 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset)); |
| 361 __ lea(edx, Operand(eax, JSArray::kSize)); | 376 if (allocation_info_mode == |
| 377 FastCloneShallowArrayStub::TRACK_ALLOCATION_INFO) { |
| 378 __ lea(edx, Operand(eax, JSArray::kSize + AllocationSiteInfo::kSize)); |
| 379 } else { |
| 380 __ lea(edx, Operand(eax, JSArray::kSize)); |
| 381 } |
| 362 __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx); | 382 __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx); |
| 363 | 383 |
| 364 // Copy the elements array. | 384 // Copy the elements array. |
| 365 if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) { | 385 if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) { |
| 366 for (int i = 0; i < elements_size; i += kPointerSize) { | 386 for (int i = 0; i < elements_size; i += kPointerSize) { |
| 367 __ mov(ebx, FieldOperand(ecx, i)); | 387 __ mov(ebx, FieldOperand(ecx, i)); |
| 368 __ mov(FieldOperand(edx, i), ebx); | 388 __ mov(FieldOperand(edx, i), ebx); |
| 369 } | 389 } |
| 370 } else { | 390 } else { |
| 371 ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS); | 391 ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS); |
| (...skipping 29 matching lines...) |
| 401 STATIC_ASSERT(kSmiTag == 0); | 421 STATIC_ASSERT(kSmiTag == 0); |
| 402 __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size, | 422 __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size, |
| 403 FixedArray::kHeaderSize)); | 423 FixedArray::kHeaderSize)); |
| 404 Factory* factory = masm->isolate()->factory(); | 424 Factory* factory = masm->isolate()->factory(); |
| 405 __ cmp(ecx, factory->undefined_value()); | 425 __ cmp(ecx, factory->undefined_value()); |
| 406 Label slow_case; | 426 Label slow_case; |
| 407 __ j(equal, &slow_case); | 427 __ j(equal, &slow_case); |
| 408 | 428 |
| 409 FastCloneShallowArrayStub::Mode mode = mode_; | 429 FastCloneShallowArrayStub::Mode mode = mode_; |
| 410 // ecx is boilerplate object. | 430 // ecx is boilerplate object. |
| 431 FastCloneShallowArrayStub::AllocationInfoMode allocation_info_mode = |
| 432 DONT_TRACK_ALLOCATION_INFO; |
| 433 if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_INFO) { |
| 434 mode = CLONE_ANY_ELEMENTS; |
| 435 allocation_info_mode = TRACK_ALLOCATION_INFO; |
| 436 } |
| 437 |
| 411 if (mode == CLONE_ANY_ELEMENTS) { | 438 if (mode == CLONE_ANY_ELEMENTS) { |
| 412 Label double_elements, check_fast_elements; | 439 Label double_elements, check_fast_elements; |
| 413 __ mov(ebx, FieldOperand(ecx, JSArray::kElementsOffset)); | 440 __ mov(ebx, FieldOperand(ecx, JSArray::kElementsOffset)); |
| 414 __ CheckMap(ebx, factory->fixed_cow_array_map(), | 441 __ CheckMap(ebx, factory->fixed_cow_array_map(), |
| 415 &check_fast_elements, DONT_DO_SMI_CHECK); | 442 &check_fast_elements, DONT_DO_SMI_CHECK); |
| 416 GenerateFastCloneShallowArrayCommon(masm, 0, | 443 GenerateFastCloneShallowArrayCommon(masm, 0, |
| 417 COPY_ON_WRITE_ELEMENTS, &slow_case); | 444 COPY_ON_WRITE_ELEMENTS, |
| 445 allocation_info_mode, |
| 446 &slow_case); |
| 418 __ ret(3 * kPointerSize); | 447 __ ret(3 * kPointerSize); |
| 419 | 448 |
| 420 __ bind(&check_fast_elements); | 449 __ bind(&check_fast_elements); |
| 421 __ CheckMap(ebx, factory->fixed_array_map(), | 450 __ CheckMap(ebx, factory->fixed_array_map(), |
| 422 &double_elements, DONT_DO_SMI_CHECK); | 451 &double_elements, DONT_DO_SMI_CHECK); |
| 423 GenerateFastCloneShallowArrayCommon(masm, length_, | 452 GenerateFastCloneShallowArrayCommon(masm, length_, |
| 424 CLONE_ELEMENTS, &slow_case); | 453 CLONE_ELEMENTS, |
| 454 allocation_info_mode, |
| 455 &slow_case); |
| 425 __ ret(3 * kPointerSize); | 456 __ ret(3 * kPointerSize); |
| 426 | 457 |
| 427 __ bind(&double_elements); | 458 __ bind(&double_elements); |
| 428 mode = CLONE_DOUBLE_ELEMENTS; | 459 mode = CLONE_DOUBLE_ELEMENTS; |
| 429 // Fall through to generate the code to handle double elements. | 460 // Fall through to generate the code to handle double elements. |
| 430 } | 461 } |
| 431 | 462 |
| 432 if (FLAG_debug_code) { | 463 if (FLAG_debug_code) { |
| 433 const char* message; | 464 const char* message; |
| 434 Handle<Map> expected_map; | 465 Handle<Map> expected_map; |
| 435 if (mode == CLONE_ELEMENTS) { | 466 if (mode == CLONE_ELEMENTS) { |
| 436 message = "Expected (writable) fixed array"; | 467 message = "Expected (writable) fixed array"; |
| 437 expected_map = factory->fixed_array_map(); | 468 expected_map = factory->fixed_array_map(); |
| 438 } else if (mode == CLONE_DOUBLE_ELEMENTS) { | 469 } else if (mode == CLONE_DOUBLE_ELEMENTS) { |
| 439 message = "Expected (writable) fixed double array"; | 470 message = "Expected (writable) fixed double array"; |
| 440 expected_map = factory->fixed_double_array_map(); | 471 expected_map = factory->fixed_double_array_map(); |
| 441 } else { | 472 } else { |
| 442 ASSERT(mode == COPY_ON_WRITE_ELEMENTS); | 473 ASSERT(mode == COPY_ON_WRITE_ELEMENTS); |
| 443 message = "Expected copy-on-write fixed array"; | 474 message = "Expected copy-on-write fixed array"; |
| 444 expected_map = factory->fixed_cow_array_map(); | 475 expected_map = factory->fixed_cow_array_map(); |
| 445 } | 476 } |
| 446 __ push(ecx); | 477 __ push(ecx); |
| 447 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset)); | 478 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset)); |
| 448 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map); | 479 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map); |
| 449 __ Assert(equal, message); | 480 __ Assert(equal, message); |
| 450 __ pop(ecx); | 481 __ pop(ecx); |
| 451 } | 482 } |
| 452 | 483 |
| 453 GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case); | 484 GenerateFastCloneShallowArrayCommon(masm, length_, mode, |
| 485 allocation_info_mode, &slow_case); |
| 454 // Return and remove the on-stack parameters. | 486 // Return and remove the on-stack parameters. |
| 455 __ ret(3 * kPointerSize); | 487 __ ret(3 * kPointerSize); |
| 456 | 488 |
| 457 __ bind(&slow_case); | 489 __ bind(&slow_case); |
| 458 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 490 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
| 459 } | 491 } |
| 460 | 492 |
| 461 | 493 |
| 462 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) { | 494 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) { |
| 463 // Stack layout on entry: | 495 // Stack layout on entry: |
| (...skipping 7012 matching lines...) |
| 7476 // Restore ecx. | 7508 // Restore ecx. |
| 7477 __ pop(ecx); | 7509 __ pop(ecx); |
| 7478 __ ret(0); | 7510 __ ret(0); |
| 7479 } | 7511 } |
| 7480 | 7512 |
| 7481 #undef __ | 7513 #undef __ |
| 7482 | 7514 |
| 7483 } } // namespace v8::internal | 7515 } } // namespace v8::internal |
| 7484 | 7516 |
| 7485 #endif // V8_TARGET_ARCH_IA32 | 7517 #endif // V8_TARGET_ARCH_IA32 |
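
What the patch does, in sum: when FastCloneShallowArrayStub is asked to track allocation info, the single AllocateInNewSpace request grows by AllocationSiteInfo::kSize, the site-info record is carved out between the JSArray header and its elements, its two fields are written immediately after allocation, and the elements pointer is bumped past it. Below is a minimal standalone sketch of that layout arithmetic; every concrete size in it is an illustrative assumption (not V8's actual constants), and only the field ordering is taken from the diff above.

// Standalone sketch, not V8 code: the allocation-layout arithmetic that the
// patched GenerateFastCloneShallowArrayCommon performs. Every constant here
// is an illustrative assumption; only the ordering (JSArray header, then
// AllocationSiteInfo, then elements) comes from the diff.
#include <cstdio>

int main() {
  const int kPointerSize = 4;                           // assumed: ia32
  const int kJSArraySize = 4 * kPointerSize;            // assumed header size
  const int kAllocationSiteInfoSize = 2 * kPointerSize; // assumed: map + payload
  const int kFixedArrayHeaderSize = 2 * kPointerSize;   // assumed
  const int length = 4;                                 // example literal length
  const bool track_allocation_info = true;

  int elements_size = kFixedArrayHeaderSize + length * kPointerSize;

  // Mirrors the new size computation: the site info sits between the
  // JSArray and its elements, inside one contiguous allocation.
  int size = kJSArraySize;
  int allocation_info_start = size;
  if (track_allocation_info) size += kAllocationSiteInfoSize;
  size += elements_size;

  int elements_start = track_allocation_info
      ? allocation_info_start + kAllocationSiteInfoSize
      : allocation_info_start;

  printf("one AllocateInNewSpace block of %d bytes:\n", size);
  printf("  JSArray            [%3d, %3d)\n", 0, kJSArraySize);
  if (track_allocation_info) {
    printf("  AllocationSiteInfo [%3d, %3d)\n", allocation_info_start,
           allocation_info_start + kAllocationSiteInfoSize);
  }
  printf("  elements           [%3d, %3d)\n", elements_start, size);
  return 0;
}

The two mov instructions after AllocateInNewSpace then fill the carved-out slot: the first stores the heap's allocation_site_info_map, the second a pointer back to the boilerplate still held in ecx.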