OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 25 matching lines...) |
36 #include "regexp-macro-assembler.h" | 36 #include "regexp-macro-assembler.h" |
37 #include "runtime.h" | 37 #include "runtime.h" |
38 #include "stub-cache.h" | 38 #include "stub-cache.h" |
39 #include "codegen.h" | 39 #include "codegen.h" |
40 #include "runtime.h" | 40 #include "runtime.h" |
41 | 41 |
42 namespace v8 { | 42 namespace v8 { |
43 namespace internal { | 43 namespace internal { |
44 | 44 |
45 | 45 |
| 46 void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( |
| 47 Isolate* isolate, |
| 48 CodeStubInterfaceDescriptor* descriptor) { |
| 49 static Register registers[] = { eax, ebx, ecx }; |
| 50 descriptor->register_param_count_ = 3; |
| 51 descriptor->register_params_ = registers; |
| 52 descriptor->stack_parameter_count_ = NULL; |
| 53 descriptor->deoptimization_handler_ = |
| 54 Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry; |
| 55 } |
| 56 |
| 57 |
46 void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( | 58 void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( |
47 Isolate* isolate, | 59 Isolate* isolate, |
48 CodeStubInterfaceDescriptor* descriptor) { | 60 CodeStubInterfaceDescriptor* descriptor) { |
49 static Register registers[] = { eax, ebx, ecx, edx }; | 61 static Register registers[] = { eax, ebx, ecx, edx }; |
50 descriptor->register_param_count_ = 4; | 62 descriptor->register_param_count_ = 4; |
51 descriptor->register_params_ = registers; | 63 descriptor->register_params_ = registers; |
52 descriptor->stack_parameter_count_ = NULL; | 64 descriptor->stack_parameter_count_ = NULL; |
53 descriptor->deoptimization_handler_ = | 65 descriptor->deoptimization_handler_ = |
54 Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry; | 66 Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry; |
55 } | 67 } |
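The descriptor fields above tell the Hydrogen-based stub machinery which registers carry the stub's parameters and which runtime function serves as the deoptimization fallback. A minimal sketch of how such a descriptor might be consumed; the helper below is hypothetical, and only the field names (register_param_count_, register_params_, deoptimization_handler_) come from the code above:

    // Hypothetical consumer, for illustration only: spill the registered
    // parameter registers before falling back to the runtime handler.
    static void PushStubParameters(MacroAssembler* masm,
                                   CodeStubInterfaceDescriptor* descriptor) {
      for (int i = 0; i < descriptor->register_param_count_; ++i) {
        masm->push(descriptor->register_params_[i]);  // e.g. eax, ebx, ecx
      }
    }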
(...skipping 330 matching lines...) |
386 // Return and remove the on-stack parameters. | 398 // Return and remove the on-stack parameters. |
387 __ mov(esi, eax); | 399 __ mov(esi, eax); |
388 __ ret(2 * kPointerSize); | 400 __ ret(2 * kPointerSize); |
389 | 401 |
390 // Need to collect. Call into runtime system. | 402 // Need to collect. Call into runtime system. |
391 __ bind(&gc); | 403 __ bind(&gc); |
392 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | 404 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); |
393 } | 405 } |
394 | 406 |
395 | 407 |
396 static void GenerateFastCloneShallowArrayCommon( | |
397 MacroAssembler* masm, | |
398 int length, | |
399 FastCloneShallowArrayStub::Mode mode, | |
400 AllocationSiteMode allocation_site_mode, | |
401 Label* fail) { | |
402 // Registers on entry: | |
403 // | |
404 // ecx: boilerplate literal array. | |
405 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); | |
406 | |
407 // All sizes here are multiples of kPointerSize. | |
408 int elements_size = 0; | |
409 if (length > 0) { | |
410 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | |
411 ? FixedDoubleArray::SizeFor(length) | |
412 : FixedArray::SizeFor(length); | |
413 } | |
414 int size = JSArray::kSize; | |
415 int allocation_info_start = size; | |
416 if (allocation_site_mode == TRACK_ALLOCATION_SITE) { | |
417 size += AllocationSiteInfo::kSize; | |
418 } | |
419 size += elements_size; | |
420 | |
421 // Allocate both the JS array and the elements array in one big | |
422 // allocation. This avoids multiple limit checks. | |
423 AllocationFlags flags = TAG_OBJECT; | |
424 if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) { | |
425 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags); | |
426 } | |
427 __ Allocate(size, eax, ebx, edx, fail, flags); | |
428 | |
429 if (allocation_site_mode == TRACK_ALLOCATION_SITE) { | |
430 __ mov(FieldOperand(eax, allocation_info_start), | |
431 Immediate(Handle<Map>(masm->isolate()->heap()-> | |
432 allocation_site_info_map()))); | |
433 __ mov(FieldOperand(eax, allocation_info_start + kPointerSize), ecx); | |
434 } | |
435 | |
436 // Copy the JS array part. | |
437 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | |
438 if ((i != JSArray::kElementsOffset) || (length == 0)) { | |
439 __ mov(ebx, FieldOperand(ecx, i)); | |
440 __ mov(FieldOperand(eax, i), ebx); | |
441 } | |
442 } | |
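A note on the copy loop above: the elements field is deliberately skipped when length > 0, since copying it would leave the clone pointing at the boilerplate's backing store; the code below redirects it to the freshly allocated elements.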
443 | |
444 if (length > 0) { | |
445 // Get hold of the elements array of the boilerplate and set up the |
446 // elements pointer in the resulting object. | |
447 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset)); | |
448 if (allocation_site_mode == TRACK_ALLOCATION_SITE) { | |
449 __ lea(edx, Operand(eax, JSArray::kSize + AllocationSiteInfo::kSize)); | |
450 } else { | |
451 __ lea(edx, Operand(eax, JSArray::kSize)); | |
452 } | |
453 __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx); | |
454 | |
455 // Copy the elements array. | |
456 if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) { | |
457 for (int i = 0; i < elements_size; i += kPointerSize) { | |
458 __ mov(ebx, FieldOperand(ecx, i)); | |
459 __ mov(FieldOperand(edx, i), ebx); | |
460 } | |
461 } else { | |
462 ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS); | |
463 int i; | |
464 for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) { | |
465 __ mov(ebx, FieldOperand(ecx, i)); | |
466 __ mov(FieldOperand(edx, i), ebx); | |
467 } | |
468 while (i < elements_size) { | |
469 __ fld_d(FieldOperand(ecx, i)); | |
470 __ fstp_d(FieldOperand(edx, i)); | |
471 i += kDoubleSize; | |
472 } | |
473 ASSERT(i == elements_size); | |
474 } | |
475 } | |
476 } | |
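The single Allocate call in the helper above folds the JSArray header, an optional AllocationSiteInfo slot, and the elements store into one chunk, so only one heap-limit check is needed. A worked instance of the size computation, assuming typical ia32 constants (kPointerSize == 4, JSArray::kSize == 16, FixedArray::kHeaderSize == 8; these values are assumptions, not stated in this diff):

    // Cloning a 3-element FAST_ELEMENTS array without allocation-site
    // tracking, under the assumed ia32 constants:
    int length = 3;
    int elements_size = FixedArray::SizeFor(length);  // 8 + 3 * 4 == 20
    int size = JSArray::kSize + elements_size;        // 16 + 20 == 36
    // With TRACK_ALLOCATION_SITE, AllocationSiteInfo::kSize is added
    // between the JSArray header and the elements; allocation_info_start
    // still marks the end of the JSArray header.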
477 | |
478 | |
479 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | |
480 // Stack layout on entry: | |
481 // | |
482 // [esp + kPointerSize]: constant elements. | |
483 // [esp + (2 * kPointerSize)]: literal index. | |
484 // [esp + (3 * kPointerSize)]: literals array. | |
485 | |
486 // Load boilerplate object into ecx and check if we need to create a | |
487 // boilerplate. | |
488 __ mov(ecx, Operand(esp, 3 * kPointerSize)); | |
489 __ mov(eax, Operand(esp, 2 * kPointerSize)); | |
490 STATIC_ASSERT(kPointerSize == 4); | |
491 STATIC_ASSERT(kSmiTagSize == 1); | |
492 STATIC_ASSERT(kSmiTag == 0); | |
493 __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size, | |
494 FixedArray::kHeaderSize)); | |
495 Factory* factory = masm->isolate()->factory(); | |
496 __ cmp(ecx, factory->undefined_value()); | |
497 Label slow_case; | |
498 __ j(equal, &slow_case); | |
499 | |
500 FastCloneShallowArrayStub::Mode mode = mode_; | |
501 // ecx is boilerplate object. | |
502 if (mode == CLONE_ANY_ELEMENTS) { | |
503 Label double_elements, check_fast_elements; | |
504 __ mov(ebx, FieldOperand(ecx, JSArray::kElementsOffset)); | |
505 __ CheckMap(ebx, factory->fixed_cow_array_map(), | |
506 &check_fast_elements, DONT_DO_SMI_CHECK); | |
507 GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS, | |
508 allocation_site_mode_, | |
509 &slow_case); | |
510 __ ret(3 * kPointerSize); | |
511 | |
512 __ bind(&check_fast_elements); | |
513 __ CheckMap(ebx, factory->fixed_array_map(), | |
514 &double_elements, DONT_DO_SMI_CHECK); | |
515 GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS, | |
516 allocation_site_mode_, | |
517 &slow_case); | |
518 __ ret(3 * kPointerSize); | |
519 | |
520 __ bind(&double_elements); | |
521 mode = CLONE_DOUBLE_ELEMENTS; | |
522 // Fall through to generate the code to handle double elements. | |
523 } | |
524 | |
525 if (FLAG_debug_code) { | |
526 const char* message; | |
527 Handle<Map> expected_map; | |
528 if (mode == CLONE_ELEMENTS) { | |
529 message = "Expected (writable) fixed array"; | |
530 expected_map = factory->fixed_array_map(); | |
531 } else if (mode == CLONE_DOUBLE_ELEMENTS) { | |
532 message = "Expected (writable) fixed double array"; | |
533 expected_map = factory->fixed_double_array_map(); | |
534 } else { | |
535 ASSERT(mode == COPY_ON_WRITE_ELEMENTS); | |
536 message = "Expected copy-on-write fixed array"; | |
537 expected_map = factory->fixed_cow_array_map(); | |
538 } | |
539 __ push(ecx); | |
540 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset)); | |
541 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map); | |
542 __ Assert(equal, message); | |
543 __ pop(ecx); | |
544 } | |
545 | |
546 GenerateFastCloneShallowArrayCommon(masm, length_, mode, | |
547 allocation_site_mode_, | |
548 &slow_case); | |
549 | |
550 // Return and remove the on-stack parameters. | |
551 __ ret(3 * kPointerSize); | |
552 | |
553 __ bind(&slow_case); | |
554 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | |
555 } | |
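The three STATIC_ASSERTs in Generate above pin down the smi-index addressing trick used to load the boilerplate: the literal index in eax is a smi, i.e. value << 1 (kSmiTag == 0, kSmiTagSize == 1), so the times_half_pointer_size scale multiplies by 2 again, yielding value * 4 == value * kPointerSize, exactly the byte offset of the entry in the literals FixedArray:

    // FieldOperand(ecx, eax, times_half_pointer_size, FixedArray::kHeaderSize)
    //   computes: ecx + (smi * 2) + FixedArray::kHeaderSize - kHeapObjectTag
    //          == ecx + value * kPointerSize + FixedArray::kHeaderSize
    //             - kHeapObjectTag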
556 | |
557 | |
558 // The stub expects its argument on the stack and returns its result in tos_: | 408 // The stub expects its argument on the stack and returns its result in tos_: |
559 // zero for false, and a non-zero value for true. | 409 // zero for false, and a non-zero value for true. |
560 void ToBooleanStub::Generate(MacroAssembler* masm) { | 410 void ToBooleanStub::Generate(MacroAssembler* masm) { |
561 // This stub overrides SometimesSetsUpAFrame() to return false. That means | 411 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
562 // we cannot call anything that could cause a GC from this stub. | 412 // we cannot call anything that could cause a GC from this stub. |
563 Label patch; | 413 Label patch; |
564 Factory* factory = masm->isolate()->factory(); | 414 Factory* factory = masm->isolate()->factory(); |
565 const Register argument = eax; | 415 const Register argument = eax; |
566 const Register map = edx; | 416 const Register map = edx; |
567 | 417 |
(...skipping 4501 matching lines...) |
5069 | 4919 |
5070 bool CEntryStub::IsPregenerated() { | 4920 bool CEntryStub::IsPregenerated() { |
5071 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) && | 4921 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) && |
5072 result_size_ == 1; | 4922 result_size_ == 1; |
5073 } | 4923 } |
5074 | 4924 |
5075 | 4925 |
5076 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 4926 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
5077 CEntryStub::GenerateAheadOfTime(isolate); | 4927 CEntryStub::GenerateAheadOfTime(isolate); |
5078 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 4928 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
| 4929 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
5079 // It is important that the store buffer overflow stubs are generated first. | 4930 // It is important that the store buffer overflow stubs are generated first. |
5080 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); | 4931 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); |
5081 } | 4932 } |
5082 | 4933 |
5083 | 4934 |
5084 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 4935 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
5085 if (CpuFeatures::IsSupported(SSE2)) { | 4936 if (CpuFeatures::IsSupported(SSE2)) { |
5086 CEntryStub save_doubles(1, kSaveFPRegs); | 4937 CEntryStub save_doubles(1, kSaveFPRegs); |
5087 // Stubs might already be in the snapshot; detect that and don't regenerate, | 4938 // Stubs might already be in the snapshot; detect that and don't regenerate, |
5088 // since regenerating would leave the stub initialization state inconsistent. | 4939 // since regenerating would leave the stub initialization state inconsistent. |
(...skipping 2690 matching lines...) |
7779 edi, | 7630 edi, |
7780 xmm0, | 7631 xmm0, |
7781 &slow_elements_from_double, | 7632 &slow_elements_from_double, |
7782 false); | 7633 false); |
7783 __ pop(edx); | 7634 __ pop(edx); |
7784 __ ret(0); | 7635 __ ret(0); |
7785 } | 7636 } |
7786 | 7637 |
7787 | 7638 |
7788 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { | 7639 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { |
7789 ASSERT(!Serializer::enabled()); | 7640 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); |
7790 bool save_fp_regs = CpuFeatures::IsSupported(SSE2); | |
7791 CEntryStub ces(1, save_fp_regs ? kSaveFPRegs : kDontSaveFPRegs); | |
7792 __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); | 7641 __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); |
7793 int parameter_count_offset = | 7642 int parameter_count_offset = |
7794 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; | 7643 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; |
7795 __ mov(ebx, MemOperand(ebp, parameter_count_offset)); | 7644 __ mov(ebx, MemOperand(ebp, parameter_count_offset)); |
7796 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 7645 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
7797 __ pop(ecx); | 7646 __ pop(ecx); |
7798 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE | 7647 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE |
7799 ? kPointerSize | 7648 ? kPointerSize |
7800 : 0; | 7649 : 0; |
7801 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset)); | 7650 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset)); |
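For clarity on the lea above: it advances esp past the ebx caller stack parameters (the count just loaded from the stub-failure frame), plus one extra slot in JS_FUNCTION_STUB_MODE for the function itself; the return address was popped into ecx beforehand.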
(...skipping 30 matching lines...) |
7832 // Restore ecx. | 7681 // Restore ecx. |
7833 __ pop(ecx); | 7682 __ pop(ecx); |
7834 __ ret(0); | 7683 __ ret(0); |
7835 } | 7684 } |
7836 | 7685 |
7837 #undef __ | 7686 #undef __ |
7838 | 7687 |
7839 } } // namespace v8::internal | 7688 } } // namespace v8::internal |
7840 | 7689 |
7841 #endif // V8_TARGET_ARCH_IA32 | 7690 #endif // V8_TARGET_ARCH_IA32 |