OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 25 matching lines...) |
36 #include "regexp-macro-assembler.h" | 36 #include "regexp-macro-assembler.h" |
37 #include "runtime.h" | 37 #include "runtime.h" |
38 #include "stub-cache.h" | 38 #include "stub-cache.h" |
39 #include "codegen.h" | 39 #include "codegen.h" |
40 #include "runtime.h" | 40 #include "runtime.h" |
41 | 41 |
42 namespace v8 { | 42 namespace v8 { |
43 namespace internal { | 43 namespace internal { |
44 | 44 |
45 | 45 |
| 46 void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( |
| 47 Isolate* isolate, |
| 48 CodeStubInterfaceDescriptor* descriptor) { |
| 49 static Register registers[] = { eax, ebx, ecx }; |
| 50 descriptor->register_param_count_ = 3; |
| 51 descriptor->register_params_ = registers; |
| 52 descriptor->stack_parameter_count_ = NULL; |
| 53 descriptor->deoptimization_handler_ = |
| 54 Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry; |
| 55 } |
| 56 |
| 57 |
46 void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( | 58 void FastCloneShallowObjectStub::InitializeInterfaceDescriptor( |
47 Isolate* isolate, | 59 Isolate* isolate, |
48 CodeStubInterfaceDescriptor* descriptor) { | 60 CodeStubInterfaceDescriptor* descriptor) { |
49 static Register registers[] = { eax, ebx, ecx, edx }; | 61 static Register registers[] = { eax, ebx, ecx, edx }; |
50 descriptor->register_param_count_ = 4; | 62 descriptor->register_param_count_ = 4; |
51 descriptor->register_params_ = registers; | 63 descriptor->register_params_ = registers; |
52 descriptor->stack_parameter_count_ = NULL; | 64 descriptor->stack_parameter_count_ = NULL; |
53 descriptor->deoptimization_handler_ = | 65 descriptor->deoptimization_handler_ = |
54 Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry; | 66 Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry; |
55 } | 67 } |
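Note on the two hunks above: this change ports the shallow-array-literal stub from hand-written assembly (deleted further down) to the interface-descriptor scheme already used by the object-literal stub, so the stub only declares which registers carry its parameters and which runtime function backs it on bailout. A minimal standalone model of that data-driven convention, with illustrative names only (not V8's real types):

    #include <cstdio>

    // Hypothetical mock of the descriptor idea: a stub is described by data,
    // and a generic pipeline generates the body from that description.
    struct InterfaceDescriptor {
      int register_param_count;
      const char* const* register_params;   // symbolic register names
      const char* deoptimization_handler;   // runtime fallback on bailout
    };

    void InitializeArrayLiteralDescriptor(InterfaceDescriptor* d) {
      static const char* const kRegs[] = { "eax", "ebx", "ecx" };
      d->register_param_count = 3;
      d->register_params = kRegs;
      d->deoptimization_handler = "Runtime::kCreateArrayLiteralShallow";
    }

    int main() {
      InterfaceDescriptor d;
      InitializeArrayLiteralDescriptor(&d);
      for (int i = 0; i < d.register_param_count; ++i)
        std::printf("register param %d: %s\n", i, d.register_params[i]);
      std::printf("deopt handler: %s\n", d.deoptimization_handler);
      return 0;
    }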
(...skipping 320 matching lines...) |
376 // Return and remove the on-stack parameters. | 388 // Return and remove the on-stack parameters. |
377 __ mov(esi, eax); | 389 __ mov(esi, eax); |
378 __ ret(2 * kPointerSize); | 390 __ ret(2 * kPointerSize); |
379 | 391 |
380 // Need to collect. Call into runtime system. | 392 // Need to collect. Call into runtime system. |
381 __ bind(&gc); | 393 __ bind(&gc); |
382 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | 394 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); |
383 } | 395 } |
384 | 396 |
385 | 397 |
386 static void GenerateFastCloneShallowArrayCommon( | |
387 MacroAssembler* masm, | |
388 int length, | |
389 FastCloneShallowArrayStub::Mode mode, | |
390 AllocationSiteMode allocation_site_mode, | |
391 Label* fail) { | |
392 // Registers on entry: | |
393 // | |
394 // ecx: boilerplate literal array. | |
395 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); | |
396 | |
397 // All sizes here are multiples of kPointerSize. | |
398 int elements_size = 0; | |
399 if (length > 0) { | |
400 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | |
401 ? FixedDoubleArray::SizeFor(length) | |
402 : FixedArray::SizeFor(length); | |
403 } | |
404 int size = JSArray::kSize; | |
405 int allocation_info_start = size; | |
406 if (allocation_site_mode == TRACK_ALLOCATION_SITE) { | |
407 size += AllocationSiteInfo::kSize; | |
408 } | |
409 size += elements_size; | |
410 | |
411 // Allocate both the JS array and the elements array in one big | |
412 // allocation. This avoids multiple limit checks. | |
413 AllocationFlags flags = TAG_OBJECT; | |
414 if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) { | |
415 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags); | |
416 } | |
417 __ AllocateInNewSpace(size, eax, ebx, edx, fail, flags); | |
418 | |
419 if (allocation_site_mode == TRACK_ALLOCATION_SITE) { | |
420 __ mov(FieldOperand(eax, allocation_info_start), | |
421 Immediate(Handle<Map>(masm->isolate()->heap()-> | |
422 allocation_site_info_map()))); | |
423 __ mov(FieldOperand(eax, allocation_info_start + kPointerSize), ecx); | |
424 } | |
425 | |
426 // Copy the JS array part. | |
427 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | |
428 if ((i != JSArray::kElementsOffset) || (length == 0)) { | |
429 __ mov(ebx, FieldOperand(ecx, i)); | |
430 __ mov(FieldOperand(eax, i), ebx); | |
431 } | |
432 } | |
433 | |
434 if (length > 0) { | |
435 // Get hold of the elements array of the boilerplate and set up the |
436 // elements pointer in the resulting object. | |
437 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset)); | |
438 if (allocation_site_mode == TRACK_ALLOCATION_SITE) { | |
439 __ lea(edx, Operand(eax, JSArray::kSize + AllocationSiteInfo::kSize)); | |
440 } else { | |
441 __ lea(edx, Operand(eax, JSArray::kSize)); | |
442 } | |
443 __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx); | |
444 | |
445 // Copy the elements array. | |
446 if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) { | |
447 for (int i = 0; i < elements_size; i += kPointerSize) { | |
448 __ mov(ebx, FieldOperand(ecx, i)); | |
449 __ mov(FieldOperand(edx, i), ebx); | |
450 } | |
451 } else { | |
452 ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS); | |
453 int i; | |
454 for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) { | |
455 __ mov(ebx, FieldOperand(ecx, i)); | |
456 __ mov(FieldOperand(edx, i), ebx); | |
457 } | |
458 while (i < elements_size) { | |
459 __ fld_d(FieldOperand(ecx, i)); | |
460 __ fstp_d(FieldOperand(edx, i)); | |
461 i += kDoubleSize; | |
462 } | |
463 ASSERT(i == elements_size); | |
464 } | |
465 } | |
466 } | |
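The "one big allocation" comment above is the central trick of this helper: the JSArray header, the optional AllocationSiteInfo slot, and the elements store are laid out contiguously and allocated with a single new-space bump, so the allocation limit is checked once rather than per object. A rough plain-C++ analogy of one block serving a header plus a trailing payload (sizes and names invented for illustration):

    #include <cstddef>
    #include <cstdlib>

    // Illustrative only: one malloc covers the header and the elements,
    // the way AllocateInNewSpace above covers JSArray + elements at once.
    struct ArrayHeader {
      size_t length;
      double* elements;  // points into the same block, just past the header
    };

    ArrayHeader* AllocateArrayWithElements(size_t length) {
      size_t size = sizeof(ArrayHeader) + length * sizeof(double);
      char* block = static_cast<char*>(std::malloc(size));  // one limit check
      if (block == nullptr) return nullptr;  // analogous to the stub's fail label
      ArrayHeader* header = reinterpret_cast<ArrayHeader*>(block);
      header->length = length;
      header->elements = reinterpret_cast<double*>(block + sizeof(ArrayHeader));
      return header;
    }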
467 | |
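The CLONE_DOUBLE_ELEMENTS branch above also copies in two phases: header fields in pointer-size words, then the payload in whole 8-byte doubles via x87 fld_d/fstp_d (one move per double instead of two 32-bit moves on ia32). The same split, sketched in portable C++:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Mirrors the loop above: copy header words first, then 64-bit payloads.
    void CopyDoubleArray(const char* src, char* dst,
                         size_t header_size, size_t total_size) {
      size_t i = 0;
      for (; i < header_size; i += sizeof(uintptr_t)) {  // map, length, ...
        std::memcpy(dst + i, src + i, sizeof(uintptr_t));
      }
      for (; i < total_size; i += sizeof(uint64_t)) {    // like fld_d/fstp_d
        std::memcpy(dst + i, src + i, sizeof(uint64_t));
      }
    }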
468 | |
469 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | |
470 // Stack layout on entry: | |
471 // | |
472 // [esp + kPointerSize]: constant elements. | |
473 // [esp + (2 * kPointerSize)]: literal index. | |
474 // [esp + (3 * kPointerSize)]: literals array. | |
475 | |
476 // Load boilerplate object into ecx and check if we need to create a | |
477 // boilerplate. | |
478 __ mov(ecx, Operand(esp, 3 * kPointerSize)); | |
479 __ mov(eax, Operand(esp, 2 * kPointerSize)); | |
480 STATIC_ASSERT(kPointerSize == 4); | |
481 STATIC_ASSERT(kSmiTagSize == 1); | |
482 STATIC_ASSERT(kSmiTag == 0); | |
483 __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size, | |
484 FixedArray::kHeaderSize)); | |
485 Factory* factory = masm->isolate()->factory(); | |
486 __ cmp(ecx, factory->undefined_value()); | |
487 Label slow_case; | |
488 __ j(equal, &slow_case); | |
489 | |
490 FastCloneShallowArrayStub::Mode mode = mode_; | |
491 // ecx is boilerplate object. | |
492 if (mode == CLONE_ANY_ELEMENTS) { | |
493 Label double_elements, check_fast_elements; | |
494 __ mov(ebx, FieldOperand(ecx, JSArray::kElementsOffset)); | |
495 __ CheckMap(ebx, factory->fixed_cow_array_map(), | |
496 &check_fast_elements, DONT_DO_SMI_CHECK); | |
497 GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS, | |
498 allocation_site_mode_, | |
499 &slow_case); | |
500 __ ret(3 * kPointerSize); | |
501 | |
502 __ bind(&check_fast_elements); | |
503 __ CheckMap(ebx, factory->fixed_array_map(), | |
504 &double_elements, DONT_DO_SMI_CHECK); | |
505 GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS, | |
506 allocation_site_mode_, | |
507 &slow_case); | |
508 __ ret(3 * kPointerSize); | |
509 | |
510 __ bind(&double_elements); | |
511 mode = CLONE_DOUBLE_ELEMENTS; | |
512 // Fall through to generate the code to handle double elements. | |
513 } | |
514 | |
515 if (FLAG_debug_code) { | |
516 const char* message; | |
517 Handle<Map> expected_map; | |
518 if (mode == CLONE_ELEMENTS) { | |
519 message = "Expected (writable) fixed array"; | |
520 expected_map = factory->fixed_array_map(); | |
521 } else if (mode == CLONE_DOUBLE_ELEMENTS) { | |
522 message = "Expected (writable) fixed double array"; | |
523 expected_map = factory->fixed_double_array_map(); | |
524 } else { | |
525 ASSERT(mode == COPY_ON_WRITE_ELEMENTS); | |
526 message = "Expected copy-on-write fixed array"; | |
527 expected_map = factory->fixed_cow_array_map(); | |
528 } | |
529 __ push(ecx); | |
530 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset)); | |
531 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map); | |
532 __ Assert(equal, message); | |
533 __ pop(ecx); | |
534 } | |
535 | |
536 GenerateFastCloneShallowArrayCommon(masm, length_, mode, | |
537 allocation_site_mode_, | |
538 &slow_case); | |
539 | |
540 // Return and remove the on-stack parameters. | |
541 __ ret(3 * kPointerSize); | |
542 | |
543 __ bind(&slow_case); | |
544 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | |
545 } | |
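One detail near the top of Generate above is worth spelling out: the literal index arrives smi-tagged (shifted left by one), and the three STATIC_ASSERTs justify using times_half_pointer_size to turn it into a byte offset, since smi * (kPointerSize / 2) equals index * kPointerSize on ia32. The arithmetic, checked in plain C++:

    #include <cassert>

    // ia32 smi encoding assumed by the STATIC_ASSERTs above.
    const int kPointerSize = 4;
    const int kSmiTagSize = 1;  // smi = value << 1, tag bit is 0

    int main() {
      int index = 5;                               // untagged literal index
      int smi = index << kSmiTagSize;              // what the stub sees in eax
      int byte_offset = smi * (kPointerSize / 2);  // times_half_pointer_size
      assert(byte_offset == index * kPointerSize);
      return 0;
    }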
546 | |
547 | |
548 // The stub expects its argument on the stack and returns its result in tos_: | 398 // The stub expects its argument on the stack and returns its result in tos_: |
549 // zero for false, and a non-zero value for true. | 399 // zero for false, and a non-zero value for true. |
550 void ToBooleanStub::Generate(MacroAssembler* masm) { | 400 void ToBooleanStub::Generate(MacroAssembler* masm) { |
551 // This stub overrides SometimesSetsUpAFrame() to return false. That means | 401 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
552 // we cannot call anything that could cause a GC from this stub. | 402 // we cannot call anything that could cause a GC from this stub. |
553 Label patch; | 403 Label patch; |
554 Factory* factory = masm->isolate()->factory(); | 404 Factory* factory = masm->isolate()->factory(); |
555 const Register argument = eax; | 405 const Register argument = eax; |
556 const Register map = edx; | 406 const Register map = edx; |
557 | 407 |
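For context on the contract described above: ECMAScript's ToBoolean yields false for exactly undefined, null, false, ±0, NaN, and the empty string, and true for everything else, all objects included. A host-side restatement of that table in C++ (a sketch of the spec rule, not of the stub's patched fast paths):

    #include <cmath>
    #include <string>

    enum class Type { kUndefined, kNull, kBoolean, kNumber, kString, kObject };

    // ES ToBoolean, restated: six falsy cases, everything else is true.
    bool ToBoolean(Type type, bool b, double num, const std::string& str) {
      switch (type) {
        case Type::kUndefined:
        case Type::kNull:    return false;
        case Type::kBoolean: return b;
        case Type::kNumber:  return num != 0.0 && !std::isnan(num);  // ±0, NaN
        case Type::kString:  return !str.empty();
        case Type::kObject:  return true;
      }
      return true;
    }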
(...skipping 4542 matching lines...) |
5100 | 4950 |
5101 bool CEntryStub::IsPregenerated() { | 4951 bool CEntryStub::IsPregenerated() { |
5102 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) && | 4952 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) && |
5103 result_size_ == 1; | 4953 result_size_ == 1; |
5104 } | 4954 } |
5105 | 4955 |
5106 | 4956 |
5107 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 4957 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
5108 CEntryStub::GenerateAheadOfTime(isolate); | 4958 CEntryStub::GenerateAheadOfTime(isolate); |
5109 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 4959 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
| 4960 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
5110 // It is important that the store buffer overflow stubs are generated first. | 4961 // It is important that the store buffer overflow stubs are generated first. |
5111 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); | 4962 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); |
5112 } | 4963 } |
5113 | 4964 |
5114 | 4965 |
5115 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 4966 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
5116 if (CpuFeatures::IsSupported(SSE2)) { | 4967 if (CpuFeatures::IsSupported(SSE2)) { |
5117 CEntryStub save_doubles(1, kSaveFPRegs); | 4968 CEntryStub save_doubles(1, kSaveFPRegs); |
5118 // Stubs might already be in the snapshot, detect that and don't regenerate, | 4969 // Stubs might already be in the snapshot, detect that and don't regenerate, |
5119 // which would lead to code stub initialization state being messed up. | 4970 // which would lead to code stub initialization state being messed up. |
(...skipping 2700 matching lines...) |
7820 edi, | 7671 edi, |
7821 xmm0, | 7672 xmm0, |
7822 &slow_elements_from_double, | 7673 &slow_elements_from_double, |
7823 false); | 7674 false); |
7824 __ pop(edx); | 7675 __ pop(edx); |
7825 __ ret(0); | 7676 __ ret(0); |
7826 } | 7677 } |
7827 | 7678 |
7828 | 7679 |
7829 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { | 7680 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { |
7830 ASSERT(!Serializer::enabled()); | 7681 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); |
7831 bool save_fp_regs = CpuFeatures::IsSupported(SSE2); | |
7832 CEntryStub ces(1, save_fp_regs ? kSaveFPRegs : kDontSaveFPRegs); | |
7833 __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); | 7682 __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); |
7834 int parameter_count_offset = | 7683 int parameter_count_offset = |
7835 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; | 7684 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; |
7836 __ mov(ebx, MemOperand(ebp, parameter_count_offset)); | 7685 __ mov(ebx, MemOperand(ebp, parameter_count_offset)); |
7837 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 7686 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
7838 __ pop(ecx); | 7687 __ pop(ecx); |
7839 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, | 7688 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, |
7840 extra_expression_stack_count_ * kPointerSize)); | 7689 extra_expression_stack_count_ * kPointerSize)); |
7841 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack. | 7690 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack. |
7842 } | 7691 } |
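The tail of the trampoline above performs variable-size stack cleanup: the caller's stack parameter count was recorded in the frame, the return address is popped into ecx, esp is advanced past that many parameters plus the fixed extra expression-stack slots, and control jumps through ecx. The esp arithmetic of that final lea, worked through with hypothetical numbers:

    #include <cassert>

    const int kPointerSize = 4;  // ia32

    int main() {
      unsigned esp = 0x1000;  // stack pointer right after "pop ecx"
      unsigned ebx = 3;       // caller stack parameter count, read from the frame
      unsigned extra_expression_stack_count = 1;  // hypothetical stub constant
      // lea esp, [esp + ebx * kPointerSize + extra * kPointerSize]
      esp += ebx * kPointerSize + extra_expression_stack_count * kPointerSize;
      assert(esp == 0x1000 + 4 * kPointerSize);  // 3 params + 1 extra slot skipped
      return 0;
    }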
(...skipping 28 matching lines...) |
7871 // Restore ecx. | 7720 // Restore ecx. |
7872 __ pop(ecx); | 7721 __ pop(ecx); |
7873 __ ret(0); | 7722 __ ret(0); |
7874 } | 7723 } |
7875 | 7724 |
7876 #undef __ | 7725 #undef __ |
7877 | 7726 |
7878 } } // namespace v8::internal | 7727 } } // namespace v8::internal |
7879 | 7728 |
7880 #endif // V8_TARGET_ARCH_IA32 | 7729 #endif // V8_TARGET_ARCH_IA32 |