OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5678 matching lines...)
5689 | 5689 |
5690 frame_->Push(&literals); | 5690 frame_->Push(&literals); |
5691 frame_->Push(Smi::FromInt(node->literal_index())); | 5691 frame_->Push(Smi::FromInt(node->literal_index())); |
5692 frame_->Push(node->constant_elements()); | 5692 frame_->Push(node->constant_elements()); |
5693 int length = node->values()->length(); | 5693 int length = node->values()->length(); |
5694 Result clone; | 5694 Result clone; |
5695 if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) { | 5695 if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) { |
5696 FastCloneShallowArrayStub stub( | 5696 FastCloneShallowArrayStub stub( |
5697 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); | 5697 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); |
5698 clone = frame_->CallStub(&stub, 3); | 5698 clone = frame_->CallStub(&stub, 3); |
5699 __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1); | 5699 Counters* counters = masm()->isolate()->counters(); |
| 5700 __ IncrementCounter(counters->cow_arrays_created_stub(), 1); |
5700 } else if (node->depth() > 1) { | 5701 } else if (node->depth() > 1) { |
5701 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3); | 5702 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3); |
5702 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { | 5703 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
5703 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); | 5704 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); |
5704 } else { | 5705 } else { |
5705 FastCloneShallowArrayStub stub( | 5706 FastCloneShallowArrayStub stub( |
5706 FastCloneShallowArrayStub::CLONE_ELEMENTS, length); | 5707 FastCloneShallowArrayStub::CLONE_ELEMENTS, length); |
5707 clone = frame_->CallStub(&stub, 3); | 5708 clone = frame_->CallStub(&stub, 3); |
5708 } | 5709 } |
5709 frame_->Push(&clone); | 5710 frame_->Push(&clone); |
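The hunk above picks among four cloning strategies for an array literal; stripped of the frame plumbing, the decision tree is easier to see. A minimal sketch of the dispatch, with names taken from the hunk (the enum is hypothetical shorthand, not a V8 type):

  // Copy-on-write elements take the COW stub; deep or oversized literals
  // fall back to the runtime; small shallow ones use the clone stub.
  enum CloneStrategy { kCowStub, kFullRuntime, kShallowRuntime, kCloneStub };

  CloneStrategy ChooseCloneStrategy(bool is_cow, int depth, int length,
                                    int max_cloned_length) {
    if (is_cow) return kCowStub;              // fixed_cow_array_map case
    if (depth > 1) return kFullRuntime;       // Runtime::kCreateArrayLiteral
    if (length > max_cloned_length) return kShallowRuntime;
    return kCloneStub;                        // CLONE_ELEMENTS stub
  }

Note also the pattern on the new side of this hunk, which recurs through the rest of the file: the stats table is reached through the MacroAssembler's isolate (masm()->isolate()->counters()) rather than a process-global COUNTERS macro.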
(...skipping 3692 matching lines...)
9402 // case was inlined. | 9403 // case was inlined. |
9403 // - a mov ecx or mov edx instruction to indicate that the | 9404 // - a mov ecx or mov edx instruction to indicate that the |
9404 // contextual property load was inlined. | 9405 // contextual property load was inlined. |
9405 // | 9406 // |
9406 // Store the delta to the map check instruction here in the test | 9407 // Store the delta to the map check instruction here in the test |
9407 // instruction. Use masm_-> instead of the __ macro since the | 9408 // instruction. Use masm_-> instead of the __ macro since the |
9408 // latter can't return a value. | 9409 // latter can't return a value. |
9409 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); | 9410 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); |
9410 // Here we use masm_-> instead of the __ macro because this is the | 9411 // Here we use masm_-> instead of the __ macro because this is the |
9411 // instruction that gets patched and coverage code gets in the way. | 9412 // instruction that gets patched and coverage code gets in the way. |
| 9413 Counters* counters = masm()->isolate()->counters(); |
9412 if (is_contextual_) { | 9414 if (is_contextual_) { |
9413 masm_->mov(is_dont_delete_ ? edx : ecx, -delta_to_patch_site); | 9415 masm_->mov(is_dont_delete_ ? edx : ecx, -delta_to_patch_site); |
9414 __ IncrementCounter(COUNTERS->named_load_global_inline_miss(), 1); | 9416 __ IncrementCounter(counters->named_load_global_inline_miss(), 1); |
9415 if (is_dont_delete_) { | 9417 if (is_dont_delete_) { |
9416 __ IncrementCounter(COUNTERS->dont_delete_hint_miss(), 1); | 9418 __ IncrementCounter(counters->dont_delete_hint_miss(), 1); |
9417 } | 9419 } |
9418 } else { | 9420 } else { |
9419 masm_->test(eax, Immediate(-delta_to_patch_site)); | 9421 masm_->test(eax, Immediate(-delta_to_patch_site)); |
9420 __ IncrementCounter(COUNTERS->named_load_inline_miss(), 1); | 9422 __ IncrementCounter(counters->named_load_inline_miss(), 1); |
9421 } | 9423 } |
9422 | 9424 |
9423 if (!dst_.is(eax)) __ mov(dst_, eax); | 9425 if (!dst_.is(eax)) __ mov(dst_, eax); |
9424 } | 9426 } |
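The negative-immediate trick described in the comments above is what lets the runtime locate the inlined map check again: both "test eax, imm32" (opcode 0xA9) and "mov r32, imm32" (0xB8+r) carry their 32-bit immediate starting one byte into the instruction, and that immediate holds -delta_to_patch_site. A minimal sketch of the decoding side, under that standard ia32 encoding assumption (the helper name is hypothetical):

  #include <cstdint>
  #include <cstring>

  // Walk backwards from the test/mov instruction to the map-check
  // instruction whose distance it encodes. The immediate was emitted as
  // -delta_to_patch_site, so adding it moves back exactly delta bytes.
  uint8_t* MapCheckAddress(uint8_t* test_instruction_address) {
    int32_t negative_delta;
    std::memcpy(&negative_delta, test_instruction_address + 1,
                sizeof(negative_delta));
    return test_instruction_address + negative_delta;
  }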
9425 | 9427 |
9426 | 9428 |
9427 class DeferredReferenceGetKeyedValue: public DeferredCode { | 9429 class DeferredReferenceGetKeyedValue: public DeferredCode { |
9428 public: | 9430 public: |
9429 explicit DeferredReferenceGetKeyedValue(Register dst, | 9431 explicit DeferredReferenceGetKeyedValue(Register dst, |
9430 Register receiver, | 9432 Register receiver, |
(...skipping 45 matching lines...)
9476 __ call(ic, RelocInfo::CODE_TARGET); | 9478 __ call(ic, RelocInfo::CODE_TARGET); |
9477 // The delta from the start of the map-compare instruction to the | 9479 // The delta from the start of the map-compare instruction to the |
9478 // test instruction. We use masm_-> directly here instead of the __ | 9480 // test instruction. We use masm_-> directly here instead of the __ |
9479 // macro because the macro sometimes uses macro expansion to turn | 9481 // macro because the macro sometimes uses macro expansion to turn |
9480 // into something that can't return a value. This is encountered | 9482 // into something that can't return a value. This is encountered |
9481 // when doing generated code coverage tests. | 9483 // when doing generated code coverage tests. |
9482 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); | 9484 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); |
9483 // Here we use masm_-> instead of the __ macro because this is the | 9485 // Here we use masm_-> instead of the __ macro because this is the |
9484 // instruction that gets patched and coverage code gets in the way. | 9486 // instruction that gets patched and coverage code gets in the way. |
9485 masm_->test(eax, Immediate(-delta_to_patch_site)); | 9487 masm_->test(eax, Immediate(-delta_to_patch_site)); |
9486 __ IncrementCounter(COUNTERS->keyed_load_inline_miss(), 1); | 9488 Counters* counters = masm()->isolate()->counters(); |
| 9489 __ IncrementCounter(counters->keyed_load_inline_miss(), 1); |
9487 | 9490 |
9488 if (!dst_.is(eax)) __ mov(dst_, eax); | 9491 if (!dst_.is(eax)) __ mov(dst_, eax); |
9489 } | 9492 } |
9490 | 9493 |
9491 | 9494 |
9492 class DeferredReferenceSetKeyedValue: public DeferredCode { | 9495 class DeferredReferenceSetKeyedValue: public DeferredCode { |
9493 public: | 9496 public: |
9494 DeferredReferenceSetKeyedValue(Register value, | 9497 DeferredReferenceSetKeyedValue(Register value, |
9495 Register key, | 9498 Register key, |
9496 Register receiver, | 9499 Register receiver, |
(...skipping 15 matching lines...)
9512 Register value_; | 9515 Register value_; |
9513 Register key_; | 9516 Register key_; |
9514 Register receiver_; | 9517 Register receiver_; |
9515 Register scratch_; | 9518 Register scratch_; |
9516 Label patch_site_; | 9519 Label patch_site_; |
9517 StrictModeFlag strict_mode_; | 9520 StrictModeFlag strict_mode_; |
9518 }; | 9521 }; |
9519 | 9522 |
9520 | 9523 |
9521 void DeferredReferenceSetKeyedValue::Generate() { | 9524 void DeferredReferenceSetKeyedValue::Generate() { |
9522 __ IncrementCounter(COUNTERS->keyed_store_inline_miss(), 1); | 9525 Counters* counters = masm()->isolate()->counters(); |
| 9526 __ IncrementCounter(counters->keyed_store_inline_miss(), 1); |
9523 // Move value_ to eax, key_ to ecx, and receiver_ to edx. | 9527 // Move value_ to eax, key_ to ecx, and receiver_ to edx. |
9524 Register old_value = value_; | 9528 Register old_value = value_; |
9525 | 9529 |
9526 // First, move value to eax. | 9530 // First, move value to eax. |
9527 if (!value_.is(eax)) { | 9531 if (!value_.is(eax)) { |
9528 if (key_.is(eax)) { | 9532 if (key_.is(eax)) { |
9529 // Move key_ out of eax, preferably to ecx. | 9533 // Move key_ out of eax, preferably to ecx. |
9530 if (!value_.is(ecx) && !receiver_.is(ecx)) { | 9534 if (!value_.is(ecx) && !receiver_.is(ecx)) { |
9531 __ mov(ecx, key_); | 9535 __ mov(ecx, key_); |
9532 key_ = ecx; | 9536 key_ = ecx; |
(...skipping 56 matching lines...)
9589 // Restore value (returned from store IC) register. | 9593 // Restore value (returned from store IC) register. |
9590 if (!old_value.is(eax)) __ mov(old_value, eax); | 9594 if (!old_value.is(eax)) __ mov(old_value, eax); |
9591 } | 9595 } |
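The register shuffle above is a small parallel-move problem: three operands must land in the fixed IC slots eax/ecx/edx without any operand being overwritten before it is read, which is why the generated code prefers a mov into a free register and falls back to xchg. A plain C++ analogue of the same idea (indices stand in for registers; everything here is illustrative, not V8 API):

  #include <utility>

  // loc[i] is the register currently holding operand i; operand i must
  // end up in register i. Swapping (the xchg fallback) never loses a
  // value, so cycles such as (eax -> ecx -> edx -> eax) resolve cleanly.
  void RouteOperands(int loc[3], int regs[3]) {
    for (int want = 0; want < 3; ++want) {
      if (loc[want] == want) continue;       // already in its slot
      int src = loc[want];
      std::swap(regs[src], regs[want]);      // emulated xchg
      for (int other = 0; other < 3; ++other) {
        if (other != want && loc[other] == want) {
          loc[other] = src;                  // displaced operand now in src
          break;
        }
      }
      loc[want] = want;
    }
  }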
9592 | 9596 |
9593 | 9597 |
9594 Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { | 9598 Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { |
9595 #ifdef DEBUG | 9599 #ifdef DEBUG |
9596 int original_height = frame()->height(); | 9600 int original_height = frame()->height(); |
9597 #endif | 9601 #endif |
9598 | 9602 |
| 9603 Isolate* isolate = masm()->isolate(); |
| 9604 Factory* factory = isolate->factory(); |
| 9605 Counters* counters = isolate->counters(); |
| 9606 |
9599 bool contextual_load_in_builtin = | 9607 bool contextual_load_in_builtin = |
9600 is_contextual && | 9608 is_contextual && |
9601 (masm()->isolate()->bootstrapper()->IsActive() || | 9609 (isolate->bootstrapper()->IsActive() || |
9602 (!info_->closure().is_null() && info_->closure()->IsBuiltin())); | 9610 (!info_->closure().is_null() && info_->closure()->IsBuiltin())); |
9603 | 9611 |
9604 Result result; | 9612 Result result; |
9605 // Do not inline in the global code or when not in a loop. | 9613 // Do not inline in the global code or when not in a loop. |
9606 if (scope()->is_global_scope() || | 9614 if (scope()->is_global_scope() || |
9607 loop_nesting() == 0 || | 9615 loop_nesting() == 0 || |
9608 contextual_load_in_builtin) { | 9616 contextual_load_in_builtin) { |
9609 Comment cmnt(masm(), "[ Load from named Property"); | 9617 Comment cmnt(masm(), "[ Load from named Property"); |
9610 frame()->Push(name); | 9618 frame()->Push(name); |
9611 | 9619 |
(...skipping 25 matching lines...)
9637 // Check that the receiver is a heap object. | 9645 // Check that the receiver is a heap object. |
9638 __ test(receiver.reg(), Immediate(kSmiTagMask)); | 9646 __ test(receiver.reg(), Immediate(kSmiTagMask)); |
9639 deferred->Branch(zero); | 9647 deferred->Branch(zero); |
9640 } | 9648 } |
9641 | 9649 |
9642 __ bind(deferred->patch_site()); | 9650 __ bind(deferred->patch_site()); |
9643 // This is the map check instruction that will be patched (so we can't | 9651 // This is the map check instruction that will be patched (so we can't |
9644 // use the double underscore macro that may insert instructions). | 9652 // use the double underscore macro that may insert instructions). |
9645 // Initially use an invalid map to force a failure. | 9653 // Initially use an invalid map to force a failure. |
9646 masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset), | 9654 masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset), |
9647 Immediate(FACTORY->null_value())); | 9655 Immediate(factory->null_value())); |
9648 // This branch is always a forwards branch so it's always a fixed size | 9656 // This branch is always a forwards branch so it's always a fixed size |
9649 // which allows the assert below to succeed and patching to work. | 9657 // which allows the assert below to succeed and patching to work. |
9650 deferred->Branch(not_equal); | 9658 deferred->Branch(not_equal); |
9651 | 9659 |
9652 // The delta from the patch label to the actual load must be | 9660 // The delta from the patch label to the actual load must be |
9653 // statically known. | 9661 // statically known. |
9654 ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) == | 9662 ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) == |
9655 LoadIC::kOffsetToLoadInstruction); | 9663 LoadIC::kOffsetToLoadInstruction); |
9656 | 9664 |
9657 if (is_contextual) { | 9665 if (is_contextual) { |
9658 // Load the (initially invalid) cell and get its value. | 9666 // Load the (initially invalid) cell and get its value. |
9659 masm()->mov(result.reg(), FACTORY->null_value()); | 9667 masm()->mov(result.reg(), factory->null_value()); |
9660 if (FLAG_debug_code) { | 9668 if (FLAG_debug_code) { |
9661 __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset), | 9669 __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset), |
9662 FACTORY->global_property_cell_map()); | 9670 factory->global_property_cell_map()); |
9663 __ Assert(equal, "Uninitialized inlined contextual load"); | 9671 __ Assert(equal, "Uninitialized inlined contextual load"); |
9664 } | 9672 } |
9665 __ mov(result.reg(), | 9673 __ mov(result.reg(), |
9666 FieldOperand(result.reg(), JSGlobalPropertyCell::kValueOffset)); | 9674 FieldOperand(result.reg(), JSGlobalPropertyCell::kValueOffset)); |
9667 __ cmp(result.reg(), FACTORY->the_hole_value()); | 9675 __ cmp(result.reg(), factory->the_hole_value()); |
9668 deferred->Branch(equal); | 9676 deferred->Branch(equal); |
9669 bool is_dont_delete = false; | 9677 bool is_dont_delete = false; |
9670 if (!info_->closure().is_null()) { | 9678 if (!info_->closure().is_null()) { |
9671 // When doing lazy compilation we can check if the global cell | 9679 // When doing lazy compilation we can check if the global cell |
9672 // already exists and use its "don't delete" status as a hint. | 9680 // already exists and use its "don't delete" status as a hint. |
9673 AssertNoAllocation no_gc; | 9681 AssertNoAllocation no_gc; |
9674 v8::internal::GlobalObject* global_object = | 9682 v8::internal::GlobalObject* global_object = |
9675 info_->closure()->context()->global(); | 9683 info_->closure()->context()->global(); |
9676 LookupResult lookup; | 9684 LookupResult lookup; |
9677 global_object->LocalLookupRealNamedProperty(*name, &lookup); | 9685 global_object->LocalLookupRealNamedProperty(*name, &lookup); |
9678 if (lookup.IsProperty() && lookup.type() == NORMAL) { | 9686 if (lookup.IsProperty() && lookup.type() == NORMAL) { |
9679 ASSERT(lookup.holder() == global_object); | 9687 ASSERT(lookup.holder() == global_object); |
9680 ASSERT(global_object->property_dictionary()->ValueAt( | 9688 ASSERT(global_object->property_dictionary()->ValueAt( |
9681 lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell()); | 9689 lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell()); |
9682 is_dont_delete = lookup.IsDontDelete(); | 9690 is_dont_delete = lookup.IsDontDelete(); |
9683 } | 9691 } |
9684 } | 9692 } |
9685 deferred->set_is_dont_delete(is_dont_delete); | 9693 deferred->set_is_dont_delete(is_dont_delete); |
9686 if (!is_dont_delete) { | 9694 if (!is_dont_delete) { |
9687 __ cmp(result.reg(), FACTORY->the_hole_value()); | 9695 __ cmp(result.reg(), factory->the_hole_value()); |
9688 deferred->Branch(equal); | 9696 deferred->Branch(equal); |
9689 } else if (FLAG_debug_code) { | 9697 } else if (FLAG_debug_code) { |
9690 __ cmp(result.reg(), FACTORY->the_hole_value()); | 9698 __ cmp(result.reg(), factory->the_hole_value()); |
9691 __ Check(not_equal, "DontDelete cells can't contain the hole"); | 9699 __ Check(not_equal, "DontDelete cells can't contain the hole"); |
9692 } | 9700 } |
9693 __ IncrementCounter(COUNTERS->named_load_global_inline(), 1); | 9701 __ IncrementCounter(counters->named_load_global_inline(), 1); |
9694 if (is_dont_delete) { | 9702 if (is_dont_delete) { |
9695 __ IncrementCounter(COUNTERS->dont_delete_hint_hit(), 1); | 9703 __ IncrementCounter(counters->dont_delete_hint_hit(), 1); |
9696 } | 9704 } |
9697 } else { | 9705 } else { |
9698 // The initial (invalid) offset has to be large enough to force a 32-bit | 9706 // The initial (invalid) offset has to be large enough to force a 32-bit |
9699 // instruction encoding to allow patching with an arbitrary offset. Use | 9707 // instruction encoding to allow patching with an arbitrary offset. Use |
9700 // kMaxInt (minus kHeapObjectTag). | 9708 // kMaxInt (minus kHeapObjectTag). |
9701 int offset = kMaxInt; | 9709 int offset = kMaxInt; |
9702 masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset)); | 9710 masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset)); |
9703 __ IncrementCounter(COUNTERS->named_load_inline(), 1); | 9711 __ IncrementCounter(counters->named_load_inline(), 1); |
9704 } | 9712 } |
9705 | 9713 |
9706 deferred->BindExit(); | 9714 deferred->BindExit(); |
9707 } | 9715 } |
9708 ASSERT(frame()->height() == original_height - 1); | 9716 ASSERT(frame()->height() == original_height - 1); |
9709 return result; | 9717 return result; |
9710 } | 9718 } |
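The kMaxInt offset in the non-contextual branch above is not arbitrary: ia32 ModR/M encoding emits a small displacement as a single disp8 byte, which could never later be patched with a real 32-bit field offset. A one-line sketch of the encoding rule being exploited (assumed from the standard instruction set, not from this file):

  #include <cstdint>

  // Displacements outside [-128, 127] force the 4-byte disp32 form, so
  // the patcher always finds four bytes to rewrite. kMaxInt (minus
  // kHeapObjectTag) guarantees the long form.
  bool NeedsDisp32(int32_t displacement) {
    return displacement < -128 || displacement > 127;
  }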
9711 | 9719 |
9712 | 9720 |
9713 Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) { | 9721 Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) { |
(...skipping 167 matching lines...)
9881 // Key holds a smi. | 9889 // Key holds a smi. |
9882 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); | 9890 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
9883 __ mov(elements.reg(), | 9891 __ mov(elements.reg(), |
9884 FieldOperand(elements.reg(), | 9892 FieldOperand(elements.reg(), |
9885 key.reg(), | 9893 key.reg(), |
9886 times_2, | 9894 times_2, |
9887 FixedArray::kHeaderSize)); | 9895 FixedArray::kHeaderSize)); |
9888 result = elements; | 9896 result = elements; |
9889 __ cmp(Operand(result.reg()), Immediate(FACTORY->the_hole_value())); | 9897 __ cmp(Operand(result.reg()), Immediate(FACTORY->the_hole_value())); |
9890 deferred->Branch(equal); | 9898 deferred->Branch(equal); |
9891 __ IncrementCounter(COUNTERS->keyed_load_inline(), 1); | 9899 __ IncrementCounter(masm_->isolate()->counters()->keyed_load_inline(), 1); |
9892 | 9900 |
9893 deferred->BindExit(); | 9901 deferred->BindExit(); |
9894 } else { | 9902 } else { |
9895 Comment cmnt(masm_, "[ Load from keyed Property"); | 9903 Comment cmnt(masm_, "[ Load from keyed Property"); |
9896 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET); | 9904 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET); |
9897 // Make sure that we do not have a test instruction after the | 9905 // Make sure that we do not have a test instruction after the |
9898 // call. A test instruction after the call is used to | 9906 // call. A test instruction after the call is used to |
9899 // indicate that we have generated an inline version of the | 9907 // indicate that we have generated an inline version of the |
9900 // keyed load. The explicit nop instruction is here because | 9908 // keyed load. The explicit nop instruction is here because |
9901 // the push that follows might be peep-hole optimized away. | 9909 // the push that follows might be peep-hole optimized away. |
(...skipping 84 matching lines...)
9986 // Check that the key is within bounds. Both the key and the length of | 9994 // Check that the key is within bounds. Both the key and the length of |
9987 // the JSArray are smis (because the fixed array check above ensures the | 9995 // the JSArray are smis (because the fixed array check above ensures the |
9988 // elements are in fast case). Use unsigned comparison to handle negative | 9996 // elements are in fast case). Use unsigned comparison to handle negative |
9989 // keys. | 9997 // keys. |
9990 __ cmp(key.reg(), | 9998 __ cmp(key.reg(), |
9991 FieldOperand(receiver.reg(), JSArray::kLengthOffset)); | 9999 FieldOperand(receiver.reg(), JSArray::kLengthOffset)); |
9992 deferred->Branch(above_equal); | 10000 deferred->Branch(above_equal); |
9993 | 10001 |
9994 // Store the value. | 10002 // Store the value. |
9995 __ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg()); | 10003 __ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg()); |
9996 __ IncrementCounter(COUNTERS->keyed_store_inline(), 1); | 10004 __ IncrementCounter(masm_->isolate()->counters()->keyed_store_inline(), 1); |
9997 | 10005 |
9998 deferred->BindExit(); | 10006 deferred->BindExit(); |
9999 } else { | 10007 } else { |
10000 result = frame()->CallKeyedStoreIC(strict_mode_flag()); | 10008 result = frame()->CallKeyedStoreIC(strict_mode_flag()); |
10001 // Make sure that we do not have a test instruction after the | 10009 // Make sure that we do not have a test instruction after the |
10002 // call. A test instruction after the call is used to | 10010 // call. A test instruction after the call is used to |
10003 // indicate that we have generated an inline version of the | 10011 // indicate that we have generated an inline version of the |
10004 // keyed store. | 10012 // keyed store. |
10005 __ nop(); | 10013 __ nop(); |
10006 } | 10014 } |
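The single above_equal branch in the bounds check above rejects both out-of-range and negative keys, as the comment notes: smis keep their sign, so reinterpreting a negative key as unsigned yields a value larger than any valid length. A minimal sketch of the trick (plain ints standing in for smis):

  #include <cstdint>

  // One unsigned comparison covers both key < 0 and key >= length.
  bool KeyInBounds(int32_t key, int32_t length) {
    return static_cast<uint32_t>(key) < static_cast<uint32_t>(length);
  }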
(...skipping 210 matching lines...)
10217 __ add(Operand(edx), Immediate(16)); | 10225 __ add(Operand(edx), Immediate(16)); |
10218 __ add(dst, Operand(edx)); | 10226 __ add(dst, Operand(edx)); |
10219 __ add(src, Operand(edx)); | 10227 __ add(src, Operand(edx)); |
10220 __ sub(Operand(count), edx); | 10228 __ sub(Operand(count), edx); |
10221 | 10229 |
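Because the head of this routine is elided above, the arithmetic around edx reads oddly in isolation. Assuming the usual idiom (the skipped lines copy one unaligned 16-byte head and compute the fixup in edx), the net effect of this prologue is to advance dst, src, and count so that dst is 16-byte aligned for the movdqa loop. A sketch of that net effect under those assumptions (plain C++, hypothetical helper):

  #include <cstddef>
  #include <cstdint>
  #include <cstring>

  // Advance so that dst becomes 16-byte aligned. If dst was already
  // aligned the fixup is a full 16 bytes, which the unaligned head copy
  // has already covered, so no byte is skipped or copied twice.
  void AlignDestination(uint8_t*& dst, const uint8_t*& src, size_t& count) {
    size_t fixup = 16 - (reinterpret_cast<uintptr_t>(dst) & 0xF);
    std::memcpy(dst, src, 16);  // stands in for the stub's movdqu head copy
    dst += fixup;
    src += fixup;
    count -= fixup;
  }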
10222 // edi is now aligned. Check if esi is also aligned. | 10230 // edi is now aligned. Check if esi is also aligned. |
10223 Label unaligned_source; | 10231 Label unaligned_source; |
10224 __ test(Operand(src), Immediate(0x0F)); | 10232 __ test(Operand(src), Immediate(0x0F)); |
10225 __ j(not_zero, &unaligned_source); | 10233 __ j(not_zero, &unaligned_source); |
10226 { | 10234 { |
10227 __ IncrementCounter(COUNTERS->memcopy_aligned(), 1); | 10235 __ IncrementCounter(masm.isolate()->counters()->memcopy_aligned(), 1); |
10228 // Copy loop for aligned source and destination. | 10236 // Copy loop for aligned source and destination. |
10229 __ mov(edx, count); | 10237 __ mov(edx, count); |
10230 Register loop_count = ecx; | 10238 Register loop_count = ecx; |
10231 Register count = edx; | 10239 Register count = edx; |
10232 __ shr(loop_count, 5); | 10240 __ shr(loop_count, 5); |
10233 { | 10241 { |
10234 // Main copy loop. | 10242 // Main copy loop. |
10235 Label loop; | 10243 Label loop; |
10236 __ bind(&loop); | 10244 __ bind(&loop); |
10237 __ prefetch(Operand(src, 0x20), 1); | 10245 __ prefetch(Operand(src, 0x20), 1); |
(...skipping 27 matching lines...)
10265 __ mov(eax, Operand(esp, stack_offset + kDestinationOffset)); | 10273 __ mov(eax, Operand(esp, stack_offset + kDestinationOffset)); |
10266 __ pop(esi); | 10274 __ pop(esi); |
10267 __ pop(edi); | 10275 __ pop(edi); |
10268 __ ret(0); | 10276 __ ret(0); |
10269 } | 10277 } |
10270 __ Align(16); | 10278 __ Align(16); |
10271 { | 10279 { |
10272 // Copy loop for unaligned source and aligned destination. | 10280 // Copy loop for unaligned source and aligned destination. |
10273 // If source is not aligned, we can't read it as efficiently. | 10281 // If source is not aligned, we can't read it as efficiently. |
10274 __ bind(&unaligned_source); | 10282 __ bind(&unaligned_source); |
10275 __ IncrementCounter(COUNTERS->memcopy_unaligned(), 1); | 10283 __ IncrementCounter(masm.isolate()->counters()->memcopy_unaligned(), 1); |
10276 __ mov(edx, ecx); | 10284 __ mov(edx, ecx); |
10277 Register loop_count = ecx; | 10285 Register loop_count = ecx; |
10278 Register count = edx; | 10286 Register count = edx; |
10279 __ shr(loop_count, 5); | 10287 __ shr(loop_count, 5); |
10280 { | 10288 { |
10281 // Main copy loop | 10289 // Main copy loop |
10282 Label loop; | 10290 Label loop; |
10283 __ bind(&loop); | 10291 __ bind(&loop); |
10284 __ prefetch(Operand(src, 0x20), 1); | 10292 __ prefetch(Operand(src, 0x20), 1); |
10285 __ movdqu(xmm0, Operand(src, 0x00)); | 10293 __ movdqu(xmm0, Operand(src, 0x00)); |
(...skipping 23 matching lines...)
10309 __ movdqu(xmm0, Operand(src, count, times_1, -0x10)); | 10317 __ movdqu(xmm0, Operand(src, count, times_1, -0x10)); |
10310 __ movdqu(Operand(dst, count, times_1, -0x10), xmm0); | 10318 __ movdqu(Operand(dst, count, times_1, -0x10), xmm0); |
10311 | 10319 |
10312 __ mov(eax, Operand(esp, stack_offset + kDestinationOffset)); | 10320 __ mov(eax, Operand(esp, stack_offset + kDestinationOffset)); |
10313 __ pop(esi); | 10321 __ pop(esi); |
10314 __ pop(edi); | 10322 __ pop(edi); |
10315 __ ret(0); | 10323 __ ret(0); |
10316 } | 10324 } |
10317 | 10325 |
10318 } else { | 10326 } else { |
10319 __ IncrementCounter(COUNTERS->memcopy_noxmm(), 1); | 10327 __ IncrementCounter(masm.isolate()->counters()->memcopy_noxmm(), 1); |
10320 // SSE2 not supported. Unlikely to happen in practice. | 10328 // SSE2 not supported. Unlikely to happen in practice. |
10321 __ push(edi); | 10329 __ push(edi); |
10322 __ push(esi); | 10330 __ push(esi); |
10323 stack_offset += 2 * kPointerSize; | 10331 stack_offset += 2 * kPointerSize; |
10324 __ cld(); | 10332 __ cld(); |
10325 Register dst = edi; | 10333 Register dst = edi; |
10326 Register src = esi; | 10334 Register src = esi; |
10327 Register count = ecx; | 10335 Register count = ecx; |
10328 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset)); | 10336 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset)); |
10329 __ mov(src, Operand(esp, stack_offset + kSourceOffset)); | 10337 __ mov(src, Operand(esp, stack_offset + kSourceOffset)); |
(...skipping 40 matching lines...)
10370 memcpy(chunk->GetStartAddress(), desc.buffer, desc.instr_size); | 10378 memcpy(chunk->GetStartAddress(), desc.buffer, desc.instr_size); |
10371 CPU::FlushICache(chunk->GetStartAddress(), desc.instr_size); | 10379 CPU::FlushICache(chunk->GetStartAddress(), desc.instr_size); |
10372 return FUNCTION_CAST<MemCopyFunction>(chunk->GetStartAddress()); | 10380 return FUNCTION_CAST<MemCopyFunction>(chunk->GetStartAddress()); |
10373 } | 10381 } |
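The tail of the function above shows the standard install sequence for a hand-built stub: copy the assembler's buffer into an executable chunk, flush the instruction cache, and cast the start address to a C function pointer. A self-contained sketch of the same sequence, using POSIX mmap in place of V8's chunk allocator (an assumption, as is the exact MemCopyFunction signature; error handling omitted):

  #include <cstddef>
  #include <cstdint>
  #include <cstring>
  #include <sys/mman.h>

  typedef void (*MemCopyFunction)(void* dst, const void* src, size_t size);

  MemCopyFunction InstallStub(const uint8_t* code, size_t size) {
    void* chunk = mmap(nullptr, size, PROT_READ | PROT_WRITE | PROT_EXEC,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    std::memcpy(chunk, code, size);
    // ia32 keeps instruction fetch coherent with stores, so the explicit
    // CPU::FlushICache in the real code is a no-op here; it is kept for
    // portability to targets where the flush is required.
    return reinterpret_cast<MemCopyFunction>(chunk);
  }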
10374 | 10382 |
10375 #undef __ | 10383 #undef __ |
10376 | 10384 |
10377 } } // namespace v8::internal | 10385 } } // namespace v8::internal |
10378 | 10386 |
10379 #endif // V8_TARGET_ARCH_IA32 | 10387 #endif // V8_TARGET_ARCH_IA32 |