OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5549 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5560 | 5560 |
5561 | 5561 |
5562 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5562 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
5563 Register object = ToRegister(instr->value()); | 5563 Register object = ToRegister(instr->value()); |
5564 __ cmpp(ToRegister(instr->map()), | 5564 __ cmpp(ToRegister(instr->map()), |
5565 FieldOperand(object, HeapObject::kMapOffset)); | 5565 FieldOperand(object, HeapObject::kMapOffset)); |
5566 DeoptimizeIf(not_equal, instr->environment()); | 5566 DeoptimizeIf(not_equal, instr->environment()); |
5567 } | 5567 } |
5568 | 5568 |
5569 | 5569 |
| 5570 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, |
| 5571 Register object, |
| 5572 Register index) { |
| 5573 PushSafepointRegistersScope scope(this); |
| 5574 __ Push(object); |
| 5575 __ Push(index); |
| 5576 __ xorp(rsi, rsi); |
| 5577 __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble); |
| 5578 RecordSafepointWithRegisters( |
| 5579 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt); |
| 5580 __ StoreToSafepointRegisterSlot(object, rax); |
| 5581 } |
| 5582 |
| 5583 |
// Loads a field from |object| selected by an encoded smi |index|:
//   - low flag bit set: field needs the runtime (deferred code calls
//     Runtime::kLoadMutableDouble);
//   - otherwise, after stripping the flag bit: a non-negative index is an
//     in-object property, a negative index is an out-of-object property
//     read from the properties backing store.
// The result is left in |object|.
void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
  // Deferred slow path: forwards object/index to
  // DoDeferredLoadMutableDouble, which calls the runtime.
  class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode {
   public:
    DeferredLoadMutableDouble(LCodeGen* codegen,
                              LLoadFieldByIndex* instr,
                              Register object,
                              Register index)
        : LDeferredCode(codegen),
          instr_(instr),
          object_(object),
          index_(index) {
    }
    virtual void Generate() V8_OVERRIDE {
      codegen()->DoDeferredLoadMutableDouble(instr_, object_, index_);
    }
    virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
   private:
    LLoadFieldByIndex* instr_;
    Register object_;
    Register index_;
  };

  Register object = ToRegister(instr->object());
  Register index = ToRegister(instr->index());

  DeferredLoadMutableDouble* deferred;
  deferred = new(zone()) DeferredLoadMutableDouble(this, instr, object, index);

  Label out_of_object, done;
  // Test the low flag bit of the smi index (Smi::FromInt(1)); if it is
  // set, take the deferred runtime path with the index still encoded.
  __ Move(kScratchRegister, Smi::FromInt(1));
  __ testq(index, kScratchRegister);
  __ j(not_zero, deferred->entry());

  // Strip the flag bit; the value remains a valid smi afterwards.
  __ sar(index, Immediate(1));

  __ SmiToInteger32(index, index);
  __ cmpl(index, Immediate(0));
  __ j(less, &out_of_object, Label::kNear);
  // In-object property: load directly from the object's fields.
  __ movp(object, FieldOperand(object,
                               index,
                               times_pointer_size,
                               JSObject::kHeaderSize));
  __ jmp(&done, Label::kNear);

  __ bind(&out_of_object);
  // Out-of-object property: load from the properties backing store.
  __ movp(object, FieldOperand(object, JSObject::kPropertiesOffset));
  __ negl(index);
  // Index is now equal to out of object property index plus 1.
  __ movp(object, FieldOperand(object,
                               index,
                               times_pointer_size,
                               FixedArray::kHeaderSize - kPointerSize));
  // Deferred code falls back in here with the result already in |object|.
  __ bind(deferred->exit());
  __ bind(&done);
}
5594 | 5639 |
5595 | 5640 |
5596 #undef __ | 5641 #undef __ |
5597 | 5642 |
5598 } } // namespace v8::internal | 5643 } } // namespace v8::internal |
5599 | 5644 |
5600 #endif // V8_TARGET_ARCH_X64 | 5645 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |