OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5546 matching lines...) |
5557 | 5557 |
5558 | 5558 |
5559 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5559 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
5560 Register object = ToRegister(instr->value()); | 5560 Register object = ToRegister(instr->value()); |
5561 __ cmpp(ToRegister(instr->map()), | 5561 __ cmpp(ToRegister(instr->map()), |
5562 FieldOperand(object, HeapObject::kMapOffset)); | 5562 FieldOperand(object, HeapObject::kMapOffset)); |
5563 DeoptimizeIf(not_equal, instr->environment()); | 5563 DeoptimizeIf(not_equal, instr->environment()); |
5564 } | 5564 } |
5565 | 5565 |
5566 | 5566 |
| 5567 void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, |
| 5568 Register object, |
| 5569 Register index) { |
| 5570 PushSafepointRegistersScope scope(this); |
| 5571 __ Push(object); |
| 5572 __ Push(index); |
| 5573 __ xorp(rsi, rsi); |
| 5574 __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble); |
| 5575 RecordSafepointWithRegisters( |
| 5576 instr->pointer_map(), 2, Safepoint::kNoLazyDeopt); |
| 5577 __ StoreToSafepointRegisterSlot(object, rax); |
| 5578 } |
| 5579 |
| 5580 |
5567 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { | 5581 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { |
| 5582 class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode { |
| 5583 public: |
| 5584 DeferredLoadMutableDouble(LCodeGen* codegen, |
| 5585 LLoadFieldByIndex* instr, |
| 5586 Register object, |
| 5587 Register index) |
| 5588 : LDeferredCode(codegen), |
| 5589 instr_(instr), |
| 5590 object_(object), |
| 5591 index_(index) { |
| 5592 } |
| 5593 virtual void Generate() V8_OVERRIDE { |
| 5594 codegen()->DoDeferredLoadMutableDouble(instr_, object_, index_); |
| 5595 } |
| 5596 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } |
| 5597 private: |
| 5598 LLoadFieldByIndex* instr_; |
| 5599 Register object_; |
| 5600 Register index_; |
| 5601 }; |
| 5602 |
5568 Register object = ToRegister(instr->object()); | 5603 Register object = ToRegister(instr->object()); |
5569 Register index = ToRegister(instr->index()); | 5604 Register index = ToRegister(instr->index()); |
5570 | 5605 |
| 5606 DeferredLoadMutableDouble* deferred; |
| 5607 deferred = new(zone()) DeferredLoadMutableDouble(this, instr, object, index); |
| 5608 |
5571 Label out_of_object, done; | 5609 Label out_of_object, done; |
| 5610 __ Move(kScratchRegister, Smi::FromInt(1)); |
| 5611 __ testp(index, kScratchRegister); |
| 5612 __ j(not_zero, deferred->entry()); |
| 5613 |
| 5614 __ sarp(index, Immediate(1)); |
| 5615 |
5572 __ SmiToInteger32(index, index); | 5616 __ SmiToInteger32(index, index); |
5573 __ cmpl(index, Immediate(0)); | 5617 __ cmpl(index, Immediate(0)); |
5574 __ j(less, &out_of_object, Label::kNear); | 5618 __ j(less, &out_of_object, Label::kNear); |
5575 __ movp(object, FieldOperand(object, | 5619 __ movp(object, FieldOperand(object, |
5576 index, | 5620 index, |
5577 times_pointer_size, | 5621 times_pointer_size, |
5578 JSObject::kHeaderSize)); | 5622 JSObject::kHeaderSize)); |
5579 __ jmp(&done, Label::kNear); | 5623 __ jmp(&done, Label::kNear); |
5580 | 5624 |
5581 __ bind(&out_of_object); | 5625 __ bind(&out_of_object); |
5582 __ movp(object, FieldOperand(object, JSObject::kPropertiesOffset)); | 5626 __ movp(object, FieldOperand(object, JSObject::kPropertiesOffset)); |
5583 __ negl(index); | 5627 __ negl(index); |
5584 // Index is now equal to the out-of-object property index plus 1. | 5628 // Index is now equal to the out-of-object property index plus 1. |
5585 __ movp(object, FieldOperand(object, | 5629 __ movp(object, FieldOperand(object, |
5586 index, | 5630 index, |
5587 times_pointer_size, | 5631 times_pointer_size, |
5588 FixedArray::kHeaderSize - kPointerSize)); | 5632 FixedArray::kHeaderSize - kPointerSize)); |
| 5633 __ bind(deferred->exit()); |
5589 __ bind(&done); | 5634 __ bind(&done); |
5590 } | 5635 } |
5591 | 5636 |
5592 | 5637 |
5593 #undef __ | 5638 #undef __ |
5594 | 5639 |
5595 } } // namespace v8::internal | 5640 } } // namespace v8::internal |
5596 | 5641 |
5597 #endif // V8_TARGET_ARCH_X64 | 5642 #endif // V8_TARGET_ARCH_X64 |
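
For reference, the NEW side of DoLoadFieldByIndex decodes the smi-tagged field index as follows: bit 0 of the smi payload marks a mutable (heap-allocated) double, which is loaded through the deferred call to Runtime::kLoadMutableDouble; the remaining bits, arithmetically shifted right by one, form a signed index where non-negative values address in-object fields and negative values address the out-of-object properties array. A minimal C++ sketch of that decoding, using illustrative names (DecodedFieldIndex, DecodeFieldIndex) that are not V8 API:

  // Models the index decoding in DoLoadFieldByIndex; assumes the compiler
  // shifts negative signed values arithmetically, matching sarp.
  struct DecodedFieldIndex {
    bool is_mutable_double;  // bit 0 set: take the deferred runtime path
    bool out_of_object;      // shifted index < 0: properties backing store
    int array_index;         // pointer-sized slot in the chosen store
  };

  DecodedFieldIndex DecodeFieldIndex(int smi_payload) {
    DecodedFieldIndex d;
    // __ testp(index, Smi::FromInt(1)); __ j(not_zero, deferred->entry());
    d.is_mutable_double = (smi_payload & 1) != 0;
    // __ sarp(index, Immediate(1)); __ SmiToInteger32(index, index);
    int index = smi_payload >> 1;
    // __ cmpl(index, Immediate(0)); __ j(less, &out_of_object, ...);
    d.out_of_object = index < 0;
    // Negative indices are offset by one, which is why the generated code
    // uses the displacement FixedArray::kHeaderSize - kPointerSize.
    d.array_index = d.out_of_object ? -index - 1 : index;
    return d;
  }

On the deferred path, DoDeferredLoadMutableDouble zeroes rsi (the context register on x64) so the runtime call runs with a smi-zero context, records a safepoint covering the two pushed arguments, and writes the heap number returned in rax back through the safepoint register slot for |object|.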