OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1657 matching lines...)
1668 __ cmpq(kScratchRegister, FieldOperand(object, | 1668 __ cmpq(kScratchRegister, FieldOperand(object, |
1669 JSDate::kCacheStampOffset)); | 1669 JSDate::kCacheStampOffset)); |
1670 __ j(not_equal, &runtime, Label::kNear); | 1670 __ j(not_equal, &runtime, Label::kNear); |
1671 __ movq(result, FieldOperand(object, JSDate::kValueOffset + | 1671 __ movq(result, FieldOperand(object, JSDate::kValueOffset + |
1672 kPointerSize * index->value())); | 1672 kPointerSize * index->value())); |
1673 __ jmp(&done, Label::kNear); | 1673 __ jmp(&done, Label::kNear); |
1674 } | 1674 } |
1675 __ bind(&runtime); | 1675 __ bind(&runtime); |
1676 __ PrepareCallCFunction(2); | 1676 __ PrepareCallCFunction(2); |
1677 __ movq(arg_reg_1, object); | 1677 __ movq(arg_reg_1, object); |
1678 __ movq(arg_reg_2, index, RelocInfo::NONE64); | 1678 __ Move(arg_reg_2, index, RelocInfo::NONE64); |
1679 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 1679 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
1680 __ bind(&done); | 1680 __ bind(&done); |
1681 } | 1681 } |
1682 } | 1682 } |
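A note on the movq-to-Move changes in this hunk: routing 64-bit constant loads through the macro assembler's Move keeps a single choke point for immediates that relocation may later rewrite. The sketch below is a standalone illustration of the underlying constraint, not V8 code; EmitMovImm64 and the buffer layout are invented for the example. A constant carrying relocation info must be emitted with a fixed-width immediate field (movabs), so a patcher can overwrite the 8 bytes in place without re-encoding the instruction.

    // Conceptual sketch, not V8 code: patching a 64-bit immediate in
    // emitted machine code. The immediate field is always 8 bytes at a
    // fixed offset, regardless of the value, so relocation can rewrite
    // it without changing the instruction's size or layout.
    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <cstdio>

    // Emit "movabs rax, imm64" (REX.W prefix + B8 opcode + imm64) and
    // return the offset of the 8-byte immediate field.
    size_t EmitMovImm64(uint8_t* buf, uint64_t imm) {
      buf[0] = 0x48;               // REX.W: 64-bit operand size
      buf[1] = 0xB8;               // MOV rax, imm64
      std::memcpy(buf + 2, &imm, 8);
      return 2;                    // immediate always starts at byte 2
    }

    int main() {
      uint8_t code[10];
      size_t imm_at = EmitMovImm64(code, 0xDEADBEEF);  // placeholder value
      // A later "relocation" pass patches in the real value; this works
      // only because the field width does not depend on the value.
      uint64_t patched = 0x123456789ABCDEF0ull;
      std::memcpy(code + imm_at, &patched, 8);
      std::printf("immediate field at offset %zu\n", imm_at);
    }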
1683 | 1683 |
1684 | 1684 |
1685 Operand LCodeGen::BuildSeqStringOperand(Register string, | 1685 Operand LCodeGen::BuildSeqStringOperand(Register string, |
1686 LOperand* index, | 1686 LOperand* index, |
1687 String::Encoding encoding) { | 1687 String::Encoding encoding) { |
1688 if (index->IsConstantOperand()) { | 1688 if (index->IsConstantOperand()) { |
(...skipping 900 matching lines...)
2589 | 2589 |
2590 // This is the inlined call site instanceof cache. The two occurrences of the | 2590 // This is the inlined call site instanceof cache. The two occurrences of the |
2591 // hole value will be patched to the last map/result pair generated by the | 2591 // hole value will be patched to the last map/result pair generated by the |
2592 // instanceof stub. | 2592 // instanceof stub. |
2593 Label cache_miss; | 2593 Label cache_miss; |
2594 // Use a temp register to avoid memory operands with variable lengths. | 2594 // Use a temp register to avoid memory operands with variable lengths. |
2595 Register map = ToRegister(instr->temp()); | 2595 Register map = ToRegister(instr->temp()); |
2596 __ movq(map, FieldOperand(object, HeapObject::kMapOffset)); | 2596 __ movq(map, FieldOperand(object, HeapObject::kMapOffset)); |
2597 __ bind(deferred->map_check()); // Label for calculating code patching. | 2597 __ bind(deferred->map_check()); // Label for calculating code patching. |
2598 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); | 2598 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); |
2599 __ movq(kScratchRegister, cache_cell, RelocInfo::CELL); | 2599 __ Move(kScratchRegister, cache_cell, RelocInfo::CELL); |
2600 __ cmpq(map, Operand(kScratchRegister, 0)); | 2600 __ cmpq(map, Operand(kScratchRegister, 0)); |
2601 __ j(not_equal, &cache_miss, Label::kNear); | 2601 __ j(not_equal, &cache_miss, Label::kNear); |
2602 // Patched to load either true or false. | 2602 // Patched to load either true or false. |
2603 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); | 2603 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); |
2604 #ifdef DEBUG | 2604 #ifdef DEBUG |
2605 // Check that the code size between patch label and patch sites is invariant. | 2605 // Check that the code size between patch label and patch sites is invariant. |
2606 Label end_of_patched_code; | 2606 Label end_of_patched_code; |
2607 __ bind(&end_of_patched_code); | 2607 __ bind(&end_of_patched_code); |
2608 ASSERT(true); | 2608 ASSERT(true); |
2609 #endif | 2609 #endif |
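For readers unfamiliar with the cache being patched above, here is a minimal sketch of the idea in plain C++; all names and types (Map, InstanceofCacheCell, kHole) are stand-ins invented for the example, not V8's. The call site compares the receiver's map against a cell, and on a miss the slow path "patches" the cell with the last map/result pair. Seeding the cell with a hole sentinel that can never equal a real map guarantees the first execution takes the miss path.

    // Conceptual sketch of a patched call-site cache (not V8 code).
    struct Map {};                      // stand-in for a hidden class
    static const Map* kHole = nullptr;  // sentinel: "not patched yet"

    struct InstanceofCacheCell {
      const Map* map = kHole;  // patched to the last map checked
      bool result = false;     // patched to the matching answer
    };

    bool InstanceOfWithCache(const Map* receiver_map,
                             InstanceofCacheCell* cell,
                             bool (*slow_path)(const Map*)) {
      if (receiver_map == cell->map) return cell->result;  // fast path
      bool result = slow_path(receiver_map);               // cache miss
      cell->map = receiver_map;                            // "patch" the cell
      cell->result = result;
      return result;
    }

    static bool SlowInstanceOf(const Map*) { return true; }  // stub

    int main() {
      Map receiver_map;
      InstanceofCacheCell cell;
      InstanceOfWithCache(&receiver_map, &cell, SlowInstanceOf);  // miss
      InstanceOfWithCache(&receiver_map, &cell, SlowInstanceOf);  // hit
    }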
(...skipping 146 matching lines...)
2756 Handle<Cell> cell_handle = instr->hydrogen()->cell().handle(); | 2756 Handle<Cell> cell_handle = instr->hydrogen()->cell().handle(); |
2757 | 2757 |
2758 // If the cell we are storing to contains the hole it could have | 2758 // If the cell we are storing to contains the hole it could have |
2759 // been deleted from the property dictionary. In that case, we need | 2759 // been deleted from the property dictionary. In that case, we need |
2760 // to update the property details in the property dictionary to mark | 2760 // to update the property details in the property dictionary to mark |
2761 // it as no longer deleted. We deoptimize in that case. | 2761 // it as no longer deleted. We deoptimize in that case. |
2762 if (instr->hydrogen()->RequiresHoleCheck()) { | 2762 if (instr->hydrogen()->RequiresHoleCheck()) { |
2763 // We have a temp because CompareRoot might clobber kScratchRegister. | 2763 // We have a temp because CompareRoot might clobber kScratchRegister. |
2764 Register cell = ToRegister(instr->temp()); | 2764 Register cell = ToRegister(instr->temp()); |
2765 ASSERT(!value.is(cell)); | 2765 ASSERT(!value.is(cell)); |
2766 __ movq(cell, cell_handle, RelocInfo::CELL); | 2766 __ Move(cell, cell_handle, RelocInfo::CELL); |
2767 __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex); | 2767 __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex); |
2768 DeoptimizeIf(equal, instr->environment()); | 2768 DeoptimizeIf(equal, instr->environment()); |
2769 // Store the value. | 2769 // Store the value. |
2770 __ movq(Operand(cell, 0), value); | 2770 __ movq(Operand(cell, 0), value); |
2771 } else { | 2771 } else { |
2772 // Store the value. | 2772 // Store the value. |
2773 __ movq(kScratchRegister, cell_handle, RelocInfo::CELL); | 2773 __ Move(kScratchRegister, cell_handle, RelocInfo::CELL); |
2774 __ movq(Operand(kScratchRegister, 0), value); | 2774 __ movq(Operand(kScratchRegister, 0), value); |
2775 } | 2775 } |
2776 // Cells are always rescanned, so no write barrier here. | 2776 // Cells are always rescanned, so no write barrier here. |
2777 } | 2777 } |
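The hole check in the global-cell store above guards against silently resurrecting a deleted property: a deleted global leaves the hole in its cell, and a plain store would bypass the property-dictionary bookkeeping. A minimal sketch of that control flow, with assumed stand-in types (Object as a plain integer, kTheHole as an arbitrary sentinel) rather than V8's tagged values:

    // Conceptual sketch of the store-global-cell logic (not V8 code).
    #include <cstdint>

    using Object = intptr_t;
    static const Object kTheHole = -1;  // assumed "deleted" sentinel

    // Returns false where the generated code would deoptimize.
    bool StoreGlobalCell(Object* cell, Object value,
                         bool requires_hole_check) {
      if (requires_hole_check && *cell == kTheHole) {
        return false;  // property was deleted; take the deopt path
      }
      *cell = value;   // cells are always rescanned: no write barrier
      return true;
    }

    int main() {
      Object cell = kTheHole;
      bool ok = StoreGlobalCell(&cell, 42, /*requires_hole_check=*/true);
      (void)ok;  // ok == false here, mirroring the deopt above
    }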
2778 | 2778 |
2779 | 2779 |
2780 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { | 2780 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { |
2781 ASSERT(ToRegister(instr->context()).is(rsi)); | 2781 ASSERT(ToRegister(instr->context()).is(rsi)); |
2782 ASSERT(ToRegister(instr->global_object()).is(rdx)); | 2782 ASSERT(ToRegister(instr->global_object()).is(rdx)); |
2783 ASSERT(ToRegister(instr->value()).is(rax)); | 2783 ASSERT(ToRegister(instr->value()).is(rax)); |
(...skipping 1571 matching lines...)
4355 Handle<Map> from_map = instr->original_map(); | 4355 Handle<Map> from_map = instr->original_map(); |
4356 Handle<Map> to_map = instr->transitioned_map(); | 4356 Handle<Map> to_map = instr->transitioned_map(); |
4357 ElementsKind from_kind = instr->from_kind(); | 4357 ElementsKind from_kind = instr->from_kind(); |
4358 ElementsKind to_kind = instr->to_kind(); | 4358 ElementsKind to_kind = instr->to_kind(); |
4359 | 4359 |
4360 Label not_applicable; | 4360 Label not_applicable; |
4361 __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map); | 4361 __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map); |
4362 __ j(not_equal, ¬_applicable); | 4362 __ j(not_equal, ¬_applicable); |
4363 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { | 4363 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { |
4364 Register new_map_reg = ToRegister(instr->new_map_temp()); | 4364 Register new_map_reg = ToRegister(instr->new_map_temp()); |
4365 __ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT); | 4365 __ Move(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT); |
4366 __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); | 4366 __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); |
4367 // Write barrier. | 4367 // Write barrier. |
4368 ASSERT_NE(instr->temp(), NULL); | 4368 ASSERT_NE(instr->temp(), NULL); |
4369 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, | 4369 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, |
4370 ToRegister(instr->temp()), kDontSaveFPRegs); | 4370 ToRegister(instr->temp()), kDontSaveFPRegs); |
4371 } else { | 4371 } else { |
4372 ASSERT(ToRegister(instr->context()).is(rsi)); | 4372 ASSERT(ToRegister(instr->context()).is(rsi)); |
4373 PushSafepointRegistersScope scope(this); | 4373 PushSafepointRegistersScope scope(this); |
4374 if (!object_reg.is(rax)) { | 4374 if (!object_reg.is(rax)) { |
4375 __ movq(rax, object_reg); | 4375 __ movq(rax, object_reg); |
(...skipping 1269 matching lines...)
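Returning to the elements-kind transition hunk above: when the transition is a simple map change, the generated code is one map store plus a write barrier, since the new map is a heap pointer written into a possibly old-space object and the GC must learn about the slot. A conceptual sketch with invented stand-ins (RecordWrite is a no-op stub standing in for the barrier, not V8's RecordWriteField):

    // Conceptual sketch of a simple map-change transition (not V8 code).
    struct Map {};
    struct HeapObject { Map* map; };

    static void RecordWrite(HeapObject*, Map**) {}  // no-op barrier stub

    void TransitionSimple(HeapObject* object, Map* to_map,
                          void (*record_write)(HeapObject*, Map**)) {
      object->map = to_map;                // same layout: just swap the map
      record_write(object, &object->map);  // write barrier: remember slot
    }

    int main() {
      Map from, to;
      HeapObject obj{&from};
      TransitionSimple(&obj, &to, RecordWrite);
    }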
5645 FixedArray::kHeaderSize - kPointerSize)); | 5645 FixedArray::kHeaderSize - kPointerSize)); |
5646 __ bind(&done); | 5646 __ bind(&done); |
5647 } | 5647 } |
5648 | 5648 |
5649 | 5649 |
5650 #undef __ | 5650 #undef __ |
5651 | 5651 |
5652 } } // namespace v8::internal | 5652 } } // namespace v8::internal |
5653 | 5653 |
5654 #endif // V8_TARGET_ARCH_X64 | 5654 #endif // V8_TARGET_ARCH_X64 |