OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4072 matching lines...) |
4083 } | 4083 } |
4084 jmp(gc_required); | 4084 jmp(gc_required); |
4085 return; | 4085 return; |
4086 } | 4086 } |
4087 ASSERT(!result.is(result_end)); | 4087 ASSERT(!result.is(result_end)); |
4088 | 4088 |
4089 // Load address of new object into result. | 4089 // Load address of new object into result. |
4090 LoadAllocationTopHelper(result, scratch, flags); | 4090 LoadAllocationTopHelper(result, scratch, flags); |
4091 | 4091 |
4092 if (isolate()->heap_profiler()->is_tracking_allocations()) { | 4092 if (isolate()->heap_profiler()->is_tracking_allocations()) { |
4093 RecordObjectAllocation(isolate(), result, object_size); | 4093 push(Immediate(object_size)); |
| 4094 push(result); |
| 4095 RecordObjectAllocationStub stub; |
| 4096 CallStub(&stub); |
| 4097 addq(rsp, Immediate(2 * kPointerSize)); |
4094 } | 4098 } |
4095 | 4099 |
4096 // Align the next allocation. Storing the filler map without checking top is | 4100 // Align the next allocation. Storing the filler map without checking top is |
4097 // safe in new-space because the limit of the heap is aligned there. | 4101 // safe in new-space because the limit of the heap is aligned there. |
4098 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { | 4102 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { |
4099 testq(result, Immediate(kDoubleAlignmentMask)); | 4103 testq(result, Immediate(kDoubleAlignmentMask)); |
4100 Check(zero, kAllocationIsNotDoubleAligned); | 4104 Check(zero, kAllocationIsNotDoubleAligned); |
4101 } | 4105 } |
4102 | 4106 |
4103 // Calculate new top and bail out if new space is exhausted. | 4107 // Calculate new top and bail out if new space is exhausted. |
(...skipping 62 matching lines...) |
4166 } | 4170 } |
4167 jmp(gc_required); | 4171 jmp(gc_required); |
4168 return; | 4172 return; |
4169 } | 4173 } |
4170 ASSERT(!result.is(result_end)); | 4174 ASSERT(!result.is(result_end)); |
4171 | 4175 |
4172 // Load address of new object into result. | 4176 // Load address of new object into result. |
4173 LoadAllocationTopHelper(result, scratch, flags); | 4177 LoadAllocationTopHelper(result, scratch, flags); |
4174 | 4178 |
4175 if (isolate()->heap_profiler()->is_tracking_allocations()) { | 4179 if (isolate()->heap_profiler()->is_tracking_allocations()) { |
4176 RecordObjectAllocation(isolate(), result, object_size); | 4180 push(object_size); |
| 4181 push(result); |
| 4182 RecordObjectAllocationStub stub; |
| 4183 CallStub(&stub); |
| 4184 addq(rsp, Immediate(2 * kRegisterSize)); |
4177 } | 4185 } |
4178 | 4186 |
4179 // Align the next allocation. Storing the filler map without checking top is | 4187 // Align the next allocation. Storing the filler map without checking top is |
4180 // safe in new-space because the limit of the heap is aligned there. | 4188 // safe in new-space because the limit of the heap is aligned there. |
4181 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { | 4189 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { |
4182 testq(result, Immediate(kDoubleAlignmentMask)); | 4190 testq(result, Immediate(kDoubleAlignmentMask)); |
4183 Check(zero, kAllocationIsNotDoubleAligned); | 4191 Check(zero, kAllocationIsNotDoubleAligned); |
4184 } | 4192 } |
4185 | 4193 |
4186 // Calculate new top and bail out if new space is exhausted. | 4194 // Calculate new top and bail out if new space is exhausted. |
(...skipping 742 matching lines...) |
4929 movq(kScratchRegister, new_space_start); | 4937 movq(kScratchRegister, new_space_start); |
4930 cmpq(scratch_reg, kScratchRegister); | 4938 cmpq(scratch_reg, kScratchRegister); |
4931 j(less, no_memento_found); | 4939 j(less, no_memento_found); |
4932 cmpq(scratch_reg, ExternalOperand(new_space_allocation_top)); | 4940 cmpq(scratch_reg, ExternalOperand(new_space_allocation_top)); |
4933 j(greater, no_memento_found); | 4941 j(greater, no_memento_found); |
4934 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), | 4942 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), |
4935 Heap::kAllocationMementoMapRootIndex); | 4943 Heap::kAllocationMementoMapRootIndex); |
4936 } | 4944 } |
4937 | 4945 |
4938 | 4946 |
4939 void MacroAssembler::RecordObjectAllocation(Isolate* isolate, | |
4940 Register object, | |
4941 Register object_size) { | |
4942 FrameScope frame(this, StackFrame::EXIT); | |
4943 PushSafepointRegisters(); | |
4944 PrepareCallCFunction(3); | |
4945 // In case object is rdx | |
4946 movq(kScratchRegister, object); | |
4947 movq(arg_reg_3, object_size); | |
4948 movq(arg_reg_2, kScratchRegister); | |
4949 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); | |
4950 CallCFunction( | |
4951 ExternalReference::record_object_allocation_function(isolate), 3); | |
4952 PopSafepointRegisters(); | |
4953 } | |
4954 | |
4955 | |
4956 void MacroAssembler::RecordObjectAllocation(Isolate* isolate, | |
4957 Register object, | |
4958 int object_size) { | |
4959 FrameScope frame(this, StackFrame::EXIT); | |
4960 PushSafepointRegisters(); | |
4961 PrepareCallCFunction(3); | |
4962 movq(arg_reg_2, object); | |
4963 movq(arg_reg_3, Immediate(object_size)); | |
4964 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); | |
4965 CallCFunction( | |
4966 ExternalReference::record_object_allocation_function(isolate), 3); | |
4967 PopSafepointRegisters(); | |
4968 } | |
4969 | |
4970 | |
4971 } } // namespace v8::internal | 4947 } } // namespace v8::internal |
4972 | 4948 |
4973 #endif // V8_TARGET_ARCH_X64 | 4949 #endif // V8_TARGET_ARCH_X64 |
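For reference, the DOUBLE_ALIGNMENT debug check kept on both sides of this diff (testq against kDoubleAlignmentMask followed by Check(zero, kAllocationIsNotDoubleAligned)) reduces to a mask test on the allocation start. Below is a minimal standalone sketch in plain C++, not V8 code; it assumes kDoubleAlignmentMask equals kDoubleSize - 1, i.e. 7 on x64.

// Standalone illustration (hypothetical helper, not part of V8).
#include <cassert>
#include <cstdint>

// Assumption: the mask is the double size minus one, i.e. 7 on x64.
constexpr uintptr_t kDoubleAlignmentMask = sizeof(double) - 1;

// An allocation start is double-aligned iff its low three bits are zero,
// which is what the testq + Check(zero, ...) sequence above asserts.
inline bool IsDoubleAligned(uintptr_t address) {
  return (address & kDoubleAlignmentMask) == 0;
}

int main() {
  assert(IsDoubleAligned(0x1000));   // 8-byte aligned: the check passes
  assert(!IsDoubleAligned(0x1004));  // misaligned by 4: Check would abort
  return 0;
}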