| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 4075 matching lines...) |
| 4086 } | 4086 } |
| 4087 } | 4087 } |
| 4088 jmp(gc_required); | 4088 jmp(gc_required); |
| 4089 return; | 4089 return; |
| 4090 } | 4090 } |
| 4091 ASSERT(!result.is(result_end)); | 4091 ASSERT(!result.is(result_end)); |
| 4092 | 4092 |
| 4093 // Load address of new object into result. | 4093 // Load address of new object into result. |
| 4094 LoadAllocationTopHelper(result, scratch, flags); | 4094 LoadAllocationTopHelper(result, scratch, flags); |
| 4095 | 4095 |
| | 4096 if (isolate()->heap_profiler()->is_tracking_allocations()) { |
| | 4097 RecordObjectAllocation(isolate(), result, object_size); |
| | 4098 } |
| | 4099 |
| 4096 // Align the next allocation. Storing the filler map without checking top is | 4100 // Align the next allocation. Storing the filler map without checking top is |
| 4097 // safe in new-space because the limit of the heap is aligned there. | 4101 // safe in new-space because the limit of the heap is aligned there. |
| 4098 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { | 4102 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { |
| 4099 testq(result, Immediate(kDoubleAlignmentMask)); | 4103 testq(result, Immediate(kDoubleAlignmentMask)); |
| 4100 Check(zero, kAllocationIsNotDoubleAligned); | 4104 Check(zero, kAllocationIsNotDoubleAligned); |
| 4101 } | 4105 } |
| 4102 | 4106 |
| 4103 // Calculate new top and bail out if new space is exhausted. | 4107 // Calculate new top and bail out if new space is exhausted. |
| 4104 ExternalReference allocation_limit = | 4108 ExternalReference allocation_limit = |
| 4105 AllocationUtils::GetAllocationLimitReference(isolate(), flags); | 4109 AllocationUtils::GetAllocationLimitReference(isolate(), flags); |
| (...skipping 59 matching lines...) |
| 4165 // object_size is left unchanged by this function. | 4169 // object_size is left unchanged by this function. |
| 4166 } | 4170 } |
| 4167 jmp(gc_required); | 4171 jmp(gc_required); |
| 4168 return; | 4172 return; |
| 4169 } | 4173 } |
| 4170 ASSERT(!result.is(result_end)); | 4174 ASSERT(!result.is(result_end)); |
| 4171 | 4175 |
| 4172 // Load address of new object into result. | 4176 // Load address of new object into result. |
| 4173 LoadAllocationTopHelper(result, scratch, flags); | 4177 LoadAllocationTopHelper(result, scratch, flags); |
| 4174 | 4178 |
| | 4179 if (isolate()->heap_profiler()->is_tracking_allocations()) { |
| | 4180 RecordObjectAllocation(isolate(), result, object_size); |
| | 4181 } |
| | 4182 |
| 4175 // Align the next allocation. Storing the filler map without checking top is | 4183 // Align the next allocation. Storing the filler map without checking top is |
| 4176 // safe in new-space because the limit of the heap is aligned there. | 4184 // safe in new-space because the limit of the heap is aligned there. |
| 4177 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { | 4185 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { |
| 4178 testq(result, Immediate(kDoubleAlignmentMask)); | 4186 testq(result, Immediate(kDoubleAlignmentMask)); |
| 4179 Check(zero, kAllocationIsNotDoubleAligned); | 4187 Check(zero, kAllocationIsNotDoubleAligned); |
| 4180 } | 4188 } |
| 4181 | 4189 |
| 4182 // Calculate new top and bail out if new space is exhausted. | 4190 // Calculate new top and bail out if new space is exhausted. |
| 4183 ExternalReference allocation_limit = | 4191 ExternalReference allocation_limit = |
| 4184 AllocationUtils::GetAllocationLimitReference(isolate(), flags); | 4192 AllocationUtils::GetAllocationLimitReference(isolate(), flags); |
| (...skipping 741 matching lines...) |
| 4926 cmpq(scratch_reg, kScratchRegister); | 4934 cmpq(scratch_reg, kScratchRegister); |
| 4927 j(less, &no_memento_available); | 4935 j(less, &no_memento_available); |
| 4928 cmpq(scratch_reg, ExternalOperand(new_space_allocation_top)); | 4936 cmpq(scratch_reg, ExternalOperand(new_space_allocation_top)); |
| 4929 j(greater, &no_memento_available); | 4937 j(greater, &no_memento_available); |
| 4930 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), | 4938 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), |
| 4931 Heap::kAllocationMementoMapRootIndex); | 4939 Heap::kAllocationMementoMapRootIndex); |
| 4932 bind(&no_memento_available); | 4940 bind(&no_memento_available); |
| 4933 } | 4941 } |
| 4934 | 4942 |
| 4935 | 4943 |
| | 4944 void MacroAssembler::RecordObjectAllocation(Isolate* isolate, |
| | 4945 Register object, |
| | 4946 Register object_size) { |
| | 4947 FrameScope frame(this, StackFrame::EXIT); |
| | 4948 PushSafepointRegisters(); |
| | 4949 PrepareCallCFunction(3); |
| | 4950 // Save object in case it is in rdx, which is clobbered below. |
| | 4951 movq(kScratchRegister, object); |
| | 4952 movq(arg_reg_3, object_size); |
| | 4953 movq(arg_reg_2, kScratchRegister); |
| | 4954 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); |
| | 4955 CallCFunction( |
| | 4956 ExternalReference::record_object_allocation_function(isolate), 3); |
| | 4957 PopSafepointRegisters(); |
| | 4958 } |
| | 4959 |
| | 4960 |
| | 4961 void MacroAssembler::RecordObjectAllocation(Isolate* isolate, |
| | 4962 Register object, |
| | 4963 int object_size) { |
| | 4964 FrameScope frame(this, StackFrame::EXIT); |
| | 4965 PushSafepointRegisters(); |
| | 4966 PrepareCallCFunction(3); |
| | 4967 movq(arg_reg_2, object); |
| | 4968 movq(arg_reg_3, Immediate(object_size)); |
| | 4969 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); |
| | 4970 CallCFunction( |
| | 4971 ExternalReference::record_object_allocation_function(isolate), 3); |
| | 4972 PopSafepointRegisters(); |
| | 4973 } |
| | 4974 |
| | 4975 |
| 4936 } } // namespace v8::internal | 4976 } } // namespace v8::internal |
| 4937 | 4977 |
| 4938 #endif // V8_TARGET_ARCH_X64 | 4978 #endif // V8_TARGET_ARCH_X64 |
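For reference, the pattern this patch threads through Allocate() is: when isolate()->heap_profiler()->is_tracking_allocations() is set, report the freshly reserved address and size to the profiler via a C call before the generated code moves on. Below is a minimal standalone C++ sketch of that shape; HeapProfiler, Allocator, and the member names here are illustrative stand-ins, not V8's actual classes or signatures.

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <utility>
#include <vector>

// Illustrative stand-ins only; these are not V8's classes or APIs.
struct HeapProfiler {
  bool tracking = false;
  std::vector<std::pair<uintptr_t, size_t>> log;
  bool is_tracking_allocations() const { return tracking; }
  void RecordObjectAllocation(uintptr_t address, size_t size) {
    log.emplace_back(address, size);
  }
};

struct Allocator {
  HeapProfiler* profiler;
  uintptr_t top = 0x1000;  // stand-in for the new-space allocation top

  uintptr_t Allocate(size_t object_size) {
    uintptr_t result = top;  // "load address of new object into result"
    if (profiler->is_tracking_allocations()) {
      // Mirrors the generated guard: record (result, object_size) only
      // when the heap profiler asked for allocation tracking.
      profiler->RecordObjectAllocation(result, object_size);
    }
    top += object_size;  // "calculate new top" (limit checks omitted)
    return result;
  }
};

int main() {
  HeapProfiler profiler;
  profiler.tracking = true;
  Allocator allocator{&profiler};
  allocator.Allocate(32);
  allocator.Allocate(64);
  std::printf("recorded %zu allocations\n", profiler.log.size());
  return 0;
}
```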