Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(802)

Side by Side Diff: src/x64/macro-assembler-x64.cc

Issue 43693002: Correctly set up exit frame when calling into allocation tracker (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Added AllowStubCallsScope Created 7 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/code-stubs-x64.cc ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 4073 matching lines...) Expand 10 before | Expand all | Expand 10 after
4084 } 4084 }
4085 jmp(gc_required); 4085 jmp(gc_required);
4086 return; 4086 return;
4087 } 4087 }
4088 ASSERT(!result.is(result_end)); 4088 ASSERT(!result.is(result_end));
4089 4089
4090 // Load address of new object into result. 4090 // Load address of new object into result.
4091 LoadAllocationTopHelper(result, scratch, flags); 4091 LoadAllocationTopHelper(result, scratch, flags);
4092 4092
4093 if (isolate()->heap_profiler()->is_tracking_allocations()) { 4093 if (isolate()->heap_profiler()->is_tracking_allocations()) {
4094 RecordObjectAllocation(isolate(), result, object_size); 4094 AllowStubCallsScope allow_stub_calls(this, true);
4095 RecordObjectAllocationStub stub;
4096 push(Immediate(object_size));
4097 push(result);
4098 CallStub(&stub);
4099 Drop(2);
4095 } 4100 }
4096 4101
4097 // Align the next allocation. Storing the filler map without checking top is 4102 // Align the next allocation. Storing the filler map without checking top is
4098 // safe in new-space because the limit of the heap is aligned there. 4103 // safe in new-space because the limit of the heap is aligned there.
4099 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { 4104 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
4100 testq(result, Immediate(kDoubleAlignmentMask)); 4105 testq(result, Immediate(kDoubleAlignmentMask));
4101 Check(zero, kAllocationIsNotDoubleAligned); 4106 Check(zero, kAllocationIsNotDoubleAligned);
4102 } 4107 }
4103 4108
4104 // Calculate new top and bail out if new space is exhausted. 4109 // Calculate new top and bail out if new space is exhausted.
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
4167 } 4172 }
4168 jmp(gc_required); 4173 jmp(gc_required);
4169 return; 4174 return;
4170 } 4175 }
4171 ASSERT(!result.is(result_end)); 4176 ASSERT(!result.is(result_end));
4172 4177
4173 // Load address of new object into result. 4178 // Load address of new object into result.
4174 LoadAllocationTopHelper(result, scratch, flags); 4179 LoadAllocationTopHelper(result, scratch, flags);
4175 4180
4176 if (isolate()->heap_profiler()->is_tracking_allocations()) { 4181 if (isolate()->heap_profiler()->is_tracking_allocations()) {
4177 RecordObjectAllocation(isolate(), result, object_size); 4182 AllowStubCallsScope allow_stub_calls(this, true);
4183 RecordObjectAllocationStub stub;
4184 push(object_size);
4185 push(result);
4186 CallStub(&stub);
4187 Drop(2);
4178 } 4188 }
4179 4189
4180 // Align the next allocation. Storing the filler map without checking top is 4190 // Align the next allocation. Storing the filler map without checking top is
4181 // safe in new-space because the limit of the heap is aligned there. 4191 // safe in new-space because the limit of the heap is aligned there.
4182 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { 4192 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
4183 testq(result, Immediate(kDoubleAlignmentMask)); 4193 testq(result, Immediate(kDoubleAlignmentMask));
4184 Check(zero, kAllocationIsNotDoubleAligned); 4194 Check(zero, kAllocationIsNotDoubleAligned);
4185 } 4195 }
4186 4196
4187 // Calculate new top and bail out if new space is exhausted. 4197 // Calculate new top and bail out if new space is exhausted.
(...skipping 742 matching lines...) Expand 10 before | Expand all | Expand 10 after
4930 movq(kScratchRegister, new_space_start); 4940 movq(kScratchRegister, new_space_start);
4931 cmpq(scratch_reg, kScratchRegister); 4941 cmpq(scratch_reg, kScratchRegister);
4932 j(less, no_memento_found); 4942 j(less, no_memento_found);
4933 cmpq(scratch_reg, ExternalOperand(new_space_allocation_top)); 4943 cmpq(scratch_reg, ExternalOperand(new_space_allocation_top));
4934 j(greater, no_memento_found); 4944 j(greater, no_memento_found);
4935 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), 4945 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize),
4936 Heap::kAllocationMementoMapRootIndex); 4946 Heap::kAllocationMementoMapRootIndex);
4937 } 4947 }
4938 4948
4939 4949
// Reports a fresh allocation of `object_size` (register variant) bytes at
// `object` to the isolate's heap profiler via a C call. Builds an EXIT frame
// and preserves all registers across the call. NOTE(review): this helper is
// removed by this CL in favor of RecordObjectAllocationStub (see the diff
// chunks above), which sets up the exit frame correctly.
4940 void MacroAssembler::RecordObjectAllocation(Isolate* isolate,
4941 Register object,
4942 Register object_size) {
4943 FrameScope frame(this, StackFrame::EXIT);
4944 PushSafepointRegisters();
4945 PrepareCallCFunction(3);
4946 // Stash `object` first: it may alias arg_reg_3 (rdx on System V),
4946 // which is clobbered by the next move — TODO confirm for Win64 regs.
4947 movq(kScratchRegister, object);
4948 movq(arg_reg_3, object_size);
4949 movq(arg_reg_2, kScratchRegister);
4950 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE);
4951 CallCFunction(
4952 ExternalReference::record_object_allocation_function(isolate), 3);
4953 PopSafepointRegisters();
4954 }
4955
4956
// Immediate-size overload of the profiler hook above: reports an allocation
// of a compile-time-known `object_size` at `object` to the heap profiler.
// No scratch shuffle is needed here because `object` is consumed before any
// argument register it could alias is overwritten. NOTE(review): also
// removed by this CL in favor of RecordObjectAllocationStub.
4957 void MacroAssembler::RecordObjectAllocation(Isolate* isolate,
4958 Register object,
4959 int object_size) {
4960 FrameScope frame(this, StackFrame::EXIT);
4961 PushSafepointRegisters();
4962 PrepareCallCFunction(3);
4963 movq(arg_reg_2, object);
4964 movq(arg_reg_3, Immediate(object_size));
4965 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE);
4966 CallCFunction(
4967 ExternalReference::record_object_allocation_function(isolate), 3);
4968 PopSafepointRegisters();
4969 }
4970
4971
4972 } } // namespace v8::internal 4950 } } // namespace v8::internal
4973 4951
4974 #endif // V8_TARGET_ARCH_X64 4952 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/code-stubs-x64.cc ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698