Index: runtime/vm/stub_code_x64.cc
diff --git a/runtime/vm/stub_code_x64.cc b/runtime/vm/stub_code_x64.cc
index 4fe0d9bc63dce897a7220b1ce6a9ad07ba1c88b4..93d5c5996d5288ffdf2da06f708f17e1050d3ffc 100644
--- a/runtime/vm/stub_code_x64.cc
+++ b/runtime/vm/stub_code_x64.cc
@@ -848,6 +848,11 @@ void StubCode::GenerateAllocateContextStub(Assembler* assembler) {
   __ leaq(R13, Address(R10, TIMES_8, fixed_size));
   __ andq(R13, Immediate(-kObjectAlignment));
+  // Check for allocation tracing.
+  __ MaybeTraceAllocation(kContextCid,
+                          &slow_case,
+                          /* near_jump = */ false,
Florian Schneider
2015/08/14 11:35:09
Use Assembler::kFarJump instead.

Cutch
2015/08/14 13:54:36
Done here and elsewhere.

+                          /* inline_isolate = */ false);
   // Now allocate the object.
   // R10: number of context variables.
   const intptr_t cid = kContextCid;
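
Per the review thread above, the bare "false" for near_jump was replaced with the
named constant. A minimal sketch of the revised call site, assuming the 2015-era
x64 Assembler declares "static const bool kFarJump = false;" (a hedged sketch; the
exact landed code may differ):

  // Post-review form: Assembler::kFarJump names the bool that was
  // previously passed as a bare literal.
  __ MaybeTraceAllocation(kContextCid,
                          &slow_case,
                          Assembler::kFarJump,
                          /* inline_isolate = */ false);
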
@@ -1055,7 +1060,7 @@ void StubCode::GenerateAllocationStubForClass(
     // RDX: instantiated type arguments.
   }
   if (FLAG_inline_alloc && Heap::IsAllocatableInNewSpace(instance_size) &&
-      !cls.trace_allocation()) {
+      !cls.TraceAllocation()) {
     Label slow_case;
     // Allocate the object and update top to point to
     // next object start and initialize the allocated object.
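
The renamed predicate feeds the same guard as before: the stub only emits the fast
inline-allocation path when inline allocation is enabled, the instance fits in new
space, and allocations of this class are not being traced. An illustrative reading
of that decision, with placeholder comments standing in for the emitted code (a
sketch, not the stub's actual body):

  if (FLAG_inline_alloc && Heap::IsAllocatableInNewSpace(instance_size) &&
      !cls.TraceAllocation()) {
    // Fast path: bump-pointer allocation in new space, with slow_case
    // as the fallback label when the allocation does not fit.
  } else {
    // Slow path: call into the runtime, which can record the allocation
    // for the profiler when tracing is enabled for this class.
  }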