Index: runtime/vm/assembler_x64.cc |
diff --git a/runtime/vm/assembler_x64.cc b/runtime/vm/assembler_x64.cc |
index 45a0754cde9852e760755f48f93c902e2061bc8f..3fb782b2a43748684c4e6e81fc296e5477c7cd77 100644 |
--- a/runtime/vm/assembler_x64.cc |
+++ b/runtime/vm/assembler_x64.cc |
@@ -22,7 +22,7 @@ DECLARE_FLAG(bool, inline_alloc); |
Assembler::Assembler(bool use_far_branches) |
- : buffer_(), |
+ : buffer_(this), |
prologue_offset_(-1), |
comments_(), |
allow_constant_pool_(true) { |
@@ -33,29 +33,27 @@ Assembler::Assembler(bool use_far_branches) |
// These objects and labels need to be accessible through every pool-pointer |
// at the same index. |
intptr_t index = |
- object_pool_.AddObject(Object::null_object(), kNotPatchable); |
+ object_pool_.AddFixedObject(Object::null_object()); |
[Review comment — Florian Schneider, 2015/05/26 08:51:51]
I'm considering removing all fixed objects / stubs.
[Reply — srdjan, 2015/05/26 22:36:26]
Where would you load them from?
|
ASSERT(index == 0); |
- index = object_pool_.AddObject(Bool::True(), kNotPatchable); |
+ index = object_pool_.AddFixedObject(Bool::True()); |
ASSERT(index == 1); |
- index = object_pool_.AddObject(Bool::False(), kNotPatchable); |
+ index = object_pool_.AddFixedObject(Bool::False()); |
ASSERT(index == 2); |
- const Smi& vacant = Smi::Handle(Smi::New(0xfa >> kSmiTagShift)); |
+ const Smi& vacant = Smi::ZoneHandle(Smi::New(0xfa >> kSmiTagShift)); |
StubCode* stub_code = isolate->stub_code(); |
if (stub_code->UpdateStoreBuffer_entry() != NULL) { |
- object_pool_.AddExternalLabel(&stub_code->UpdateStoreBufferLabel(), |
- kNotPatchable); |
+ object_pool_.AddFixedExternalLabel(&stub_code->UpdateStoreBufferLabel()); |
} else { |
- object_pool_.AddObject(vacant, kNotPatchable); |
+ object_pool_.AddFixedObject(vacant); |
} |
if (stub_code->CallToRuntime_entry() != NULL) { |
- object_pool_.AddExternalLabel(&stub_code->CallToRuntimeLabel(), |
- kNotPatchable); |
+ object_pool_.AddFixedExternalLabel(&stub_code->CallToRuntimeLabel()); |
} else { |
- object_pool_.AddObject(vacant, kNotPatchable); |
+ object_pool_.AddFixedObject(vacant); |
} |
} |
} |
@@ -98,6 +96,7 @@ void Assembler::LoadExternalLabel(Register dst, |
const int32_t offset = |
Array::element_offset(object_pool_.FindExternalLabel(label, patchable)); |
LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag); |
+ EmitFixup(new ObjectPoolIndexFixup()); |
} |
@@ -119,16 +118,20 @@ void Assembler::CallPatchable(const ExternalLabel* label) { |
Array::element_offset(object_pool_.FindExternalLabel(label, kPatchable)); |
call(Address::AddressBaseImm32(PP, offset - kHeapObjectTag)); |
ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize); |
+ EmitFixup(new ObjectPoolIndexFixup()); |
} |
-void Assembler::Call(const ExternalLabel* label, Register pp) { |
+void Assembler::Call(const ExternalLabel* label, Register pp, bool emit_fixup) { |
if (Isolate::Current() == Dart::vm_isolate()) { |
call(label); |
} else { |
const int32_t offset = Array::element_offset( |
object_pool_.FindExternalLabel(label, kNotPatchable)); |
call(Address::AddressBaseImm32(pp, offset - kHeapObjectTag)); |
+ if (emit_fixup) { |
+ EmitFixup(new ObjectPoolIndexFixup()); |
+ } |
} |
} |
@@ -2612,13 +2615,15 @@ void Assembler::JmpPatchable(const ExternalLabel* label, Register pp) { |
// Patchable jumps always use a 32-bit immediate encoding. |
jmp(Address::AddressBaseImm32(pp, offset - kHeapObjectTag)); |
ASSERT((buffer_.GetPosition() - call_start) == JumpPattern::kLengthInBytes); |
+ EmitFixup(new ObjectPoolIndexFixup()); |
} |
void Assembler::Jmp(const ExternalLabel* label, Register pp) { |
const int32_t offset = Array::element_offset( |
object_pool_.FindExternalLabel(label, kNotPatchable)); |
- jmp(Address(pp, offset - kHeapObjectTag)); |
+ jmp(Address::AddressBaseImm32(pp, offset - kHeapObjectTag)); |
+ EmitFixup(new ObjectPoolIndexFixup()); |
} |
@@ -2817,7 +2822,7 @@ void Assembler::LoadIsolate(Register dst) { |
void Assembler::LoadObject(Register dst, const Object& object, Register pp) { |
if (CanLoadFromObjectPool(object)) { |
const int32_t offset = |
- Array::element_offset(object_pool_.FindObject(object, kNotPatchable)); |
+ Array::element_offset(object_pool_.FindObject(object)); |
LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag); |
} else { |
ASSERT((Isolate::Current() == Dart::vm_isolate()) || |
@@ -2852,7 +2857,7 @@ void Assembler::PushObject(const Object& object, Register pp) { |
void Assembler::CompareObject(Register reg, const Object& object, Register pp) { |
if (CanLoadFromObjectPool(object)) { |
const int32_t offset = |
- Array::element_offset(object_pool_.FindObject(object, kNotPatchable)); |
+ Array::element_offset(object_pool_.FindObject(object)); |
cmpq(reg, Address(pp, offset-kHeapObjectTag)); |
} else { |
CompareImmediate( |
@@ -2863,8 +2868,8 @@ void Assembler::CompareObject(Register reg, const Object& object, Register pp) { |
intptr_t Assembler::FindImmediate(int64_t imm) { |
ASSERT(Isolate::Current() != Dart::vm_isolate()); |
- const Smi& smi = Smi::Handle(reinterpret_cast<RawSmi*>(imm)); |
- return object_pool_.FindObject(smi, kNotPatchable); |
+ const Smi& smi = Smi::ZoneHandle(reinterpret_cast<RawSmi*>(imm)); |
+ return object_pool_.FindObject(smi); |
} |
@@ -3064,7 +3069,8 @@ void Assembler::StoreIntoObject(Register object, |
movq(RDX, object); |
} |
StubCode* stub_code = Isolate::Current()->stub_code(); |
- Call(&stub_code->UpdateStoreBufferLabel(), PP); |
+ Call(&stub_code->UpdateStoreBufferLabel(), PP, |
+ false); // No fixup. Stub is at a fixed place in the object pool. |
if (value != RDX) popq(RDX); |
Bind(&done); |
} |