Chromium Code Reviews

Unified Diff: runtime/vm/flow_graph_compiler_x64.cc

Issue 22825023: Uses an object pool on x64 (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 7 years, 3 months ago
Index: runtime/vm/flow_graph_compiler_x64.cc
===================================================================
--- runtime/vm/flow_graph_compiler_x64.cc (revision 27135)
+++ runtime/vm/flow_graph_compiler_x64.cc (working copy)
@@ -66,10 +66,9 @@
// The real frame starts here.
builder->MarkFrameStart();
- // Callee's PC marker is not used anymore. Pass Function::null() to set to 0.
+ // Current PP, FP, and PC.
+ builder->AddPp(current->function(), slot_ix++);
builder->AddPcMarker(Function::Handle(), slot_ix++);
-
- // Current FP and PC.
builder->AddCallerFp(slot_ix++);
builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++);
@@ -85,13 +84,14 @@
builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++);
}
- // Current PC marker and caller FP.
- builder->AddPcMarker(current->function(), slot_ix++);
- builder->AddCallerFp(slot_ix++);
-
Environment* previous = current;
current = current->outer();
while (current != NULL) {
+ // PP, FP, and PC.
+ builder->AddPp(current->function(), slot_ix++);
+ builder->AddPcMarker(previous->function(), slot_ix++);
+ builder->AddCallerFp(slot_ix++);
+
// For any outer environment the deopt id is that of the call instruction
// which is recorded in the outer environment.
builder->AddReturnAddress(current->function(),
@@ -115,10 +115,6 @@
slot_ix++);
}
- // PC marker and caller FP.
- builder->AddPcMarker(current->function(), slot_ix++);
- builder->AddCallerFp(slot_ix++);
-
// Iterate on the outer environment.
previous = current;
current = current->outer();
@@ -126,7 +122,11 @@
// The previous pointer is now the outermost environment.
ASSERT(previous != NULL);
- // For the outermost environment, set caller PC.
+ // For the outermost environment, set caller PC, caller PP, and caller FP.
+ builder->AddCallerPp(slot_ix++);
+ // PC marker.
+ builder->AddPcMarker(previous->function(), slot_ix++);
+ builder->AddCallerFp(slot_ix++);
builder->AddCallerPc(slot_ix++);
// For the outermost environment, set the incoming arguments.
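(Editorial note, not part of the patch.) For orientation, the builder calls above imply the following per-frame slot order in the materialized deopt frame; this is a reading of the hunk itself, not separate VM documentation:

    // Innermost and intermediate frames, in slot order:
    //   [ PP, PC marker, caller FP, return address, <frame values...> ]
    // Outermost frame:
    //   [ caller PP, PC marker, caller FP, caller PC, <incoming arguments...> ]

The net change is the extra PP slot per frame, so that the pool pointer can be rebuilt for each frame during deoptimization.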
@@ -151,7 +151,7 @@
ASSERT(deopt_env() != NULL);
- __ call(&StubCode::DeoptimizeLabel());
+ __ CallFromPool(&StubCode::DeoptimizeLabel());
set_pc_offset(assem->CodeSize());
__ int3();
#undef __
@@ -165,10 +165,8 @@
void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
Label* is_true,
Label* is_false) {
- const Immediate& raw_null =
- Immediate(reinterpret_cast<intptr_t>(Object::null()));
Label fall_through;
- __ cmpq(bool_register, raw_null);
+ __ CompareObject(bool_register, Object::Handle(Object::null()));
srdjan 2013/09/04 22:57:23 Object::Handle() is same as Object::Handle(Object::null()).
zra 2013/09/05 00:23:11 Done.
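(Editorial note on the exchange above, not part of the patch.) A default-constructed VM handle wraps the null object, so the two spellings denote the same value and the no-argument form simply reads shorter. A minimal illustration, assuming the standard Handle behavior:

    // Both handles refer to Object::null(); the no-argument form is the
    // conventional spelling of a null handle.
    const Object& null_a = Object::Handle();
    const Object& null_b = Object::Handle(Object::null());
    ASSERT(null_a.raw() == null_b.raw());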
__ j(EQUAL, &fall_through, Assembler::kNearJump);
__ CompareObject(bool_register, Bool::True());
__ j(EQUAL, is_true);
@@ -187,22 +185,21 @@
Label* is_not_instance_lbl) {
const SubtypeTestCache& type_test_cache =
SubtypeTestCache::ZoneHandle(SubtypeTestCache::New());
- const Immediate& raw_null =
- Immediate(reinterpret_cast<intptr_t>(Object::null()));
- __ LoadObject(temp_reg, type_test_cache);
+ __ LoadObjectFromPool(temp_reg, type_test_cache,
+ Assembler::kNotPatchable, PP);
__ pushq(temp_reg); // Subtype test cache.
__ pushq(instance_reg); // Instance.
if (test_kind == kTestTypeOneArg) {
ASSERT(type_arguments_reg == kNoRegister);
- __ pushq(raw_null);
- __ call(&StubCode::Subtype1TestCacheLabel());
+ __ PushObject(Object::Handle(Object::null()));
+ __ CallFromPool(&StubCode::Subtype1TestCacheLabel());
} else if (test_kind == kTestTypeTwoArgs) {
ASSERT(type_arguments_reg == kNoRegister);
- __ pushq(raw_null);
- __ call(&StubCode::Subtype2TestCacheLabel());
+ __ PushObject(Object::Handle(Object::null()));
+ __ CallFromPool(&StubCode::Subtype2TestCacheLabel());
} else if (test_kind == kTestTypeThreeArgs) {
__ pushq(type_arguments_reg);
- __ call(&StubCode::Subtype3TestCacheLabel());
+ __ CallFromPool(&StubCode::Subtype3TestCacheLabel());
} else {
UNREACHABLE();
}
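(Editorial note, not part of the patch.) For context on the LoadObjectFromPool / CallFromPool family used throughout this change: instead of embedding a raw 64-bit pointer as an Immediate in the instruction stream (the old raw_null pattern), the object is interned in the code's object pool and fetched through the pool pointer register PP. A conceptual sketch of the idea, with a hypothetical FindOrAddObject helper standing in for however the assembler actually interns pool entries:

    // Conceptual sketch only; not the Assembler's real implementation.
    void ConceptualLoadObjectFromPool(Assembler* assembler, Register dst,
                                      const Object& obj, Register pp) {
      const intptr_t index = FindOrAddObject(obj);  // hypothetical pool interning
      // The pool is an array reachable from PP, so one PP-relative load
      // replaces the former movq with a 64-bit immediate operand.
      assembler->movq(dst, FieldAddress(pp, Array::element_offset(index)));
    }

The usual motivation for this pattern is to keep raw, GC-visible pointers out of the instruction stream.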
@@ -347,11 +344,9 @@
}
if (type.IsFunctionType()) {
// Check if instance is a closure.
- const Immediate& raw_null =
- Immediate(reinterpret_cast<intptr_t>(Object::null()));
__ LoadClassById(R13, kClassIdReg);
__ movq(R13, FieldAddress(R13, Class::signature_function_offset()));
- __ cmpq(R13, raw_null);
+ __ CompareObject(R13, Object::Handle(Object::null()));
__ j(NOT_EQUAL, is_instance_lbl);
}
// Custom checking for numbers (Smi, Mint, Bigint and Double).
@@ -414,15 +409,13 @@
__ Comment("UninstantiatedTypeTest");
ASSERT(!type.IsInstantiated());
// Skip check if destination is a dynamic type.
- const Immediate& raw_null =
- Immediate(reinterpret_cast<intptr_t>(Object::null()));
if (type.IsTypeParameter()) {
const TypeParameter& type_param = TypeParameter::Cast(type);
// Load instantiator (or null) and instantiator type arguments on stack.
__ movq(RDX, Address(RSP, 0)); // Get instantiator type arguments.
// RDX: instantiator type arguments.
// Check if type argument is dynamic.
- __ cmpq(RDX, raw_null);
+ __ CompareObject(RDX, Object::Handle(Object::null()));
__ j(EQUAL, is_instance_lbl);
// Can handle only type arguments that are instances of TypeArguments.
// (runtime checks canonicalize type arguments).
@@ -435,7 +428,7 @@
// Check if type argument is dynamic.
__ CompareObject(RDI, Type::ZoneHandle(Type::DynamicType()));
__ j(EQUAL, is_instance_lbl);
- __ cmpq(RDI, raw_null);
+ __ CompareObject(RDI, Object::Handle(Object::null()));
__ j(EQUAL, is_instance_lbl);
const Type& object_type = Type::ZoneHandle(Type::ObjectType());
__ CompareObject(RDI, object_type);
@@ -575,8 +568,6 @@
LocationSummary* locs) {
ASSERT(type.IsFinalized() && !type.IsMalformed() && !type.IsMalbounded());
- const Immediate& raw_null =
- Immediate(reinterpret_cast<intptr_t>(Object::null()));
Label is_instance, is_not_instance;
__ pushq(RCX); // Store instantiator on stack.
__ pushq(RDX); // Store instantiator type arguments.
@@ -590,7 +581,7 @@
// We can only inline this null check if the type is instantiated at compile
// time, since an uninstantiated type at compile time could be Object or
// dynamic at run time.
- __ cmpq(RAX, raw_null);
+ __ CompareObject(RAX, Object::Handle(Object::null()));
__ j(EQUAL, &is_not_instance);
}
@@ -610,7 +601,7 @@
__ PushObject(type); // Push the type.
__ pushq(RCX); // TODO(srdjan): Pass instantiator instead of null.
__ pushq(RDX); // Instantiator type arguments.
- __ LoadObject(RAX, test_cache);
+ __ LoadObjectFromPool(RAX, test_cache, Assembler::kNotPatchable, PP);
__ pushq(RAX);
GenerateCallRuntime(token_pos,
deopt_id,
@@ -622,21 +613,23 @@
__ Drop(5);
if (negate_result) {
__ popq(RDX);
- __ LoadObject(RAX, Bool::True());
+ __ LoadObjectFromPool(RAX, Bool::True(), Assembler::kNotPatchable, PP);
__ cmpq(RDX, RAX);
__ j(NOT_EQUAL, &done, Assembler::kNearJump);
- __ LoadObject(RAX, Bool::False());
+ __ LoadObjectFromPool(RAX, Bool::False(), Assembler::kNotPatchable, PP);
} else {
__ popq(RAX);
}
__ jmp(&done, Assembler::kNearJump);
}
__ Bind(&is_not_instance);
- __ LoadObject(RAX, Bool::Get(negate_result));
+ __ LoadObjectFromPool(RAX, Bool::Get(negate_result),
+ Assembler::kNotPatchable, PP);
__ jmp(&done, Assembler::kNearJump);
__ Bind(&is_instance);
- __ LoadObject(RAX, Bool::Get(!negate_result));
+ __ LoadObjectFromPool(RAX, Bool::Get(!negate_result),
+ Assembler::kNotPatchable, PP);
__ Bind(&done);
__ popq(RDX); // Remove pushed instantiator type arguments.
__ popq(RCX); // Remove pushed instantiator.
@@ -669,10 +662,8 @@
__ pushq(RCX); // Store instantiator.
__ pushq(RDX); // Store instantiator type arguments.
// A null object is always assignable and is returned as result.
- const Immediate& raw_null =
- Immediate(reinterpret_cast<intptr_t>(Object::null()));
Label is_assignable, runtime_call;
- __ cmpq(RAX, raw_null);
+ __ CompareObject(RAX, Object::Handle(Object::null()));
__ j(EQUAL, &is_assignable);
if (!FLAG_eliminate_type_checks || dst_type.IsMalformed()) {
@@ -725,7 +716,7 @@
__ pushq(RCX); // Instantiator.
__ pushq(RDX); // Instantiator type arguments.
__ PushObject(dst_name); // Push the name of the destination.
- __ LoadObject(RAX, test_cache);
+ __ LoadObjectFromPool(RAX, test_cache, Assembler::kNotPatchable, PP);
__ pushq(RAX);
GenerateCallRuntime(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs);
// Pop the parameters supplied to the runtime entry. The result of the
@@ -770,7 +761,7 @@
__ pushq(RAX);
*push_emitted = true;
}
- __ LoadObject(RAX, loc.constant());
+ __ LoadObjectFromPool(RAX, loc.constant(), Assembler::kNotPatchable, PP);
__ movq(dest, RAX);
} else if (loc.IsRegister()) {
if (*push_emitted && loc.reg() == RAX) {
@@ -900,8 +891,6 @@
__ j(POSITIVE, &loop, Assembler::kNearJump);
// Copy or initialize optional named arguments.
- const Immediate& raw_null =
- Immediate(reinterpret_cast<intptr_t>(Object::null()));
Label all_arguments_processed;
#ifdef DEBUG
const bool check_correct_named_args = true;
@@ -963,7 +952,7 @@
const Object& value = Object::ZoneHandle(
parsed_function().default_parameter_values().At(
param_pos - num_fixed_params));
- __ LoadObject(RAX, value);
+ __ LoadObjectFromPool(RAX, value, Assembler::kNotPatchable, PP);
__ Bind(&assign_optional_parameter);
// Assign RAX to fp[kFirstLocalSlotFromFp - param_pos].
// We do not use the final allocation index of the variable here, i.e.
@@ -978,7 +967,9 @@
if (check_correct_named_args) {
// Check that RDI now points to the null terminator in the arguments
// descriptor.
- __ cmpq(Address(RDI, 0), raw_null);
+ __ LoadObjectFromPool(TMP, Object::Handle(Object::null()),
+ Assembler::kNotPatchable, PP);
+ __ cmpq(Address(RDI, 0), TMP);
__ j(EQUAL, &all_arguments_processed, Assembler::kNearJump);
}
} else {
@@ -997,7 +988,7 @@
// Load RAX with default argument.
const Object& value = Object::ZoneHandle(
parsed_function().default_parameter_values().At(i));
- __ LoadObject(RAX, value);
+ __ LoadObjectFromPool(RAX, value, Assembler::kNotPatchable, PP);
// Assign RAX to fp[kFirstLocalSlotFromFp - param_pos].
// We do not use the final allocation index of the variable here, i.e.
// scope->VariableAt(i)->index(), because captured variables still need
@@ -1023,8 +1014,8 @@
const ICData& ic_data = ICData::ZoneHandle(
ICData::New(function, Symbols::Call(), Object::empty_array(),
Isolate::kNoDeoptId, kNumArgsChecked));
- __ LoadObject(RBX, ic_data);
- __ LeaveFrame(); // The arguments are still on the stack.
+ __ LoadObjectFromPool(RBX, ic_data, Assembler::kNotPatchable, PP);
+ __ LeaveFrameWithPP(); // The arguments are still on the stack.
__ jmp(&StubCode::CallNoSuchMethodFunctionLabel());
// The noSuchMethod call may return to the caller, but not here.
__ int3();
@@ -1042,12 +1033,14 @@
// R10 : arguments descriptor array.
__ movq(RCX, FieldAddress(R10, ArgumentsDescriptor::count_offset()));
__ SmiUntag(RCX);
+ __ LoadObjectFromPool(R12, Object::Handle(Object::null()),
+ Assembler::kNotPatchable, PP);
Label null_args_loop, null_args_loop_condition;
__ jmp(&null_args_loop_condition, Assembler::kNearJump);
const Address original_argument_addr(
RBP, RCX, TIMES_8, (kParamEndSlotFromFp + 1) * kWordSize);
__ Bind(&null_args_loop);
- __ movq(original_argument_addr, raw_null);
+ __ movq(original_argument_addr, R12);
__ Bind(&null_args_loop_condition);
__ decq(RCX);
__ j(POSITIVE, &null_args_loop, Assembler::kNearJump);
@@ -1072,20 +1065,45 @@
__ movq(RAX, Address(RSP, 2 * kWordSize)); // Receiver.
__ movq(RBX, Address(RSP, 1 * kWordSize)); // Value.
__ StoreIntoObject(RAX, FieldAddress(RAX, offset), RBX);
- const Immediate& raw_null =
- Immediate(reinterpret_cast<intptr_t>(Object::null()));
- __ movq(RAX, raw_null);
+ __ LoadObjectFromPool(RAX, Object::Handle(Object::null()),
+ Assembler::kNotPatchable, PP);
__ ret();
}
void FlowGraphCompiler::EmitFrameEntry() {
const Function& function = parsed_function().function();
+ Register new_pp = kNoRegister;
+ Register new_pc = kNoRegister;
if (CanOptimizeFunction() &&
function.is_optimizable() &&
(!is_optimizing() || may_reoptimize())) {
const Register function_reg = RDI;
- __ LoadObject(function_reg, function);
+ new_pp = R13;
+ new_pc = R12;
+
+ Label next;
+ __ nop(4); // Need a fixed size sequence on frame entry.
+ __ call(&next);
+ __ Bind(&next);
+
+ const intptr_t object_pool_pc_dist =
+ Instructions::HeaderSize() - Instructions::object_pool_offset() +
+ __ CodeSize();
+ const intptr_t offset =
+ Assembler::kEntryPointToPcMarkerOffset - __ CodeSize();
+ __ popq(new_pc);
+ if (offset != 0) {
+ __ addq(new_pc, Immediate(offset));
+ }
+
+ // Load callee's pool pointer.
+ __ movq(new_pp, Address(new_pc, -object_pool_pc_dist - offset));
+
+ // Load function object using the callee's pool pointer.
+ __ LoadObjectFromPool(function_reg, function,
+ Assembler::kPatchable, new_pp);
+
// Patch point is after the eventually inlined function object.
AddCurrentDescriptor(PcDescriptors::kEntryPatch,
Isolate::kNoDeoptId,
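(Editorial note, not part of the patch.) The fixed-size entry sequence above is easy to misread: x64 has no direct way to read RIP, so the code materializes the current PC with a call to the immediately following label plus a pop, then rewinds it to the entry-point PC marker and loads the pool pointer at a fixed negative displacement. A worked reading of the offsets, where S is CodeSize() right after the Bind and EP is the runtime entry point, assuming the usual layout in which EP sits Instructions::HeaderSize() past the start of the Instructions object:

    //   popped new_pc     = EP + S
    //   offset            = kEntryPointToPcMarkerOffset - S
    //   new_pc after addq = EP + kEntryPointToPcMarkerOffset
    //   load address      = new_pc - object_pool_pc_dist - offset
    //                     = EP - Instructions::HeaderSize()
    //                       + Instructions::object_pool_offset()
    // i.e. the object pool field of the enclosing Instructions object, which
    // is the callee's pool pointer; both S and the PC-marker offset cancel.

The same sequence is repeated in the non-counting branch below, which is why both paths feed new_pp/new_pc into EnterDartFrame/EnterOsrFrame.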
@@ -1101,8 +1119,31 @@
Immediate(FLAG_optimization_counter_threshold));
}
ASSERT(function_reg == RDI);
- __ j(GREATER_EQUAL, &StubCode::OptimizeFunctionLabel());
+ __ ConditionalJumpFromPool(GREATER_EQUAL,
+ &StubCode::OptimizeFunctionLabel(), R13);
} else if (!flow_graph().IsCompiledForOsr()) {
+ // We have to load the PP here too because a load of an external label
+ // may be patched at the AddCurrentDescriptor below.
+ new_pp = R13;
+ new_pc = R12;
+
+ Label next;
+ __ nop(4); // Need a fixed size sequence on frame entry.
+ __ call(&next);
+ __ Bind(&next);
+
+ const intptr_t object_pool_pc_dist =
+ Instructions::HeaderSize() - Instructions::object_pool_offset() +
+ __ CodeSize();
+ const intptr_t offset =
+ Assembler::kEntryPointToPcMarkerOffset - __ CodeSize();
+ __ popq(new_pc);
+ if (offset != 0) {
+ __ addq(new_pc, Immediate(offset));
+ }
+
+ // Load callee's pool pointer.
+ __ movq(new_pp, Address(new_pc, -object_pool_pc_dist - offset));
AddCurrentDescriptor(PcDescriptors::kEntryPatch,
Isolate::kNoDeoptId,
0); // No token position.
@@ -1113,10 +1154,10 @@
- flow_graph().num_stack_locals()
- flow_graph().num_copied_params();
ASSERT(extra_slots >= 0);
- __ EnterOsrFrame(extra_slots * kWordSize);
+ __ EnterOsrFrame(extra_slots * kWordSize, new_pp, new_pc);
} else {
ASSERT(StackSize() >= 0);
- __ EnterDartFrame(StackSize() * kWordSize);
+ __ EnterDartFrame(StackSize() * kWordSize, new_pp, new_pc);
}
}
@@ -1170,8 +1211,8 @@
const ICData& ic_data = ICData::ZoneHandle(
ICData::New(function, name, Object::empty_array(),
Isolate::kNoDeoptId, kNumArgsChecked));
- __ LoadObject(RBX, ic_data);
- __ LeaveFrame(); // The arguments are still on the stack.
+ __ LoadObjectFromPool(RBX, ic_data, Assembler::kNotPatchable, PP);
+ __ LeaveFrameWithPP(); // The arguments are still on the stack.
__ jmp(&StubCode::CallNoSuchMethodFunctionLabel());
// The noSuchMethod call may return to the caller, but not here.
__ int3();
@@ -1189,9 +1230,8 @@
if (!is_optimizing() && (num_locals > 0)) {
__ Comment("Initialize spill slots");
const intptr_t slot_base = parsed_function().first_stack_local_index();
- const Immediate& raw_null =
- Immediate(reinterpret_cast<intptr_t>(Object::null()));
- __ movq(RAX, raw_null);
+ __ LoadObjectFromPool(RAX, Object::Handle(Object::null()),
+ Assembler::kNotPatchable, PP);
for (intptr_t i = 0; i < num_locals; ++i) {
// Subtract index i (locals lie at lower addresses than RBP).
__ movq(Address(RBP, (slot_base - i) * kWordSize), RAX);
@@ -1218,11 +1258,15 @@
AddCurrentDescriptor(PcDescriptors::kPatchCode,
Isolate::kNoDeoptId,
0); // No token position.
- __ jmp(&StubCode::FixCallersTargetLabel());
+ // This is patched up to a point in FrameEntry where the PP for the
+ // current function is in R13 instead of PP.
+ __ JumpPatchable(&StubCode::FixCallersTargetLabel(), R13);
+
+ // TODO(zra): Is this descriptor used?
AddCurrentDescriptor(PcDescriptors::kLazyDeoptJump,
Isolate::kNoDeoptId,
0); // No token position.
- __ jmp(&StubCode::DeoptimizeLazyLabel());
+ __ JumpFromPool(&StubCode::DeoptimizeLazyLabel(), PP);
}
@@ -1230,7 +1274,7 @@
const ExternalLabel* label,
PcDescriptors::Kind kind,
LocationSummary* locs) {
- __ call(label);
+ __ CallFromPool(label);
AddCurrentDescriptor(kind, Isolate::kNoDeoptId, token_pos);
RecordSafepoint(locs);
}
@@ -1241,7 +1285,7 @@
const ExternalLabel* label,
PcDescriptors::Kind kind,
LocationSummary* locs) {
- __ call(label);
+ __ CallPatchable(label);
AddCurrentDescriptor(kind, deopt_id, token_pos);
RecordSafepoint(locs);
// Marks either the continuation point in unoptimized code or the
@@ -1346,7 +1390,7 @@
// RAX: class ID of the receiver (smi).
__ Bind(&load_cache);
- __ LoadObject(RBX, cache);
+ __ LoadObjectFromPool(RBX, cache, Assembler::kNotPatchable, PP);
__ movq(RDI, FieldAddress(RBX, MegamorphicCache::buckets_offset()));
__ movq(RBX, FieldAddress(RBX, MegamorphicCache::mask_offset()));
// RDI: cache buckets array.
@@ -1378,8 +1422,9 @@
__ movq(RAX, FieldAddress(RDI, RCX, TIMES_8, base + kWordSize));
__ movq(RAX, FieldAddress(RAX, Function::code_offset()));
__ movq(RAX, FieldAddress(RAX, Code::instructions_offset()));
- __ LoadObject(RBX, ic_data);
- __ LoadObject(R10, arguments_descriptor);
+ __ LoadObjectFromPool(RBX, ic_data, Assembler::kNotPatchable, PP);
+ __ LoadObjectFromPool(R10, arguments_descriptor,
+ Assembler::kNotPatchable, PP);
__ addq(RAX, Immediate(Instructions::HeaderSize() - kHeapObjectTag));
__ call(RAX);
AddCurrentDescriptor(PcDescriptors::kOther, Isolate::kNoDeoptId, token_pos);
@@ -1429,9 +1474,9 @@
__ pushq(reg);
__ PushObject(obj);
if (is_optimizing()) {
- __ call(&StubCode::OptimizedIdenticalWithNumberCheckLabel());
+ __ CallPatchable(&StubCode::OptimizedIdenticalWithNumberCheckLabel());
} else {
- __ call(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
+ __ CallPatchable(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
}
AddCurrentDescriptor(PcDescriptors::kRuntimeCall,
Isolate::kNoDeoptId,
@@ -1453,9 +1498,9 @@
__ pushq(left);
__ pushq(right);
if (is_optimizing()) {
- __ call(&StubCode::OptimizedIdenticalWithNumberCheckLabel());
+ __ CallPatchable(&StubCode::OptimizedIdenticalWithNumberCheckLabel());
} else {
- __ call(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
+ __ CallPatchable(&StubCode::UnoptimizedIdenticalWithNumberCheckLabel());
}
AddCurrentDescriptor(PcDescriptors::kRuntimeCall,
Isolate::kNoDeoptId,
@@ -1473,12 +1518,12 @@
// Fallthrough calls super equality.
void FlowGraphCompiler::EmitSuperEqualityCallPrologue(Register result,
Label* skip_call) {
- const Immediate& raw_null =
- Immediate(reinterpret_cast<intptr_t>(Object::null()));
+ __ LoadObjectFromPool(TMP, Object::Handle(Object::null()),
+ Assembler::kNotPatchable, PP);
Label check_identity, fall_through;
- __ cmpq(Address(RSP, 0 * kWordSize), raw_null);
+ __ cmpq(Address(RSP, 0 * kWordSize), TMP);
__ j(EQUAL, &check_identity, Assembler::kNearJump);
- __ cmpq(Address(RSP, 1 * kWordSize), raw_null);
+ __ cmpq(Address(RSP, 1 * kWordSize), TMP);
__ j(NOT_EQUAL, &fall_through, Assembler::kNearJump);
__ Bind(&check_identity);
@@ -1486,11 +1531,11 @@
__ cmpq(result, Address(RSP, 0 * kWordSize));
Label is_false;
__ j(NOT_EQUAL, &is_false, Assembler::kNearJump);
- __ LoadObject(result, Bool::True());
+ __ LoadObjectFromPool(result, Bool::True(), Assembler::kNotPatchable, PP);
__ Drop(1);
__ jmp(skip_call);
__ Bind(&is_false);
- __ LoadObject(result, Bool::False());
+ __ LoadObjectFromPool(result, Bool::False(), Assembler::kNotPatchable, PP);
__ Drop(1);
__ jmp(skip_call);
__ Bind(&fall_through);
@@ -1573,7 +1618,8 @@
const Array& arguments_descriptor =
Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
argument_names));
- __ LoadObject(R10, arguments_descriptor);
+ __ LoadObjectFromPool(R10, arguments_descriptor,
+ Assembler::kNotPatchable, PP);
for (intptr_t i = 0; i < len; i++) {
const bool is_last_check = (i == (len - 1));
Label next_test;
@@ -1615,7 +1661,6 @@
}
-
void FlowGraphCompiler::EmitDoubleCompareBool(Condition true_condition,
FpuRegister left,
FpuRegister right,
@@ -1625,10 +1670,12 @@
assembler()->j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN false;
assembler()->j(true_condition, &is_true, Assembler::kNearJump);
assembler()->Bind(&is_false);
- assembler()->LoadObject(result, Bool::False());
+ assembler()->LoadObjectFromPool(result, Bool::False(),
+ Assembler::kNotPatchable, PP);
assembler()->jmp(&done);
assembler()->Bind(&is_true);
- assembler()->LoadObject(result, Bool::True());
+ assembler()->LoadObjectFromPool(result, Bool::True(),
+ Assembler::kNotPatchable, PP);
assembler()->Bind(&done);
}
@@ -1749,7 +1796,8 @@
if (constant.IsSmi() && (Smi::Cast(constant).Value() == 0)) {
__ xorq(destination.reg(), destination.reg());
} else {
- __ LoadObject(destination.reg(), constant);
+ __ LoadObjectFromPool(destination.reg(), constant,
+ Assembler::kNotPatchable, PP);
}
} else {
ASSERT(destination.IsStackSlot());
