Chromium Code Reviews

Unified Diff: runtime/vm/intermediate_language_arm64.cc

Issue 1264543002: Simplify constant pool usage in arm64 code generator (by removing extra argument) (Closed)
Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 5 years, 4 months ago
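
The pattern applied throughout this diff: macro-assembler helpers (LoadObject, LoadImmediate, CompareImmediate, TryAllocate, ...) stop taking the pool-pointer register PP as an explicit argument; the assembler itself now tracks the pool register and whether constant-pool loads are currently legal, which LeaveDartFrame() clears and set_constant_pool_allowed(true) restores. Below is a minimal, hypothetical C++ sketch of that API shape — the Assembler class, register values, and printed encodings are invented for illustration and are not the Dart VM's actual assembler.

// Hypothetical sketch only -- models the API shape, not the real Dart VM assembler.
#include <cassert>
#include <cstdio>

enum Register { R0 = 0, R4 = 4, PP = 27 };

class Assembler {
 public:
  // Before this CL, helpers took the pool register explicitly:
  //   void LoadObject(Register dst, int pool_index, Register pp);
  // After, the assembler supplies PP itself and asserts pool use is legal.
  void LoadObject(Register dst, int pool_index) {
    assert(constant_pool_allowed_);
    std::printf("ldr x%d, [x%d, #%d]\n", dst, PP, pool_index * 8);
  }

  // Leaving the Dart frame restores the caller's PP, so constant-pool loads
  // are disallowed until the flag is explicitly reset.
  void LeaveDartFrame() { constant_pool_allowed_ = false; }
  void set_constant_pool_allowed(bool allowed) {
    constant_pool_allowed_ = allowed;
  }

 private:
  bool constant_pool_allowed_ = true;
};

int main() {
  Assembler assembler;
  assembler.LoadObject(R4, 3);  // call sites no longer thread PP through
  assembler.LeaveDartFrame();
  assembler.set_constant_pool_allowed(true);  // as ReturnInstr does after ret()
  assembler.LoadObject(R0, 5);
  return 0;
}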
 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.

 #include "vm/globals.h"  // Needed here to get TARGET_ARCH_ARM64.
 #if defined(TARGET_ARCH_ARM64)

 #include "vm/intermediate_language.h"

 #include "vm/dart_entry.h"
(...skipping 38 matching lines...)


 void PushArgumentInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   // In SSA mode, we need an explicit push. Nothing to do in non-SSA mode
   // where PushArgument is handled by BindInstr::EmitNativeCode.
   if (compiler->is_optimizing()) {
     Location value = locs()->in(0);
     if (value.IsRegister()) {
       __ Push(value.reg());
     } else if (value.IsConstant()) {
-      __ PushObject(value.constant(), PP);
+      __ PushObject(value.constant());
     } else {
       ASSERT(value.IsStackSlot());
       const intptr_t value_offset = value.ToStackSlotOffset();
-      __ LoadFromOffset(TMP, value.base_reg(), value_offset, PP);
+      __ LoadFromOffset(TMP, value.base_reg(), value_offset);
       __ Push(TMP);
     }
   }
 }


 LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone,
                                                   bool opt) const {
   const intptr_t kNumInputs = 1;
   const intptr_t kNumTemps = 0;
(...skipping 17 matching lines...)
     return;
   }

 #if defined(DEBUG)
   Label stack_ok;
   __ Comment("Stack Check");
   const intptr_t fp_sp_dist =
       (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize;
   ASSERT(fp_sp_dist <= 0);
   __ sub(R2, SP, Operand(FP));
-  __ CompareImmediate(R2, fp_sp_dist, PP);
+  __ CompareImmediate(R2, fp_sp_dist);
   __ b(&stack_ok, EQ);
   __ brk(0);
   __ Bind(&stack_ok);
 #endif
-  __ LeaveDartFrame();
+  __ LeaveDartFrame();  // Disallows constant pool use.
   __ ret();
+  // This ReturnInstr may be emitted out of order by the optimizer. The next
+  // block may be a target expecting a properly set constant pool pointer.
+  __ set_constant_pool_allowed(true);
 }


 static Condition NegateCondition(Condition condition) {
   switch (condition) {
     case EQ: return NE;
     case NE: return EQ;
     case LT: return GE;
     case LE: return GT;
     case GT: return LE;
(...skipping 58 matching lines...)
   __ cset(result, true_condition);

   if (is_power_of_two_kind) {
     const intptr_t shift =
         Utils::ShiftForPowerOfTwo(Utils::Maximum(true_value, false_value));
     __ LslImmediate(result, result, shift + kSmiTagSize);
   } else {
     __ sub(result, result, Operand(1));
     const int64_t val =
         Smi::RawValue(true_value) - Smi::RawValue(false_value);
-    __ AndImmediate(result, result, val, PP);
+    __ AndImmediate(result, result, val);
     if (false_value != 0) {
-      __ AddImmediate(result, result, Smi::RawValue(false_value), PP);
+      __ AddImmediate(result, result, Smi::RawValue(false_value));
     }
   }
 }


 LocationSummary* ClosureCallInstr::MakeLocationSummary(Zone* zone,
                                                        bool opt) const {
   const intptr_t kNumInputs = 1;
   const intptr_t kNumTemps = 0;
   LocationSummary* summary = new(zone) LocationSummary(
       zone, kNumInputs, kNumTemps, LocationSummary::kCall);
   summary->set_in(0, Location::RegisterLocation(R0));  // Function.
   summary->set_out(0, Location::RegisterLocation(R0));
   return summary;
 }


 void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   // Load arguments descriptor in R4.
   int argument_count = ArgumentCount();
   const Array& arguments_descriptor =
       Array::ZoneHandle(ArgumentsDescriptor::New(argument_count,
                                                  argument_names()));
-  __ LoadObject(R4, arguments_descriptor, PP);
+  __ LoadObject(R4, arguments_descriptor);

   // R4: Arguments descriptor.
   // R0: Function.
   ASSERT(locs()->in(0).reg() == R0);
-  __ LoadFieldFromOffset(R2, R0, Function::instructions_offset(), PP);
+  __ LoadFieldFromOffset(R2, R0, Function::instructions_offset());

   // R2: instructions.
   // R5: Smi 0 (no IC data; the lazy-compile stub expects a GC-safe value).
-  __ LoadImmediate(R5, 0, PP);
-  __ AddImmediate(R2, R2, Instructions::HeaderSize() - kHeapObjectTag, PP);
+  __ LoadImmediate(R5, 0);
+  __ AddImmediate(R2, R2, Instructions::HeaderSize() - kHeapObjectTag);
   __ blr(R2);
   compiler->RecordSafepoint(locs());
   // Marks either the continuation point in unoptimized code or the
   // deoptimization point in optimized code, after call.
   const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id());
   if (compiler->is_optimizing()) {
     compiler->AddDeoptIndexAtCall(deopt_id_after, token_pos());
   }
   // Add deoptimization continuation point after the call and before the
   // arguments are removed.
   // In optimized code this descriptor is needed for exception handling.
   compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt,
                                  deopt_id_after,
                                  token_pos());
   __ Drop(argument_count);
 }


 LocationSummary* LoadLocalInstr::MakeLocationSummary(Zone* zone,
                                                      bool opt) const {
   return LocationSummary::Make(zone,
                                0,
                                Location::RequiresRegister(),
                                LocationSummary::kNoCall);
 }


 void LoadLocalInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   const Register result = locs()->out(0).reg();
-  __ LoadFromOffset(result, FP, local().index() * kWordSize, PP);
+  __ LoadFromOffset(result, FP, local().index() * kWordSize);
 }


 LocationSummary* StoreLocalInstr::MakeLocationSummary(Zone* zone,
                                                       bool opt) const {
   return LocationSummary::Make(zone,
                                1,
                                Location::SameAsFirstInput(),
                                LocationSummary::kNoCall);
 }


 void StoreLocalInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   const Register value = locs()->in(0).reg();
   const Register result = locs()->out(0).reg();
   ASSERT(result == value);  // Assert that register assignment is correct.
-  __ StoreToOffset(value, FP, local().index() * kWordSize, PP);
+  __ StoreToOffset(value, FP, local().index() * kWordSize);
 }


 LocationSummary* ConstantInstr::MakeLocationSummary(Zone* zone,
                                                     bool opt) const {
   return LocationSummary::Make(zone,
                                0,
                                Location::RequiresRegister(),
                                LocationSummary::kNoCall);
 }


 void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   // The register allocator drops constant definitions that have no uses.
   if (!locs()->out(0).IsInvalid()) {
     const Register result = locs()->out(0).reg();
-    __ LoadObject(result, value(), PP);
+    __ LoadObject(result, value());
   }
 }


 LocationSummary* UnboxedConstantInstr::MakeLocationSummary(Zone* zone,
                                                            bool opt) const {
   const intptr_t kNumInputs = 0;
   const Location out = (representation_ == kUnboxedInt32) ?
       Location::RequiresRegister() : Location::RequiresFpuRegister();
   return LocationSummary::Make(zone,
                                kNumInputs,
                                out,
                                LocationSummary::kNoCall);
 }


 void UnboxedConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   if (!locs()->out(0).IsInvalid()) {
     switch (representation_) {
       case kUnboxedDouble:
         if (Utils::DoublesBitEqual(Double::Cast(value()).value(), 0.0)) {
           const VRegister dst = locs()->out(0).fpu_reg();
           __ veor(dst, dst, dst);
         } else {
           const VRegister dst = locs()->out(0).fpu_reg();
-          __ LoadDImmediate(dst, Double::Cast(value()).value(), PP);
+          __ LoadDImmediate(dst, Double::Cast(value()).value());
         }
         break;
       case kUnboxedInt32:
         __ LoadImmediate(locs()->out(0).reg(),
-                         static_cast<int32_t>(Smi::Cast(value()).Value()),
-                         PP);
+                         static_cast<int32_t>(Smi::Cast(value()).Value()));
         break;
       default:
         UNREACHABLE();
         break;
     }
   }
 }


 LocationSummary* AssertAssignableInstr::MakeLocationSummary(Zone* zone,
(...skipping 26 matching lines...)
                               intptr_t token_pos,
                               intptr_t deopt_id,
                               LocationSummary* locs,
                               FlowGraphCompiler* compiler) {
   // Check that the type of the value is allowed in conditional context.
   // Call the runtime if the object is not bool::true or bool::false.
   ASSERT(locs->always_calls());
   Label done;

   if (Isolate::Current()->flags().type_checks()) {
-    __ CompareObject(reg, Bool::True(), PP);
+    __ CompareObject(reg, Bool::True());
     __ b(&done, EQ);
-    __ CompareObject(reg, Bool::False(), PP);
+    __ CompareObject(reg, Bool::False());
     __ b(&done, EQ);
   } else {
     ASSERT(Isolate::Current()->flags().asserts());
-    __ CompareObject(reg, Object::null_instance(), PP);
+    __ CompareObject(reg, Object::null_instance());
     __ b(&done, NE);
   }

   __ Push(reg);  // Push the source object.
   compiler->GenerateRuntimeCall(token_pos,
                                 deopt_id,
                                 kNonBoolTypeErrorRuntimeEntry,
                                 1,
                                 locs);
   // We should never return here.
(...skipping 67 matching lines...)
 static Condition EmitSmiComparisonOp(FlowGraphCompiler* compiler,
                                      LocationSummary* locs,
                                      Token::Kind kind) {
   Location left = locs->in(0);
   Location right = locs->in(1);
   ASSERT(!left.IsConstant() || !right.IsConstant());

   Condition true_condition = TokenKindToSmiCondition(kind);

   if (left.IsConstant()) {
-    __ CompareObject(right.reg(), left.constant(), PP);
+    __ CompareObject(right.reg(), left.constant());
     true_condition = FlipCondition(true_condition);
   } else if (right.IsConstant()) {
-    __ CompareObject(left.reg(), right.constant(), PP);
+    __ CompareObject(left.reg(), right.constant());
   } else {
     __ CompareRegisters(left.reg(), right.reg());
   }
   return true_condition;
 }


 LocationSummary* EqualityCompareInstr::MakeLocationSummary(Zone* zone,
                                                            bool opt) const {
   const intptr_t kNumInputs = 2;
(...skipping 71 matching lines...)
     // Special case for NaN comparison. Result is always false unless
     // relational operator is !=.
     __ b(&is_false, VS);
   }
   EmitBranchOnCondition(compiler, true_condition, labels);
   // TODO(zra): instead of branching, use the csel instruction to get
   // True or False into result.
   const Register result = locs()->out(0).reg();
   Label done;
   __ Bind(&is_false);
-  __ LoadObject(result, Bool::False(), PP);
+  __ LoadObject(result, Bool::False());
   __ b(&done);
   __ Bind(&is_true);
-  __ LoadObject(result, Bool::True(), PP);
+  __ LoadObject(result, Bool::True());
   __ Bind(&done);
 }


 void EqualityCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler,
                                           BranchInstr* branch) {
   ASSERT((kind() == Token::kNE) || (kind() == Token::kEQ));

   BranchLabels labels = compiler->CreateBranchLabels(branch);
   Condition true_condition = EmitComparisonCode(compiler, labels);
(...skipping 21 matching lines...)


 Condition TestSmiInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
                                            BranchLabels labels) {
   const Register left = locs()->in(0).reg();
   Location right = locs()->in(1);
   if (right.IsConstant()) {
     ASSERT(right.constant().IsSmi());
     const int64_t imm =
         reinterpret_cast<int64_t>(right.constant().raw());
-    __ TestImmediate(left, imm, PP);
+    __ TestImmediate(left, imm);
   } else {
     __ tst(left, Operand(right.reg()));
   }
   Condition true_condition = (kind() == Token::kNE) ? NE : EQ;
   return true_condition;
 }


 void TestSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   // Never emitted outside of the BranchInstr.
(...skipping 30 matching lines...)

   Label* deopt = CanDeoptimize() ?
       compiler->AddDeoptStub(deopt_id(), ICData::kDeoptTestCids) : NULL;

   const intptr_t true_result = (kind() == Token::kIS) ? 1 : 0;
   const ZoneGrowableArray<intptr_t>& data = cid_results();
   ASSERT(data[0] == kSmiCid);
   bool result = data[1] == true_result;
   __ tsti(val_reg, Immediate(kSmiTagMask));
   __ b(result ? labels.true_label : labels.false_label, EQ);
-  __ LoadClassId(cid_reg, val_reg, PP);
+  __ LoadClassId(cid_reg, val_reg);

   for (intptr_t i = 2; i < data.length(); i += 2) {
     const intptr_t test_cid = data[i];
     ASSERT(test_cid != kSmiCid);
     result = data[i + 1] == true_result;
-    __ CompareImmediate(cid_reg, test_cid, PP);
+    __ CompareImmediate(cid_reg, test_cid);
     __ b(result ? labels.true_label : labels.false_label, EQ);
   }
   // No match found, deoptimize or false.
   if (deopt == NULL) {
     Label* target = result ? labels.false_label : labels.true_label;
     if (target != labels.fall_through) {
       __ b(target);
     }
   } else {
     __ b(deopt);
(...skipping 12 matching lines...)


 void TestCidsInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   const Register result_reg = locs()->out(0).reg();
   Label is_true, is_false, done;
   BranchLabels labels = { &is_true, &is_false, &is_false };
   EmitComparisonCode(compiler, labels);
   // TODO(zra): instead of branching, use the csel instruction to get
   // True or False into result.
   __ Bind(&is_false);
-  __ LoadObject(result_reg, Bool::False(), PP);
+  __ LoadObject(result_reg, Bool::False());
   __ b(&done);
   __ Bind(&is_true);
-  __ LoadObject(result_reg, Bool::True(), PP);
+  __ LoadObject(result_reg, Bool::True());
   __ Bind(&done);
 }


 LocationSummary* RelationalOpInstr::MakeLocationSummary(Zone* zone,
                                                         bool opt) const {
   const intptr_t kNumInputs = 2;
   const intptr_t kNumTemps = 0;
   if (operation_cid() == kDoubleCid) {
     LocationSummary* summary = new(zone) LocationSummary(
(...skipping 36 matching lines...)
     // Special case for NaN comparison. Result is always false unless
     // relational operator is !=.
     __ b(&is_false, VS);
   }
   EmitBranchOnCondition(compiler, true_condition, labels);
   // TODO(zra): instead of branching, use the csel instruction to get
   // True or False into result.
   const Register result = locs()->out(0).reg();
   Label done;
   __ Bind(&is_false);
-  __ LoadObject(result, Bool::False(), PP);
+  __ LoadObject(result, Bool::False());
   __ b(&done);
   __ Bind(&is_true);
-  __ LoadObject(result, Bool::True(), PP);
+  __ LoadObject(result, Bool::True());
   __ Bind(&done);
 }


 void RelationalOpInstr::EmitBranchCode(FlowGraphCompiler* compiler,
                                        BranchInstr* branch) {
   BranchLabels labels = compiler->CreateBranchLabels(branch);
   Condition true_condition = EmitComparisonCode(compiler, labels);
   if ((operation_cid() == kDoubleCid) && (true_condition != NE)) {
     // Special case for NaN comparison. Result is always false unless
     // relational operator is !=.
     __ b(labels.false_label, VS);
   }
   EmitBranchOnCondition(compiler, true_condition, labels);
 }


 LocationSummary* NativeCallInstr::MakeLocationSummary(Zone* zone,
                                                       bool opt) const {
   return MakeCallSummary(zone);
 }


 void NativeCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   const Register result = locs()->out(0).reg();

   // Push the result place holder initialized to NULL.
-  __ PushObject(Object::null_object(), PP);
+  __ PushObject(Object::null_object());
   // Pass a pointer to the first argument in R2.
   if (!function().HasOptionalParameters()) {
     __ AddImmediate(R2, FP, (kParamEndSlotFromFp +
-                             function().NumParameters()) * kWordSize, PP);
+                             function().NumParameters()) * kWordSize);
   } else {
-    __ AddImmediate(R2, FP, kFirstLocalSlotFromFp * kWordSize, PP);
+    __ AddImmediate(R2, FP, kFirstLocalSlotFromFp * kWordSize);
   }
   // Compute the effective address. When running under the simulator,
   // this is a redirection address that forces the simulator to call
   // into the runtime system.
   uword entry = reinterpret_cast<uword>(native_c_function());
   const intptr_t argc_tag = NativeArguments::ComputeArgcTag(function());
   const bool is_leaf_call =
       (argc_tag & NativeArguments::AutoSetupScopeMask()) == 0;
   const ExternalLabel* stub_entry;
   if (is_bootstrap_native() || is_leaf_call) {
     stub_entry = &StubCode::CallBootstrapCFunctionLabel();
 #if defined(USING_SIMULATOR)
     entry = Simulator::RedirectExternalReference(
         entry, Simulator::kBootstrapNativeCall, function().NumParameters());
 #endif
   } else {
     // In the case of non bootstrap native methods the CallNativeCFunction
     // stub generates the redirection address when running under the simulator
     // and hence we do not change 'entry' here.
     stub_entry = &StubCode::CallNativeCFunctionLabel();
 #if defined(USING_SIMULATOR)
     if (!function().IsNativeAutoSetupScope()) {
       entry = Simulator::RedirectExternalReference(
           entry, Simulator::kBootstrapNativeCall, function().NumParameters());
     }
 #endif
   }
-  __ LoadImmediate(R5, entry, PP);
-  __ LoadImmediate(R1, argc_tag, PP);
+  __ LoadImmediate(R5, entry);
+  __ LoadImmediate(R1, argc_tag);
   compiler->GenerateCall(token_pos(),
                          stub_entry,
                          RawPcDescriptors::kOther,
                          locs());
   __ Pop(result);
 }


 LocationSummary* StringFromCharCodeInstr::MakeLocationSummary(Zone* zone,
                                                               bool opt) const {
   const intptr_t kNumInputs = 1;
   // TODO(fschneider): Allow immediate operands for the char code.
   return LocationSummary::Make(zone,
                                kNumInputs,
                                Location::RequiresRegister(),
                                LocationSummary::kNoCall);
 }


 void StringFromCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   ASSERT(compiler->is_optimizing());
   const Register char_code = locs()->in(0).reg();
   const Register result = locs()->out(0).reg();
   __ LoadImmediate(
-      result, reinterpret_cast<uword>(Symbols::PredefinedAddress()), PP);
+      result, reinterpret_cast<uword>(Symbols::PredefinedAddress()));
   __ AddImmediate(
-      result, result, Symbols::kNullCharCodeSymbolOffset * kWordSize, PP);
+      result, result, Symbols::kNullCharCodeSymbolOffset * kWordSize);
   __ SmiUntag(TMP, char_code);  // Untag to use scaled adress mode.
   __ ldr(result, Address(result, TMP, UXTX, Address::Scaled));
 }


 LocationSummary* StringToCharCodeInstr::MakeLocationSummary(Zone* zone,
                                                             bool opt) const {
   const intptr_t kNumInputs = 1;
   return LocationSummary::Make(zone,
                                kNumInputs,
                                Location::RequiresRegister(),
                                LocationSummary::kNoCall);
 }


 void StringToCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   ASSERT(cid_ == kOneByteStringCid);
   const Register str = locs()->in(0).reg();
   const Register result = locs()->out(0).reg();
-  __ LoadFieldFromOffset(result, str, String::length_offset(), PP);
+  __ LoadFieldFromOffset(result, str, String::length_offset());
   __ ldr(TMP, FieldAddress(str, OneByteString::data_offset()), kUnsignedByte);
-  __ CompareImmediate(result, Smi::RawValue(1), PP);
-  __ LoadImmediate(result, -1, PP);
+  __ CompareImmediate(result, Smi::RawValue(1));
+  __ LoadImmediate(result, -1);
   __ csel(result, TMP, result, EQ);
   __ SmiTag(result);
 }


 LocationSummary* StringInterpolateInstr::MakeLocationSummary(Zone* zone,
                                                              bool opt) const {
   const intptr_t kNumInputs = 1;
   const intptr_t kNumTemps = 0;
   LocationSummary* summary = new(zone) LocationSummary(
(...skipping 27 matching lines...)
                                kNumInputs,
                                Location::RequiresRegister(),
                                LocationSummary::kNoCall);
 }


 void LoadUntaggedInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   const Register obj = locs()->in(0).reg();
   const Register result = locs()->out(0).reg();
   if (object()->definition()->representation() == kUntagged) {
-    __ LoadFromOffset(result, obj, offset(), PP);
+    __ LoadFromOffset(result, obj, offset());
   } else {
     ASSERT(object()->definition()->representation() == kTagged);
-    __ LoadFieldFromOffset(result, obj, offset(), PP);
+    __ LoadFieldFromOffset(result, obj, offset());
   }
 }


 LocationSummary* LoadClassIdInstr::MakeLocationSummary(Zone* zone,
                                                        bool opt) const {
   const intptr_t kNumInputs = 1;
   return LocationSummary::Make(zone,
                                kNumInputs,
                                Location::RequiresRegister(),
                                LocationSummary::kNoCall);
 }


 void LoadClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   const Register object = locs()->in(0).reg();
   const Register result = locs()->out(0).reg();
   static const intptr_t kSmiCidSource =
       static_cast<intptr_t>(kSmiCid) << RawObject::kClassIdTagPos;

-  __ LoadImmediate(TMP, reinterpret_cast<int64_t>(&kSmiCidSource) + 1, PP);
+  __ LoadImmediate(TMP, reinterpret_cast<int64_t>(&kSmiCidSource) + 1);
   __ tsti(object, Immediate(kSmiTagMask));
   __ csel(TMP, TMP, object, EQ);
-  __ LoadClassId(result, TMP, PP);
+  __ LoadClassId(result, TMP);
   __ SmiTag(result);
 }


 CompileType LoadIndexedInstr::ComputeType() const {
   switch (class_id_) {
     case kArrayCid:
     case kImmutableArrayCid:
       return CompileType::Dynamic();

(...skipping 353 matching lines...)
         const Register value = locs()->in(2).reg();
         __ StoreIntoObjectNoBarrier(array, element_address, value);
       }
       break;
     case kTypedDataInt8ArrayCid:
     case kTypedDataUint8ArrayCid:
     case kExternalTypedDataUint8ArrayCid:
     case kOneByteStringCid: {
       if (locs()->in(2).IsConstant()) {
         const Smi& constant = Smi::Cast(locs()->in(2).constant());
-        __ LoadImmediate(TMP, static_cast<int8_t>(constant.Value()), PP);
+        __ LoadImmediate(TMP, static_cast<int8_t>(constant.Value()));
         __ str(TMP, element_address, kUnsignedByte);
       } else {
         const Register value = locs()->in(2).reg();
         __ SmiUntag(TMP, value);
         __ str(TMP, element_address, kUnsignedByte);
       }
       break;
     }
     case kTypedDataUint8ClampedArrayCid:
     case kExternalTypedDataUint8ClampedArrayCid: {
       if (locs()->in(2).IsConstant()) {
         const Smi& constant = Smi::Cast(locs()->in(2).constant());
         intptr_t value = constant.Value();
         // Clamp to 0x0 or 0xFF respectively.
         if (value > 0xFF) {
           value = 0xFF;
         } else if (value < 0) {
           value = 0;
         }
-        __ LoadImmediate(TMP, static_cast<int8_t>(value), PP);
+        __ LoadImmediate(TMP, static_cast<int8_t>(value));
         __ str(TMP, element_address, kUnsignedByte);
       } else {
         const Register value = locs()->in(2).reg();
-        __ CompareImmediate(value, 0x1FE, PP);  // Smi value and smi 0xFF.
+        __ CompareImmediate(value, 0x1FE);  // Smi value and smi 0xFF.
         // Clamp to 0x00 or 0xFF respectively.
         __ csetm(TMP, GT);  // TMP = value > 0x1FE ? -1 : 0.
         __ csel(TMP, value, TMP, LS);  // TMP = value in range ? value : TMP.
         __ SmiUntag(TMP);
         __ str(TMP, element_address, kUnsignedByte);
       }
       break;
     }
     case kTypedDataInt16ArrayCid:
     case kTypedDataUint16ArrayCid: {
(...skipping 30 matching lines...)
   }
 }


 static void LoadValueCid(FlowGraphCompiler* compiler,
                          Register value_cid_reg,
                          Register value_reg,
                          Label* value_is_smi = NULL) {
   Label done;
   if (value_is_smi == NULL) {
-    __ LoadImmediate(value_cid_reg, kSmiCid, PP);
+    __ LoadImmediate(value_cid_reg, kSmiCid);
   }
   __ tsti(value_reg, Immediate(kSmiTagMask));
   if (value_is_smi == NULL) {
     __ b(&done, EQ);
   } else {
     __ b(value_is_smi, EQ);
   }
-  __ LoadClassId(value_cid_reg, value_reg, PP);
+  __ LoadClassId(value_cid_reg, value_reg);
   __ Bind(&done);
 }


 LocationSummary* GuardFieldClassInstr::MakeLocationSummary(Zone* zone,
                                                            bool opt) const {
   const intptr_t kNumInputs = 1;

   const intptr_t value_cid = value()->Type()->ToCid();
   const intptr_t field_cid = field().guarded_cid();
(...skipping 54 matching lines...)
       locs()->temp(locs()->temp_count() - 1).reg() : kNoRegister;

   Label ok, fail_label;

   Label* deopt = compiler->is_optimizing() ?
       compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField) : NULL;

   Label* fail = (deopt != NULL) ? deopt : &fail_label;

   if (emit_full_guard) {
-    __ LoadObject(field_reg, Field::ZoneHandle(field().raw()), PP);
+    __ LoadObject(field_reg, Field::ZoneHandle(field().raw()));

     FieldAddress field_cid_operand(
         field_reg, Field::guarded_cid_offset(), kUnsignedWord);
     FieldAddress field_nullability_operand(
         field_reg, Field::is_nullable_offset(), kUnsignedWord);

     if (value_cid == kDynamicCid) {
       LoadValueCid(compiler, value_cid_reg, value_reg);
       Label skip_length_check;
       __ ldr(TMP, field_cid_operand, kUnsignedWord);
       __ CompareRegisters(value_cid_reg, TMP);
       __ b(&ok, EQ);
       __ ldr(TMP, field_nullability_operand, kUnsignedWord);
       __ CompareRegisters(value_cid_reg, TMP);
     } else if (value_cid == kNullCid) {
       __ ldr(value_cid_reg, field_nullability_operand, kUnsignedWord);
-      __ CompareImmediate(value_cid_reg, value_cid, PP);
+      __ CompareImmediate(value_cid_reg, value_cid);
     } else {
       Label skip_length_check;
       __ ldr(value_cid_reg, field_cid_operand, kUnsignedWord);
-      __ CompareImmediate(value_cid_reg, value_cid, PP);
+      __ CompareImmediate(value_cid_reg, value_cid);
     }
     __ b(&ok, EQ);

     // Check if the tracked state of the guarded field can be initialized
     // inline. If the field needs length check we fall through to runtime
     // which is responsible for computing offset of the length field
     // based on the class id.
     // Length guard will be emitted separately when needed via GuardFieldLength
     // instruction after GuardFieldClass.
     if (!field().needs_length_check()) {
       // Uninitialized field can be handled inline. Check if the
       // field is still unitialized.
       __ ldr(TMP, field_cid_operand, kUnsignedWord);
-      __ CompareImmediate(TMP, kIllegalCid, PP);
+      __ CompareImmediate(TMP, kIllegalCid);
       __ b(fail, NE);

       if (value_cid == kDynamicCid) {
         __ str(value_cid_reg, field_cid_operand, kUnsignedWord);
         __ str(value_cid_reg, field_nullability_operand, kUnsignedWord);
       } else {
-        __ LoadImmediate(TMP, value_cid, PP);
+        __ LoadImmediate(TMP, value_cid);
         __ str(TMP, field_cid_operand, kUnsignedWord);
         __ str(TMP, field_nullability_operand, kUnsignedWord);
       }

       if (deopt == NULL) {
         ASSERT(!compiler->is_optimizing());
         __ b(&ok);
       }
     }

     if (deopt == NULL) {
       ASSERT(!compiler->is_optimizing());
       __ Bind(fail);

       __ LoadFieldFromOffset(
-          TMP, field_reg, Field::guarded_cid_offset(), PP, kUnsignedWord);
-      __ CompareImmediate(TMP, kDynamicCid, PP);
+          TMP, field_reg, Field::guarded_cid_offset(), kUnsignedWord);
+      __ CompareImmediate(TMP, kDynamicCid);
       __ b(&ok, EQ);

       __ Push(field_reg);
       __ Push(value_reg);
       __ CallRuntime(kUpdateFieldCidRuntimeEntry, 2);
       __ Drop(2);  // Drop the field and the value.
     }
   } else {
     ASSERT(compiler->is_optimizing());
     ASSERT(deopt != NULL);

     // Field guard class has been initialized and is known.
     if (value_cid == kDynamicCid) {
       // Value's class id is not known.
       __ tsti(value_reg, Immediate(kSmiTagMask));

       if (field_cid != kSmiCid) {
         __ b(fail, EQ);
-        __ LoadClassId(value_cid_reg, value_reg, PP);
-        __ CompareImmediate(value_cid_reg, field_cid, PP);
+        __ LoadClassId(value_cid_reg, value_reg);
+        __ CompareImmediate(value_cid_reg, field_cid);
       }

       if (field().is_nullable() && (field_cid != kNullCid)) {
         __ b(&ok, EQ);
-        __ CompareObject(value_reg, Object::null_object(), PP);
+        __ CompareObject(value_reg, Object::null_object());
       }

       __ b(fail, NE);
     } else {
       // Both value's and field's class id is known.
       ASSERT((value_cid != field_cid) && (value_cid != nullability));
       __ b(fail);
     }
   }
   __ Bind(&ok);
(...skipping 35 matching lines...)
   const Register value_reg = locs()->in(0).reg();

   if (!compiler->is_optimizing() ||
       (field().guarded_list_length() == Field::kUnknownFixedLength)) {
     const Register field_reg = locs()->temp(0).reg();
     const Register offset_reg = locs()->temp(1).reg();
     const Register length_reg = locs()->temp(2).reg();

     Label ok;

-    __ LoadObject(field_reg, Field::ZoneHandle(field().raw()), PP);
+    __ LoadObject(field_reg, Field::ZoneHandle(field().raw()));

     __ ldr(offset_reg,
            FieldAddress(field_reg,
                         Field::guarded_list_length_in_object_offset_offset()),
            kByte);
     __ ldr(length_reg, FieldAddress(field_reg,
                                     Field::guarded_list_length_offset()));

     __ tst(offset_reg, Operand(offset_reg));
     __ b(&ok, MI);
(...skipping 18 matching lines...)

     __ Bind(&ok);
   } else {
     ASSERT(compiler->is_optimizing());
     ASSERT(field().guarded_list_length() >= 0);
     ASSERT(field().guarded_list_length_in_object_offset() !=
            Field::kUnknownLengthOffset);

     __ ldr(TMP, FieldAddress(value_reg,
                              field().guarded_list_length_in_object_offset()));
-    __ CompareImmediate(TMP, Smi::RawValue(field().guarded_list_length()), PP);
+    __ CompareImmediate(TMP, Smi::RawValue(field().guarded_list_length()));
     __ b(deopt, NE);
   }
 }


 class BoxAllocationSlowPath : public SlowPathCode {
  public:
   BoxAllocationSlowPath(Instruction* instruction,
                         const Class& cls,
                         Register result)
(...skipping 27 matching lines...)
     compiler->RestoreLiveRegisters(locs);
     __ b(exit_label());
   }

   static void Allocate(FlowGraphCompiler* compiler,
                        Instruction* instruction,
                        const Class& cls,
                        Register result,
                        Register temp) {
     if (compiler->intrinsic_mode()) {
-      __ TryAllocate(cls,
-                     compiler->intrinsic_slow_path_label(),
-                     result,
-                     temp,
-                     PP);
+      __ TryAllocate(cls, compiler->intrinsic_slow_path_label(), result, temp);
     } else {
       BoxAllocationSlowPath* slow_path =
           new BoxAllocationSlowPath(instruction, cls, result);
       compiler->AddSlowPathCode(slow_path);

-      __ TryAllocate(cls,
-                     slow_path->entry_label(),
-                     result,
-                     temp,
-                     PP);
+      __ TryAllocate(cls, slow_path->entry_label(), result, temp);
       __ Bind(slow_path->exit_label());
     }
   }

  private:
   Instruction* instruction_;
   const Class& cls_;
   const Register result_;
 };


 static void EnsureMutableBox(FlowGraphCompiler* compiler,
                              StoreInstanceFieldInstr* instruction,
                              Register box_reg,
                              const Class& cls,
                              Register instance_reg,
                              intptr_t offset,
                              Register temp) {
   Label done;
-  __ LoadFieldFromOffset(box_reg, instance_reg, offset, PP);
-  __ CompareObject(box_reg, Object::null_object(), PP);
+  __ LoadFieldFromOffset(box_reg, instance_reg, offset);
+  __ CompareObject(box_reg, Object::null_object());
   __ b(&done, NE);
   BoxAllocationSlowPath::Allocate(
       compiler, instruction, cls, box_reg, temp);
   __ mov(temp, box_reg);
-  __ StoreIntoObjectOffset(instance_reg, offset, temp, PP);
+  __ StoreIntoObjectOffset(instance_reg, offset, temp);
   __ Bind(&done);
 }


 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
                                                               bool opt) const {
   const intptr_t kNumInputs = 2;
   const intptr_t kNumTemps =
       (IsUnboxedStore() && opt) ? 2 :
           ((IsPotentialUnboxedStore()) ? 2 : 0);
(...skipping 47 matching lines...)
1797 break; 1791 break;
1798 case kFloat64x2Cid: 1792 case kFloat64x2Cid:
1799 cls = &compiler->float64x2_class(); 1793 cls = &compiler->float64x2_class();
1800 break; 1794 break;
1801 default: 1795 default:
1802 UNREACHABLE(); 1796 UNREACHABLE();
1803 } 1797 }
1804 1798
1805 BoxAllocationSlowPath::Allocate(compiler, this, *cls, temp, temp2); 1799 BoxAllocationSlowPath::Allocate(compiler, this, *cls, temp, temp2);
1806 __ mov(temp2, temp); 1800 __ mov(temp2, temp);
1807 __ StoreIntoObjectOffset(instance_reg, offset_in_bytes_, temp2, PP); 1801 __ StoreIntoObjectOffset(instance_reg, offset_in_bytes_, temp2);
1808 } else { 1802 } else {
1809 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes_, PP); 1803 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes_);
1810 } 1804 }
1811 switch (cid) { 1805 switch (cid) {
1812 case kDoubleCid: 1806 case kDoubleCid:
1813 __ Comment("UnboxedDoubleStoreInstanceFieldInstr"); 1807 __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
1814 __ StoreDFieldToOffset(value, temp, Double::value_offset(), PP); 1808 __ StoreDFieldToOffset(value, temp, Double::value_offset());
1815 break; 1809 break;
1816 case kFloat32x4Cid: 1810 case kFloat32x4Cid:
1817 __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr"); 1811 __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
1818 __ StoreQFieldToOffset(value, temp, Float32x4::value_offset(), PP); 1812 __ StoreQFieldToOffset(value, temp, Float32x4::value_offset());
1819 break; 1813 break;
1820 case kFloat64x2Cid: 1814 case kFloat64x2Cid:
1821 __ Comment("UnboxedFloat64x2StoreInstanceFieldInstr"); 1815 __ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
1822 __ StoreQFieldToOffset(value, temp, Float64x2::value_offset(), PP); 1816 __ StoreQFieldToOffset(value, temp, Float64x2::value_offset());
1823 break; 1817 break;
1824 default: 1818 default:
1825 UNREACHABLE(); 1819 UNREACHABLE();
1826 } 1820 }
1827 1821
1828 return; 1822 return;
1829 } 1823 }
1830 1824
1831 if (IsPotentialUnboxedStore()) { 1825 if (IsPotentialUnboxedStore()) {
1832 const Register value_reg = locs()->in(1).reg(); 1826 const Register value_reg = locs()->in(1).reg();
1833 const Register temp = locs()->temp(0).reg(); 1827 const Register temp = locs()->temp(0).reg();
1834 const Register temp2 = locs()->temp(1).reg(); 1828 const Register temp2 = locs()->temp(1).reg();
1835 1829
1836 if (ShouldEmitStoreBarrier()) { 1830 if (ShouldEmitStoreBarrier()) {
1837 // Value input is a writable register and should be manually preserved 1831 // Value input is a writable register and should be manually preserved
1838 // across the allocation slow path. 1832 // across the allocation slow path.
1839 locs()->live_registers()->Add(locs()->in(1), kTagged); 1833 locs()->live_registers()->Add(locs()->in(1), kTagged);
1840 } 1834 }
1841 1835
1842 Label store_pointer; 1836 Label store_pointer;
1843 Label store_double; 1837 Label store_double;
1844 Label store_float32x4; 1838 Label store_float32x4;
1845 Label store_float64x2; 1839 Label store_float64x2;
1846 1840
1847 __ LoadObject(temp, Field::ZoneHandle(field().raw()), PP); 1841 __ LoadObject(temp, Field::ZoneHandle(field().raw()));
1848 1842
1849 __ LoadFieldFromOffset(temp2, temp, Field::is_nullable_offset(), PP, 1843 __ LoadFieldFromOffset(temp2, temp, Field::is_nullable_offset(),
1850 kUnsignedWord); 1844 kUnsignedWord);
1851 __ CompareImmediate(temp2, kNullCid, PP); 1845 __ CompareImmediate(temp2, kNullCid);
1852 __ b(&store_pointer, EQ); 1846 __ b(&store_pointer, EQ);
1853 1847
1854 __ LoadFromOffset( 1848 __ LoadFromOffset(
1855 temp2, temp, Field::kind_bits_offset() - kHeapObjectTag, 1849 temp2, temp, Field::kind_bits_offset() - kHeapObjectTag,
1856 PP, kUnsignedByte); 1850 kUnsignedByte);
1857 __ tsti(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); 1851 __ tsti(temp2, Immediate(1 << Field::kUnboxingCandidateBit));
1858 __ b(&store_pointer, EQ); 1852 __ b(&store_pointer, EQ);
1859 1853
1860 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), PP, 1854 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(),
1861 kUnsignedWord); 1855 kUnsignedWord);
1862 __ CompareImmediate(temp2, kDoubleCid, PP); 1856 __ CompareImmediate(temp2, kDoubleCid);
1863 __ b(&store_double, EQ); 1857 __ b(&store_double, EQ);
1864 1858
1865 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), PP, 1859 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(),
1866 kUnsignedWord); 1860 kUnsignedWord);
1867 __ CompareImmediate(temp2, kFloat32x4Cid, PP); 1861 __ CompareImmediate(temp2, kFloat32x4Cid);
1868 __ b(&store_float32x4, EQ); 1862 __ b(&store_float32x4, EQ);
1869 1863
1870 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(), PP, 1864 __ LoadFieldFromOffset(temp2, temp, Field::guarded_cid_offset(),
1871 kUnsignedWord); 1865 kUnsignedWord);
1872 __ CompareImmediate(temp2, kFloat64x2Cid, PP); 1866 __ CompareImmediate(temp2, kFloat64x2Cid);
1873 __ b(&store_float64x2, EQ); 1867 __ b(&store_float64x2, EQ);
1874 1868
1875 // Fall through. 1869 // Fall through.
1876 __ b(&store_pointer); 1870 __ b(&store_pointer);
1877 1871
1878 if (!compiler->is_optimizing()) { 1872 if (!compiler->is_optimizing()) {
1879 locs()->live_registers()->Add(locs()->in(0)); 1873 locs()->live_registers()->Add(locs()->in(0));
1880 locs()->live_registers()->Add(locs()->in(1)); 1874 locs()->live_registers()->Add(locs()->in(1));
1881 } 1875 }
1882 1876
1883 { 1877 {
1884 __ Bind(&store_double); 1878 __ Bind(&store_double);
1885 EnsureMutableBox(compiler, 1879 EnsureMutableBox(compiler,
1886 this, 1880 this,
1887 temp, 1881 temp,
1888 compiler->double_class(), 1882 compiler->double_class(),
1889 instance_reg, 1883 instance_reg,
1890 offset_in_bytes_, 1884 offset_in_bytes_,
1891 temp2); 1885 temp2);
1892 __ LoadDFieldFromOffset(VTMP, value_reg, Double::value_offset(), PP); 1886 __ LoadDFieldFromOffset(VTMP, value_reg, Double::value_offset());
1893 __ StoreDFieldToOffset(VTMP, temp, Double::value_offset(), PP); 1887 __ StoreDFieldToOffset(VTMP, temp, Double::value_offset());
1894 __ b(&skip_store); 1888 __ b(&skip_store);
1895 } 1889 }
1896 1890
1897 { 1891 {
1898 __ Bind(&store_float32x4); 1892 __ Bind(&store_float32x4);
1899 EnsureMutableBox(compiler, 1893 EnsureMutableBox(compiler,
1900 this, 1894 this,
1901 temp, 1895 temp,
1902 compiler->float32x4_class(), 1896 compiler->float32x4_class(),
1903 instance_reg, 1897 instance_reg,
1904 offset_in_bytes_, 1898 offset_in_bytes_,
1905 temp2); 1899 temp2);
1906 __ LoadQFieldFromOffset(VTMP, value_reg, Float32x4::value_offset(), PP); 1900 __ LoadQFieldFromOffset(VTMP, value_reg, Float32x4::value_offset());
1907 __ StoreQFieldToOffset(VTMP, temp, Float32x4::value_offset(), PP); 1901 __ StoreQFieldToOffset(VTMP, temp, Float32x4::value_offset());
1908 __ b(&skip_store); 1902 __ b(&skip_store);
1909 } 1903 }
1910 1904
1911 { 1905 {
1912 __ Bind(&store_float64x2); 1906 __ Bind(&store_float64x2);
1913 EnsureMutableBox(compiler, 1907 EnsureMutableBox(compiler,
1914 this, 1908 this,
1915 temp, 1909 temp,
1916 compiler->float64x2_class(), 1910 compiler->float64x2_class(),
1917 instance_reg, 1911 instance_reg,
1918 offset_in_bytes_, 1912 offset_in_bytes_,
1919 temp2); 1913 temp2);
1920 __ LoadQFieldFromOffset(VTMP, value_reg, Float64x2::value_offset(), PP); 1914 __ LoadQFieldFromOffset(VTMP, value_reg, Float64x2::value_offset());
1921 __ StoreQFieldToOffset(VTMP, temp, Float64x2::value_offset(), PP); 1915 __ StoreQFieldToOffset(VTMP, temp, Float64x2::value_offset());
1922 __ b(&skip_store); 1916 __ b(&skip_store);
1923 } 1917 }
1924 1918
1925 __ Bind(&store_pointer); 1919 __ Bind(&store_pointer);
1926 } 1920 }
1927 1921
1928 if (ShouldEmitStoreBarrier()) { 1922 if (ShouldEmitStoreBarrier()) {
1929 const Register value_reg = locs()->in(1).reg(); 1923 const Register value_reg = locs()->in(1).reg();
1930 __ StoreIntoObjectOffset( 1924 __ StoreIntoObjectOffset(
1931 instance_reg, offset_in_bytes_, value_reg, PP, CanValueBeSmi()); 1925 instance_reg, offset_in_bytes_, value_reg, CanValueBeSmi());
1932 } else { 1926 } else {
1933 if (locs()->in(1).IsConstant()) { 1927 if (locs()->in(1).IsConstant()) {
1934 __ StoreIntoObjectOffsetNoBarrier( 1928 __ StoreIntoObjectOffsetNoBarrier(
1935 instance_reg, 1929 instance_reg, offset_in_bytes_, locs()->in(1).constant());
1936 offset_in_bytes_,
1937 locs()->in(1).constant(),
1938 PP);
1939 } else { 1930 } else {
1940 const Register value_reg = locs()->in(1).reg(); 1931 const Register value_reg = locs()->in(1).reg();
1941 __ StoreIntoObjectOffsetNoBarrier( 1932 __ StoreIntoObjectOffsetNoBarrier(
1942 instance_reg, 1933 instance_reg, offset_in_bytes_, value_reg);
1943 offset_in_bytes_,
1944 value_reg,
1945 PP);
1946 } 1934 }
1947 } 1935 }
1948 __ Bind(&skip_store); 1936 __ Bind(&skip_store);
1949 } 1937 }
1950 1938
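Note: the tail of the store above picks one of three store forms. As a sketch with hypothetical helper names: a value that may be a heap object the GC must track goes through the generational write barrier; constants and values known not to need the store buffer skip it.

  void emit_store(bool needs_barrier, bool value_is_constant) {
    if (needs_barrier) {
      store_with_barrier();      // StoreIntoObjectOffset
    } else if (value_is_constant) {
      store_plain_constant();    // StoreIntoObjectOffsetNoBarrier (constant)
    } else {
      store_plain_register();    // StoreIntoObjectOffsetNoBarrier (register)
    }
  }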
1951 1939
1952 LocationSummary* LoadStaticFieldInstr::MakeLocationSummary(Zone* zone, 1940 LocationSummary* LoadStaticFieldInstr::MakeLocationSummary(Zone* zone,
1953 bool opt) const { 1941 bool opt) const {
1954 const intptr_t kNumInputs = 1; 1942 const intptr_t kNumInputs = 1;
1955 const intptr_t kNumTemps = 0; 1943 const intptr_t kNumTemps = 0;
1956 LocationSummary* summary = new(zone) LocationSummary( 1944 LocationSummary* summary = new(zone) LocationSummary(
1957 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); 1945 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
1958 summary->set_in(0, Location::RequiresRegister()); 1946 summary->set_in(0, Location::RequiresRegister());
1959 summary->set_out(0, Location::RequiresRegister()); 1947 summary->set_out(0, Location::RequiresRegister());
1960 return summary; 1948 return summary;
1961 } 1949 }
1962 1950
1963 1951
1964 // When the parser is building an implicit static getter for optimization, 1952 // When the parser is building an implicit static getter for optimization,
1965 // it can generate a function body where deoptimization ids do not line up 1953 // it can generate a function body where deoptimization ids do not line up
1966 // with the unoptimized code. 1954 // with the unoptimized code.
1967 // 1955 //
1968 // This is safe only so long as LoadStaticFieldInstr cannot deoptimize. 1956 // This is safe only so long as LoadStaticFieldInstr cannot deoptimize.
1969 void LoadStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 1957 void LoadStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
1970 const Register field = locs()->in(0).reg(); 1958 const Register field = locs()->in(0).reg();
1971 const Register result = locs()->out(0).reg(); 1959 const Register result = locs()->out(0).reg();
1972 __ LoadFieldFromOffset(result, field, Field::value_offset(), PP); 1960 __ LoadFieldFromOffset(result, field, Field::value_offset());
1973 } 1961 }
1974 1962
1975 1963
1976 LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(Zone* zone, 1964 LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(Zone* zone,
1977 bool opt) const { 1965 bool opt) const {
1978 LocationSummary* locs = new(zone) LocationSummary( 1966 LocationSummary* locs = new(zone) LocationSummary(
1979 zone, 1, 1, LocationSummary::kNoCall); 1967 zone, 1, 1, LocationSummary::kNoCall);
1980 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() 1968 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister()
1981 : Location::RequiresRegister()); 1969 : Location::RequiresRegister());
1982 locs->set_temp(0, Location::RequiresRegister()); 1970 locs->set_temp(0, Location::RequiresRegister());
1983 return locs; 1971 return locs;
1984 } 1972 }
1985 1973
1986 1974
1987 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 1975 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
1988 const Register value = locs()->in(0).reg(); 1976 const Register value = locs()->in(0).reg();
1989 const Register temp = locs()->temp(0).reg(); 1977 const Register temp = locs()->temp(0).reg();
1990 1978
1991 __ LoadObject(temp, field(), PP); 1979 __ LoadObject(temp, field());
1992 if (this->value()->NeedsStoreBuffer()) { 1980 if (this->value()->NeedsStoreBuffer()) {
1993 __ StoreIntoObjectOffset( 1981 __ StoreIntoObjectOffset(
1994 temp, Field::value_offset(), value, PP, CanValueBeSmi()); 1982 temp, Field::value_offset(), value, CanValueBeSmi());
1995 } else { 1983 } else {
1996 __ StoreIntoObjectOffsetNoBarrier(temp, Field::value_offset(), value, PP); 1984 __ StoreIntoObjectOffsetNoBarrier(temp, Field::value_offset(), value);
1997 } 1985 }
1998 } 1986 }
1999 1987
2000 1988
2001 LocationSummary* InstanceOfInstr::MakeLocationSummary(Zone* zone, 1989 LocationSummary* InstanceOfInstr::MakeLocationSummary(Zone* zone,
2002 bool opt) const { 1990 bool opt) const {
2003 const intptr_t kNumInputs = 3; 1991 const intptr_t kNumInputs = 3;
2004 const intptr_t kNumTemps = 0; 1992 const intptr_t kNumTemps = 0;
2005 LocationSummary* summary = new(zone) LocationSummary( 1993 LocationSummary* summary = new(zone) LocationSummary(
2006 zone, kNumInputs, kNumTemps, LocationSummary::kCall); 1994 zone, kNumInputs, kNumTemps, LocationSummary::kCall);
(...skipping 62 matching lines...)
2069 2057
2070 // TODO(zra): Use stp once added. 2058 // TODO(zra): Use stp once added.
2071 // Initialize all array elements to raw_null. 2059 // Initialize all array elements to raw_null.
2072 // R0: new object start as a tagged pointer. 2060 // R0: new object start as a tagged pointer.
2073 // R3: new object end address. 2061 // R3: new object end address.
2074 // R8: iterator which initially points to the start of the variable 2062 // R8: iterator which initially points to the start of the variable
2075 // data area to be initialized. 2063 // data area to be initialized.
2076 // R6: null 2064 // R6: null
2077 if (num_elements > 0) { 2065 if (num_elements > 0) {
2078 const intptr_t array_size = instance_size - sizeof(RawArray); 2066 const intptr_t array_size = instance_size - sizeof(RawArray);
2079 __ LoadObject(R6, Object::null_object(), PP); 2067 __ LoadObject(R6, Object::null_object());
2080 __ AddImmediate(R8, R0, sizeof(RawArray) - kHeapObjectTag, PP); 2068 __ AddImmediate(R8, R0, sizeof(RawArray) - kHeapObjectTag);
2081 if (array_size < (kInlineArraySize * kWordSize)) { 2069 if (array_size < (kInlineArraySize * kWordSize)) {
2082 intptr_t current_offset = 0; 2070 intptr_t current_offset = 0;
2083 while (current_offset < array_size) { 2071 while (current_offset < array_size) {
2084 __ str(R6, Address(R8, current_offset)); 2072 __ str(R6, Address(R8, current_offset));
2085 current_offset += kWordSize; 2073 current_offset += kWordSize;
2086 } 2074 }
2087 } else { 2075 } else {
2088 Label end_loop, init_loop; 2076 Label end_loop, init_loop;
2089 __ Bind(&init_loop); 2077 __ Bind(&init_loop);
2090 __ CompareRegisters(R8, R3); 2078 __ CompareRegisters(R8, R3);
2091 __ b(&end_loop, CS); 2079 __ b(&end_loop, CS);
2092 __ str(R6, Address(R8)); 2080 __ str(R6, Address(R8));
2093 __ AddImmediate(R8, R8, kWordSize, kNoPP); 2081 __ AddImmediate(R8, R8, kWordSize);
2094 __ b(&init_loop); 2082 __ b(&init_loop);
2095 __ Bind(&end_loop); 2083 __ Bind(&end_loop);
2096 } 2084 }
2097 } 2085 }
2098 __ b(done); 2086 __ b(done);
2099 } 2087 }
2100 2088
2101 2089
2102 void CreateArrayInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2090 void CreateArrayInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2103 const Register kLengthReg = R2; 2091 const Register kLengthReg = R2;
2104 const Register kElemTypeReg = R1; 2092 const Register kElemTypeReg = R1;
2105 const Register kResultReg = R0; 2093 const Register kResultReg = R0;
2106 2094
2107 ASSERT(locs()->in(kElementTypePos).reg() == kElemTypeReg); 2095 ASSERT(locs()->in(kElementTypePos).reg() == kElemTypeReg);
2108 ASSERT(locs()->in(kLengthPos).reg() == kLengthReg); 2096 ASSERT(locs()->in(kLengthPos).reg() == kLengthReg);
2109 2097
2110 if (compiler->is_optimizing() && 2098 if (compiler->is_optimizing() &&
2111 num_elements()->BindsToConstant() && 2099 num_elements()->BindsToConstant() &&
2112 num_elements()->BoundConstant().IsSmi()) { 2100 num_elements()->BoundConstant().IsSmi()) {
2113 const intptr_t length = Smi::Cast(num_elements()->BoundConstant()).Value(); 2101 const intptr_t length = Smi::Cast(num_elements()->BoundConstant()).Value();
2114 if ((length >= 0) && (length <= Array::kMaxElements)) { 2102 if ((length >= 0) && (length <= Array::kMaxElements)) {
2115 Label slow_path, done; 2103 Label slow_path, done;
2116 InlineArrayAllocation(compiler, length, &slow_path, &done); 2104 InlineArrayAllocation(compiler, length, &slow_path, &done);
2117 __ Bind(&slow_path); 2105 __ Bind(&slow_path);
2118 __ PushObject(Object::null_object(), PP); // Make room for the result. 2106 __ PushObject(Object::null_object()); // Make room for the result.
2119 __ Push(kLengthReg); // length. 2107 __ Push(kLengthReg); // length.
2120 __ Push(kElemTypeReg); 2108 __ Push(kElemTypeReg);
2121 compiler->GenerateRuntimeCall(token_pos(), 2109 compiler->GenerateRuntimeCall(token_pos(),
2122 deopt_id(), 2110 deopt_id(),
2123 kAllocateArrayRuntimeEntry, 2111 kAllocateArrayRuntimeEntry,
2124 2, 2112 2,
2125 locs()); 2113 locs());
2126 __ Drop(2); 2114 __ Drop(2);
2127 __ Pop(kResultReg); 2115 __ Pop(kResultReg);
2128 __ Bind(&done); 2116 __ Bind(&done);
(...skipping 32 matching lines...)
2161 return locs; 2149 return locs;
2162 } 2150 }
2163 2151
2164 2152
2165 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2153 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2166 ASSERT(sizeof(classid_t) == kInt32Size); 2154 ASSERT(sizeof(classid_t) == kInt32Size);
2167 const Register instance_reg = locs()->in(0).reg(); 2155 const Register instance_reg = locs()->in(0).reg();
2168 if (IsUnboxedLoad() && compiler->is_optimizing()) { 2156 if (IsUnboxedLoad() && compiler->is_optimizing()) {
2169 const VRegister result = locs()->out(0).fpu_reg(); 2157 const VRegister result = locs()->out(0).fpu_reg();
2170 const Register temp = locs()->temp(0).reg(); 2158 const Register temp = locs()->temp(0).reg();
2171 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes(), PP); 2159 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes());
2172 const intptr_t cid = field()->UnboxedFieldCid(); 2160 const intptr_t cid = field()->UnboxedFieldCid();
2173 switch (cid) { 2161 switch (cid) {
2174 case kDoubleCid: 2162 case kDoubleCid:
2175 __ Comment("UnboxedDoubleLoadFieldInstr"); 2163 __ Comment("UnboxedDoubleLoadFieldInstr");
2176 __ LoadDFieldFromOffset(result, temp, Double::value_offset(), PP); 2164 __ LoadDFieldFromOffset(result, temp, Double::value_offset());
2177 break; 2165 break;
2178 case kFloat32x4Cid: 2166 case kFloat32x4Cid:
2179 __ LoadQFieldFromOffset(result, temp, Float32x4::value_offset(), PP); 2167 __ LoadQFieldFromOffset(result, temp, Float32x4::value_offset());
2180 break; 2168 break;
2181 case kFloat64x2Cid: 2169 case kFloat64x2Cid:
2182 __ LoadQFieldFromOffset(result, temp, Float64x2::value_offset(), PP); 2170 __ LoadQFieldFromOffset(result, temp, Float64x2::value_offset());
2183 break; 2171 break;
2184 default: 2172 default:
2185 UNREACHABLE(); 2173 UNREACHABLE();
2186 } 2174 }
2187 return; 2175 return;
2188 } 2176 }
2189 2177
2190 Label done; 2178 Label done;
2191 const Register result_reg = locs()->out(0).reg(); 2179 const Register result_reg = locs()->out(0).reg();
2192 if (IsPotentialUnboxedLoad()) { 2180 if (IsPotentialUnboxedLoad()) {
2193 const Register temp = locs()->temp(0).reg(); 2181 const Register temp = locs()->temp(0).reg();
2194 2182
2195 Label load_pointer; 2183 Label load_pointer;
2196 Label load_double; 2184 Label load_double;
2197 Label load_float32x4; 2185 Label load_float32x4;
2198 Label load_float64x2; 2186 Label load_float64x2;
2199 2187
2200 __ LoadObject(result_reg, Field::ZoneHandle(field()->raw()), PP); 2188 __ LoadObject(result_reg, Field::ZoneHandle(field()->raw()));
2201 2189
2202 FieldAddress field_cid_operand( 2190 FieldAddress field_cid_operand(
2203 result_reg, Field::guarded_cid_offset(), kUnsignedWord); 2191 result_reg, Field::guarded_cid_offset(), kUnsignedWord);
2204 FieldAddress field_nullability_operand( 2192 FieldAddress field_nullability_operand(
2205 result_reg, Field::is_nullable_offset(), kUnsignedWord); 2193 result_reg, Field::is_nullable_offset(), kUnsignedWord);
2206 2194
2207 __ ldr(temp, field_nullability_operand, kUnsignedWord); 2195 __ ldr(temp, field_nullability_operand, kUnsignedWord);
2208 __ CompareImmediate(temp, kNullCid, PP); 2196 __ CompareImmediate(temp, kNullCid);
2209 __ b(&load_pointer, EQ); 2197 __ b(&load_pointer, EQ);
2210 2198
2211 __ ldr(temp, field_cid_operand, kUnsignedWord); 2199 __ ldr(temp, field_cid_operand, kUnsignedWord);
2212 __ CompareImmediate(temp, kDoubleCid, PP); 2200 __ CompareImmediate(temp, kDoubleCid);
2213 __ b(&load_double, EQ); 2201 __ b(&load_double, EQ);
2214 2202
2215 __ ldr(temp, field_cid_operand, kUnsignedWord); 2203 __ ldr(temp, field_cid_operand, kUnsignedWord);
2216 __ CompareImmediate(temp, kFloat32x4Cid, PP); 2204 __ CompareImmediate(temp, kFloat32x4Cid);
2217 __ b(&load_float32x4, EQ); 2205 __ b(&load_float32x4, EQ);
2218 2206
2219 __ ldr(temp, field_cid_operand, kUnsignedWord); 2207 __ ldr(temp, field_cid_operand, kUnsignedWord);
2220 __ CompareImmediate(temp, kFloat64x2Cid, PP); 2208 __ CompareImmediate(temp, kFloat64x2Cid);
2221 __ b(&load_float64x2, EQ); 2209 __ b(&load_float64x2, EQ);
2222 2210
2223 // Fall through. 2211 // Fall through.
2224 __ b(&load_pointer); 2212 __ b(&load_pointer);
2225 2213
2226 if (!compiler->is_optimizing()) { 2214 if (!compiler->is_optimizing()) {
2227 locs()->live_registers()->Add(locs()->in(0)); 2215 locs()->live_registers()->Add(locs()->in(0));
2228 } 2216 }
2229 2217
2230 { 2218 {
2231 __ Bind(&load_double); 2219 __ Bind(&load_double);
2232 BoxAllocationSlowPath::Allocate(compiler, 2220 BoxAllocationSlowPath::Allocate(compiler,
2233 this, 2221 this,
2234 compiler->double_class(), 2222 compiler->double_class(),
2235 result_reg, 2223 result_reg,
2236 temp); 2224 temp);
2237 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes(), PP); 2225 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes());
2238 __ LoadDFieldFromOffset(VTMP, temp, Double::value_offset(), PP); 2226 __ LoadDFieldFromOffset(VTMP, temp, Double::value_offset());
2239 __ StoreDFieldToOffset(VTMP, result_reg, Double::value_offset(), PP); 2227 __ StoreDFieldToOffset(VTMP, result_reg, Double::value_offset());
2240 __ b(&done); 2228 __ b(&done);
2241 } 2229 }
2242 2230
2243 { 2231 {
2244 __ Bind(&load_float32x4); 2232 __ Bind(&load_float32x4);
2245 BoxAllocationSlowPath::Allocate(compiler, 2233 BoxAllocationSlowPath::Allocate(compiler,
2246 this, 2234 this,
2247 compiler->float32x4_class(), 2235 compiler->float32x4_class(),
2248 result_reg, 2236 result_reg,
2249 temp); 2237 temp);
2250 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes(), PP); 2238 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes());
2251 __ LoadQFieldFromOffset(VTMP, temp, Float32x4::value_offset(), PP); 2239 __ LoadQFieldFromOffset(VTMP, temp, Float32x4::value_offset());
2252 __ StoreQFieldToOffset(VTMP, result_reg, Float32x4::value_offset(), PP); 2240 __ StoreQFieldToOffset(VTMP, result_reg, Float32x4::value_offset());
2253 __ b(&done); 2241 __ b(&done);
2254 } 2242 }
2255 2243
2256 { 2244 {
2257 __ Bind(&load_float64x2); 2245 __ Bind(&load_float64x2);
2258 BoxAllocationSlowPath::Allocate(compiler, 2246 BoxAllocationSlowPath::Allocate(compiler,
2259 this, 2247 this,
2260 compiler->float64x2_class(), 2248 compiler->float64x2_class(),
2261 result_reg, 2249 result_reg,
2262 temp); 2250 temp);
2263 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes(), PP); 2251 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes());
2264 __ LoadQFieldFromOffset(VTMP, temp, Float64x2::value_offset(), PP); 2252 __ LoadQFieldFromOffset(VTMP, temp, Float64x2::value_offset());
2265 __ StoreQFieldToOffset(VTMP, result_reg, Float64x2::value_offset(), PP); 2253 __ StoreQFieldToOffset(VTMP, result_reg, Float64x2::value_offset());
2266 __ b(&done); 2254 __ b(&done);
2267 } 2255 }
2268 2256
2269 __ Bind(&load_pointer); 2257 __ Bind(&load_pointer);
2270 } 2258 }
2271 __ LoadFieldFromOffset(result_reg, instance_reg, offset_in_bytes(), PP); 2259 __ LoadFieldFromOffset(result_reg, instance_reg, offset_in_bytes());
2272 __ Bind(&done); 2260 __ Bind(&done);
2273 } 2261 }
2274 2262
2275 2263
2276 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(Zone* zone, 2264 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(Zone* zone,
2277 bool opt) const { 2265 bool opt) const {
2278 const intptr_t kNumInputs = 1; 2266 const intptr_t kNumInputs = 1;
2279 const intptr_t kNumTemps = 0; 2267 const intptr_t kNumTemps = 0;
2280 LocationSummary* locs = new(zone) LocationSummary( 2268 LocationSummary* locs = new(zone) LocationSummary(
2281 zone, kNumInputs, kNumTemps, LocationSummary::kCall); 2269 zone, kNumInputs, kNumTemps, LocationSummary::kCall);
2282 locs->set_in(0, Location::RegisterLocation(R0)); 2270 locs->set_in(0, Location::RegisterLocation(R0));
2283 locs->set_out(0, Location::RegisterLocation(R0)); 2271 locs->set_out(0, Location::RegisterLocation(R0));
2284 return locs; 2272 return locs;
2285 } 2273 }
2286 2274
2287 2275
2288 void InstantiateTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2276 void InstantiateTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2289 const Register instantiator_reg = locs()->in(0).reg(); 2277 const Register instantiator_reg = locs()->in(0).reg();
2290 const Register result_reg = locs()->out(0).reg(); 2278 const Register result_reg = locs()->out(0).reg();
2291 2279
2292 // 'instantiator_reg' is the instantiator TypeArguments object (or null). 2280 // 'instantiator_reg' is the instantiator TypeArguments object (or null).
2293 // A runtime call to instantiate the type is required. 2281 // A runtime call to instantiate the type is required.
2294 __ PushObject(Object::null_object(), PP); // Make room for the result. 2282 __ PushObject(Object::null_object()); // Make room for the result.
2295 __ PushObject(type(), PP); 2283 __ PushObject(type());
2296 __ Push(instantiator_reg); // Push instantiator type arguments. 2284 __ Push(instantiator_reg); // Push instantiator type arguments.
2297 compiler->GenerateRuntimeCall(token_pos(), 2285 compiler->GenerateRuntimeCall(token_pos(),
2298 deopt_id(), 2286 deopt_id(),
2299 kInstantiateTypeRuntimeEntry, 2287 kInstantiateTypeRuntimeEntry,
2300 2, 2288 2,
2301 locs()); 2289 locs());
2302 __ Drop(2); // Drop instantiator and uninstantiated type. 2290 __ Drop(2); // Drop instantiator and uninstantiated type.
2303 __ Pop(result_reg); // Pop instantiated type. 2291 __ Pop(result_reg); // Pop instantiated type.
2304 ASSERT(instantiator_reg == result_reg); 2292 ASSERT(instantiator_reg == result_reg);
2305 } 2293 }
(...skipping 21 matching lines...)
2327 // 'instantiator_reg' is the instantiator TypeArguments object (or null). 2315 // 'instantiator_reg' is the instantiator TypeArguments object (or null).
2328 ASSERT(!type_arguments().IsUninstantiatedIdentity() && 2316 ASSERT(!type_arguments().IsUninstantiatedIdentity() &&
2329 !type_arguments().CanShareInstantiatorTypeArguments( 2317 !type_arguments().CanShareInstantiatorTypeArguments(
2330 instantiator_class())); 2318 instantiator_class()));
2331 // If the instantiator is null and the type argument vector 2319 // If the instantiator is null and the type argument vector
2332 // instantiated from null becomes a vector of dynamic, then use null as 2320 // instantiated from null becomes a vector of dynamic, then use null as
2333 // the type arguments. 2321 // the type arguments.
2334 Label type_arguments_instantiated; 2322 Label type_arguments_instantiated;
2335 const intptr_t len = type_arguments().Length(); 2323 const intptr_t len = type_arguments().Length();
2336 if (type_arguments().IsRawInstantiatedRaw(len)) { 2324 if (type_arguments().IsRawInstantiatedRaw(len)) {
2337 __ CompareObject(instantiator_reg, Object::null_object(), PP); 2325 __ CompareObject(instantiator_reg, Object::null_object());
2338 __ b(&type_arguments_instantiated, EQ); 2326 __ b(&type_arguments_instantiated, EQ);
2339 } 2327 }
2340 2328
2341 __ LoadObject(R2, type_arguments(), PP); 2329 __ LoadObject(R2, type_arguments());
2342 __ LoadFieldFromOffset(R2, R2, TypeArguments::instantiations_offset(), PP); 2330 __ LoadFieldFromOffset(R2, R2, TypeArguments::instantiations_offset());
2343 __ AddImmediate(R2, R2, Array::data_offset() - kHeapObjectTag, PP); 2331 __ AddImmediate(R2, R2, Array::data_offset() - kHeapObjectTag);
2344 // The instantiations cache is initialized with Object::zero_array() and is 2332 // The instantiations cache is initialized with Object::zero_array() and is
2345 // therefore guaranteed to contain kNoInstantiator. No length check needed. 2333 // therefore guaranteed to contain kNoInstantiator. No length check needed.
2346 Label loop, found, slow_case; 2334 Label loop, found, slow_case;
2347 __ Bind(&loop); 2335 __ Bind(&loop);
2348 __ LoadFromOffset(R1, R2, 0 * kWordSize, PP); // Cached instantiator. 2336 __ LoadFromOffset(R1, R2, 0 * kWordSize); // Cached instantiator.
2349 __ CompareRegisters(R1, R0); 2337 __ CompareRegisters(R1, R0);
2350 __ b(&found, EQ); 2338 __ b(&found, EQ);
2351 __ AddImmediate(R2, R2, 2 * kWordSize, PP); 2339 __ AddImmediate(R2, R2, 2 * kWordSize);
2352 __ CompareImmediate(R1, Smi::RawValue(StubCode::kNoInstantiator), PP); 2340 __ CompareImmediate(R1, Smi::RawValue(StubCode::kNoInstantiator));
2353 __ b(&loop, NE); 2341 __ b(&loop, NE);
2354 __ b(&slow_case); 2342 __ b(&slow_case);
2355 __ Bind(&found); 2343 __ Bind(&found);
2356 __ LoadFromOffset(R0, R2, 1 * kWordSize, PP); // Cached instantiated args. 2344 __ LoadFromOffset(R0, R2, 1 * kWordSize); // Cached instantiated args.
2357 __ b(&type_arguments_instantiated); 2345 __ b(&type_arguments_instantiated);
2358 2346
2359 __ Bind(&slow_case); 2347 __ Bind(&slow_case);
2360 // Instantiate non-null type arguments. 2348 // Instantiate non-null type arguments.
2361 // A runtime call to instantiate the type arguments is required. 2349 // A runtime call to instantiate the type arguments is required.
2362 __ PushObject(Object::null_object(), PP); // Make room for the result. 2350 __ PushObject(Object::null_object()); // Make room for the result.
2363 __ PushObject(type_arguments(), PP); 2351 __ PushObject(type_arguments());
2364 __ Push(instantiator_reg); // Push instantiator type arguments. 2352 __ Push(instantiator_reg); // Push instantiator type arguments.
2365 compiler->GenerateRuntimeCall(token_pos(), 2353 compiler->GenerateRuntimeCall(token_pos(),
2366 deopt_id(), 2354 deopt_id(),
2367 kInstantiateTypeArgumentsRuntimeEntry, 2355 kInstantiateTypeArgumentsRuntimeEntry,
2368 2, 2356 2,
2369 locs()); 2357 locs());
2370 __ Drop(2); // Drop instantiator and uninstantiated type arguments. 2358 __ Drop(2); // Drop instantiator and uninstantiated type arguments.
2371 __ Pop(result_reg); // Pop instantiated type arguments. 2359 __ Pop(result_reg); // Pop instantiated type arguments.
2372 __ Bind(&type_arguments_instantiated); 2360 __ Bind(&type_arguments_instantiated);
2373 } 2361 }
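Note: the loop above is a linear probe of the instantiations cache. Sketched in C++ (find_cached and kNoInstantiatorSentinel are illustrative names): the cache is an array of (instantiator, result) pairs terminated by the kNoInstantiator sentinel, so the scan needs no length check.

  RawObject* find_cached(RawObject** cache, RawObject* instantiator) {
    for (intptr_t i = 0; ; i += 2) {
      if (cache[i] == instantiator) return cache[i + 1];     // found
      if (cache[i] == kNoInstantiatorSentinel) return nullptr;  // slow_case
    }
  }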
(...skipping 23 matching lines...)
2397 2385
2398 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { 2386 virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
2399 __ Comment("AllocateContextSlowPath"); 2387 __ Comment("AllocateContextSlowPath");
2400 __ Bind(entry_label()); 2388 __ Bind(entry_label());
2401 2389
2402 LocationSummary* locs = instruction_->locs(); 2390 LocationSummary* locs = instruction_->locs();
2403 locs->live_registers()->Remove(locs->out(0)); 2391 locs->live_registers()->Remove(locs->out(0));
2404 2392
2405 compiler->SaveLiveRegisters(locs); 2393 compiler->SaveLiveRegisters(locs);
2406 2394
2407 __ LoadImmediate(R1, instruction_->num_context_variables(), PP); 2395 __ LoadImmediate(R1, instruction_->num_context_variables());
2408 const ExternalLabel label(StubCode::AllocateContextEntryPoint()); 2396 const ExternalLabel label(StubCode::AllocateContextEntryPoint());
2409 compiler->GenerateCall(instruction_->token_pos(), 2397 compiler->GenerateCall(instruction_->token_pos(),
2410 &label, 2398 &label,
2411 RawPcDescriptors::kOther, 2399 RawPcDescriptors::kOther,
2412 locs); 2400 locs);
2413 ASSERT(instruction_->locs()->out(0).reg() == R0); 2401 ASSERT(instruction_->locs()->out(0).reg() == R0);
2414 compiler->RestoreLiveRegisters(instruction_->locs()); 2402 compiler->RestoreLiveRegisters(instruction_->locs());
2415 __ b(exit_label()); 2403 __ b(exit_label());
2416 } 2404 }
2417 2405
(...skipping 14 matching lines...)
2432 compiler->AddSlowPathCode(slow_path); 2420 compiler->AddSlowPathCode(slow_path);
2433 intptr_t instance_size = Context::InstanceSize(num_context_variables()); 2421 intptr_t instance_size = Context::InstanceSize(num_context_variables());
2434 2422
2435 __ TryAllocateArray(kContextCid, instance_size, slow_path->entry_label(), 2423 __ TryAllocateArray(kContextCid, instance_size, slow_path->entry_label(),
2436 result, // instance 2424 result, // instance
2437 temp0, 2425 temp0,
2438 temp1, 2426 temp1,
2439 temp2); 2427 temp2);
2440 2428
2441 // Set up the number of context variables field. 2429 // Set up the number of context variables field.
2442 __ LoadImmediate(temp0, num_context_variables(), PP); 2430 __ LoadImmediate(temp0, num_context_variables());
2443 __ str(temp0, FieldAddress(result, Context::num_variables_offset())); 2431 __ str(temp0, FieldAddress(result, Context::num_variables_offset()));
2444 2432
2445 __ Bind(slow_path->exit_label()); 2433 __ Bind(slow_path->exit_label());
2446 } 2434 }
2447 2435
2448 2436
2449 LocationSummary* AllocateContextInstr::MakeLocationSummary(Zone* zone, 2437 LocationSummary* AllocateContextInstr::MakeLocationSummary(Zone* zone,
2450 bool opt) const { 2438 bool opt) const {
2451 const intptr_t kNumInputs = 0; 2439 const intptr_t kNumInputs = 0;
2452 const intptr_t kNumTemps = 1; 2440 const intptr_t kNumTemps = 1;
2453 LocationSummary* locs = new(zone) LocationSummary( 2441 LocationSummary* locs = new(zone) LocationSummary(
2454 zone, kNumInputs, kNumTemps, LocationSummary::kCall); 2442 zone, kNumInputs, kNumTemps, LocationSummary::kCall);
2455 locs->set_temp(0, Location::RegisterLocation(R1)); 2443 locs->set_temp(0, Location::RegisterLocation(R1));
2456 locs->set_out(0, Location::RegisterLocation(R0)); 2444 locs->set_out(0, Location::RegisterLocation(R0));
2457 return locs; 2445 return locs;
2458 } 2446 }
2459 2447
2460 2448
2461 void AllocateContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2449 void AllocateContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2462 ASSERT(locs()->temp(0).reg() == R1); 2450 ASSERT(locs()->temp(0).reg() == R1);
2463 ASSERT(locs()->out(0).reg() == R0); 2451 ASSERT(locs()->out(0).reg() == R0);
2464 2452
2465 __ LoadImmediate(R1, num_context_variables(), PP); 2453 __ LoadImmediate(R1, num_context_variables());
2466 const ExternalLabel label(StubCode::AllocateContextEntryPoint()); 2454 const ExternalLabel label(StubCode::AllocateContextEntryPoint());
2467 compiler->GenerateCall(token_pos(), 2455 compiler->GenerateCall(token_pos(),
2468 &label, 2456 &label,
2469 RawPcDescriptors::kOther, 2457 RawPcDescriptors::kOther,
2470 locs()); 2458 locs());
2471 } 2459 }
2472 2460
2473 LocationSummary* InitStaticFieldInstr::MakeLocationSummary(Zone* zone, 2461 LocationSummary* InitStaticFieldInstr::MakeLocationSummary(Zone* zone,
2474 bool opt) const { 2462 bool opt) const {
2475 const intptr_t kNumInputs = 1; 2463 const intptr_t kNumInputs = 1;
2476 const intptr_t kNumTemps = 1; 2464 const intptr_t kNumTemps = 1;
2477 LocationSummary* locs = new(zone) LocationSummary( 2465 LocationSummary* locs = new(zone) LocationSummary(
2478 zone, kNumInputs, kNumTemps, LocationSummary::kCall); 2466 zone, kNumInputs, kNumTemps, LocationSummary::kCall);
2479 locs->set_in(0, Location::RegisterLocation(R0)); 2467 locs->set_in(0, Location::RegisterLocation(R0));
2480 locs->set_temp(0, Location::RegisterLocation(R1)); 2468 locs->set_temp(0, Location::RegisterLocation(R1));
2481 return locs; 2469 return locs;
2482 } 2470 }
2483 2471
2484 2472
2485 void InitStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2473 void InitStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2486 Register field = locs()->in(0).reg(); 2474 Register field = locs()->in(0).reg();
2487 Register temp = locs()->temp(0).reg(); 2475 Register temp = locs()->temp(0).reg();
2488 Label call_runtime, no_call; 2476 Label call_runtime, no_call;
2489 2477
2490 __ ldr(temp, FieldAddress(field, Field::value_offset())); 2478 __ ldr(temp, FieldAddress(field, Field::value_offset()));
2491 __ CompareObject(temp, Object::sentinel(), PP); 2479 __ CompareObject(temp, Object::sentinel());
2492 __ b(&call_runtime, EQ); 2480 __ b(&call_runtime, EQ);
2493 2481
2494 __ CompareObject(temp, Object::transition_sentinel(), PP); 2482 __ CompareObject(temp, Object::transition_sentinel());
2495 __ b(&no_call, NE); 2483 __ b(&no_call, NE);
2496 2484
2497 __ Bind(&call_runtime); 2485 __ Bind(&call_runtime);
2498 __ PushObject(Object::null_object(), PP); // Make room for (unused) result. 2486 __ PushObject(Object::null_object()); // Make room for (unused) result.
2499 __ Push(field); 2487 __ Push(field);
2500 compiler->GenerateRuntimeCall(token_pos(), 2488 compiler->GenerateRuntimeCall(token_pos(),
2501 deopt_id(), 2489 deopt_id(),
2502 kInitStaticFieldRuntimeEntry, 2490 kInitStaticFieldRuntimeEntry,
2503 1, 2491 1,
2504 locs()); 2492 locs());
2505 __ Drop(2); // Remove argument and result placeholder. 2493 __ Drop(2); // Remove argument and result placeholder.
2506 __ Bind(&no_call); 2494 __ Bind(&no_call);
2507 } 2495 }
2508 2496
2509 2497
2510 LocationSummary* CloneContextInstr::MakeLocationSummary(Zone* zone, 2498 LocationSummary* CloneContextInstr::MakeLocationSummary(Zone* zone,
2511 bool opt) const { 2499 bool opt) const {
2512 const intptr_t kNumInputs = 1; 2500 const intptr_t kNumInputs = 1;
2513 const intptr_t kNumTemps = 0; 2501 const intptr_t kNumTemps = 0;
2514 LocationSummary* locs = new(zone) LocationSummary( 2502 LocationSummary* locs = new(zone) LocationSummary(
2515 zone, kNumInputs, kNumTemps, LocationSummary::kCall); 2503 zone, kNumInputs, kNumTemps, LocationSummary::kCall);
2516 locs->set_in(0, Location::RegisterLocation(R0)); 2504 locs->set_in(0, Location::RegisterLocation(R0));
2517 locs->set_out(0, Location::RegisterLocation(R0)); 2505 locs->set_out(0, Location::RegisterLocation(R0));
2518 return locs; 2506 return locs;
2519 } 2507 }
2520 2508
2521 2509
2522 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2510 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2523 const Register context_value = locs()->in(0).reg(); 2511 const Register context_value = locs()->in(0).reg();
2524 const Register result = locs()->out(0).reg(); 2512 const Register result = locs()->out(0).reg();
2525 2513
2526 __ PushObject(Object::null_object(), PP); // Make room for the result. 2514 __ PushObject(Object::null_object()); // Make room for the result.
2527 __ Push(context_value); 2515 __ Push(context_value);
2528 compiler->GenerateRuntimeCall(token_pos(), 2516 compiler->GenerateRuntimeCall(token_pos(),
2529 deopt_id(), 2517 deopt_id(),
2530 kCloneContextRuntimeEntry, 2518 kCloneContextRuntimeEntry,
2531 1, 2519 1,
2532 locs()); 2520 locs());
2533 __ Drop(1); // Remove argument. 2521 __ Drop(1); // Remove argument.
2534 __ Pop(result); // Get result (cloned context). 2522 __ Pop(result); // Get result (cloned context).
2535 } 2523 }
2536 2524
2537 2525
2538 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone, 2526 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone,
2539 bool opt) const { 2527 bool opt) const {
2540 UNREACHABLE(); 2528 UNREACHABLE();
2541 return NULL; 2529 return NULL;
2542 } 2530 }
2543 2531
2544 2532
2545 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2533 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2546 __ Bind(compiler->GetJumpLabel(this)); 2534 __ Bind(compiler->GetJumpLabel(this));
2547 compiler->AddExceptionHandler(catch_try_index(), 2535 compiler->AddExceptionHandler(catch_try_index(),
2548 try_index(), 2536 try_index(),
2549 compiler->assembler()->CodeSize(), 2537 compiler->assembler()->CodeSize(),
2550 catch_handler_types_, 2538 catch_handler_types_,
2551 needs_stacktrace()); 2539 needs_stacktrace());
2552 2540
2553 // Restore the pool pointer. 2541 // Restore the pool pointer.
2554 __ LoadPoolPointer(PP); 2542 __ LoadPoolPointer();
2555 2543
2556 if (HasParallelMove()) { 2544 if (HasParallelMove()) {
2557 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); 2545 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
2558 } 2546 }
2559 2547
2560 // Restore SP from FP as we are coming from a throw and the code for 2548 // Restore SP from FP as we are coming from a throw and the code for
2561 // popping arguments has not been run. 2549 // popping arguments has not been run.
2562 const intptr_t fp_sp_dist = 2550 const intptr_t fp_sp_dist =
2563 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; 2551 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize;
2564 ASSERT(fp_sp_dist <= 0); 2552 ASSERT(fp_sp_dist <= 0);
2565 __ AddImmediate(SP, FP, fp_sp_dist, PP); 2553 __ AddImmediate(SP, FP, fp_sp_dist);
2566 2554
2567 // Restore stack and initialize the two exception variables: 2555 // Restore stack and initialize the two exception variables:
2568 // exception and stack trace variables. 2556 // exception and stack trace variables.
2569 __ StoreToOffset(kExceptionObjectReg, 2557 __ StoreToOffset(kExceptionObjectReg,
2570 FP, exception_var().index() * kWordSize, PP); 2558 FP, exception_var().index() * kWordSize);
2571 __ StoreToOffset(kStackTraceObjectReg, 2559 __ StoreToOffset(kStackTraceObjectReg,
2572 FP, stacktrace_var().index() * kWordSize, PP); 2560 FP, stacktrace_var().index() * kWordSize);
2573 } 2561 }
2574 2562
2575 2563
2576 LocationSummary* CheckStackOverflowInstr::MakeLocationSummary(Zone* zone, 2564 LocationSummary* CheckStackOverflowInstr::MakeLocationSummary(Zone* zone,
2577 bool opt) const { 2565 bool opt) const {
2578 const intptr_t kNumInputs = 0; 2566 const intptr_t kNumInputs = 0;
2579 const intptr_t kNumTemps = 1; 2567 const intptr_t kNumTemps = 1;
2580 LocationSummary* summary = new(zone) LocationSummary( 2568 LocationSummary* summary = new(zone) LocationSummary(
2581 zone, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); 2569 zone, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath);
2582 summary->set_temp(0, Location::RequiresRegister()); 2570 summary->set_temp(0, Location::RequiresRegister());
2583 return summary; 2571 return summary;
2584 } 2572 }
2585 2573
2586 2574
2587 class CheckStackOverflowSlowPath : public SlowPathCode { 2575 class CheckStackOverflowSlowPath : public SlowPathCode {
2588 public: 2576 public:
2589 explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction) 2577 explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction)
2590 : instruction_(instruction) { } 2578 : instruction_(instruction) { }
2591 2579
2592 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { 2580 virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
2593 if (FLAG_use_osr && osr_entry_label()->IsLinked()) { 2581 if (FLAG_use_osr && osr_entry_label()->IsLinked()) {
2594 uword flags_address = Isolate::Current()->stack_overflow_flags_address(); 2582 uword flags_address = Isolate::Current()->stack_overflow_flags_address();
2595 const Register value = instruction_->locs()->temp(0).reg(); 2583 const Register value = instruction_->locs()->temp(0).reg();
2596 __ Comment("CheckStackOverflowSlowPathOsr"); 2584 __ Comment("CheckStackOverflowSlowPathOsr");
2597 __ Bind(osr_entry_label()); 2585 __ Bind(osr_entry_label());
2598 __ LoadImmediate(TMP, flags_address, PP); 2586 __ LoadImmediate(TMP, flags_address);
2599 __ LoadImmediate(value, Isolate::kOsrRequest, PP); 2587 __ LoadImmediate(value, Isolate::kOsrRequest);
2600 __ str(value, Address(TMP)); 2588 __ str(value, Address(TMP));
2601 } 2589 }
2602 __ Comment("CheckStackOverflowSlowPath"); 2590 __ Comment("CheckStackOverflowSlowPath");
2603 __ Bind(entry_label()); 2591 __ Bind(entry_label());
2604 compiler->SaveLiveRegisters(instruction_->locs()); 2592 compiler->SaveLiveRegisters(instruction_->locs());
2605 // pending_deoptimization_env_ is needed to generate a runtime call that 2593 // pending_deoptimization_env_ is needed to generate a runtime call that
2606 // may throw an exception. 2594 // may throw an exception.
2607 ASSERT(compiler->pending_deoptimization_env_ == NULL); 2595 ASSERT(compiler->pending_deoptimization_env_ == NULL);
2608 Environment* env = compiler->SlowPathEnvironmentFor(instruction_); 2596 Environment* env = compiler->SlowPathEnvironmentFor(instruction_);
2609 compiler->pending_deoptimization_env_ = env; 2597 compiler->pending_deoptimization_env_ = env;
(...skipping 23 matching lines...)
2633 CheckStackOverflowInstr* instruction_; 2621 CheckStackOverflowInstr* instruction_;
2634 Label osr_entry_label_; 2622 Label osr_entry_label_;
2635 }; 2623 };
2636 2624
2637 2625
2638 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 2626 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
2639 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this); 2627 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this);
2640 compiler->AddSlowPathCode(slow_path); 2628 compiler->AddSlowPathCode(slow_path);
2641 2629
2642 if (compiler->is_optimizing()) { 2630 if (compiler->is_optimizing()) {
2643 __ LoadImmediate(TMP, Isolate::Current()->stack_limit_address(), PP); 2631 __ LoadImmediate(TMP, Isolate::Current()->stack_limit_address());
2644 __ ldr(TMP, Address(TMP)); 2632 __ ldr(TMP, Address(TMP));
2645 } else { 2633 } else {
2646 __ LoadIsolate(TMP); 2634 __ LoadIsolate(TMP);
2647 __ ldr(TMP, Address(TMP, Isolate::stack_limit_offset())); 2635 __ ldr(TMP, Address(TMP, Isolate::stack_limit_offset()));
2648 } 2636 }
2649 __ CompareRegisters(SP, TMP); 2637 __ CompareRegisters(SP, TMP);
2650 __ b(slow_path->entry_label(), LS); 2638 __ b(slow_path->entry_label(), LS);
2651 if (compiler->CanOSRFunction() && in_loop()) { 2639 if (compiler->CanOSRFunction() && in_loop()) {
2652 const Register temp = locs()->temp(0).reg(); 2640 const Register temp = locs()->temp(0).reg();
2653 // In unoptimized code check the usage counter to trigger OSR at loop 2641 // In unoptimized code check the usage counter to trigger OSR at loop
2654 // stack checks. Use progressively higher thresholds for more deeply 2642 // stack checks. Use progressively higher thresholds for more deeply
2655 // nested loops to attempt to hit outer loops with OSR when possible. 2643 // nested loops to attempt to hit outer loops with OSR when possible.
2656 __ LoadObject(temp, compiler->parsed_function().function(), PP); 2644 __ LoadObject(temp, compiler->parsed_function().function());
2657 intptr_t threshold = 2645 intptr_t threshold =
2658 FLAG_optimization_counter_threshold * (loop_depth() + 1); 2646 FLAG_optimization_counter_threshold * (loop_depth() + 1);
2659 __ LoadFieldFromOffset( 2647 __ LoadFieldFromOffset(
2660 temp, temp, Function::usage_counter_offset(), PP, kWord); 2648 temp, temp, Function::usage_counter_offset(), kWord);
2661 __ CompareImmediate(temp, threshold, PP); 2649 __ CompareImmediate(temp, threshold);
2662 __ b(slow_path->osr_entry_label(), GE); 2650 __ b(slow_path->osr_entry_label(), GE);
2663 } 2651 }
2664 if (compiler->ForceSlowPathForStackOverflow()) { 2652 if (compiler->ForceSlowPathForStackOverflow()) {
2665 __ b(slow_path->entry_label()); 2653 __ b(slow_path->entry_label());
2666 } 2654 }
2667 __ Bind(slow_path->exit_label()); 2655 __ Bind(slow_path->exit_label());
2668 } 2656 }
2669 2657
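Note: the OSR trigger in the unoptimized path above scales with loop depth. As a scalar sketch (should_osr is not a VM function):

  bool should_osr(int64_t usage_counter, intptr_t loop_depth) {
    // Deeper loops need proportionally more iterations before OSR
    // fires, which biases on-stack replacement toward outer loops.
    return usage_counter >=
        FLAG_optimization_counter_threshold * (loop_depth + 1);
  }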
2670 2658
2671 static void EmitJavascriptOverflowCheck(FlowGraphCompiler* compiler, 2659 static void EmitJavascriptOverflowCheck(FlowGraphCompiler* compiler,
2672 Range* range, 2660 Range* range,
2673 Label* overflow, 2661 Label* overflow,
2674 Register result) { 2662 Register result) {
2675 if (!RangeUtils::IsWithin(range, -0x20000000000000LL, 0x20000000000000LL)) { 2663 if (!RangeUtils::IsWithin(range, -0x20000000000000LL, 0x20000000000000LL)) {
2676 ASSERT(overflow != NULL); 2664 ASSERT(overflow != NULL);
2677 __ LoadImmediate(TMP, 0x20000000000000LL, PP); 2665 __ LoadImmediate(TMP, 0x20000000000000LL);
2678 __ add(TMP2, result, Operand(TMP)); 2666 __ add(TMP2, result, Operand(TMP));
2679 __ cmp(TMP2, Operand(TMP, LSL, 1)); 2667 __ cmp(TMP2, Operand(TMP, LSL, 1));
2680 __ b(overflow, HI); 2668 __ b(overflow, HI);
2681 } 2669 }
2682 } 2670 }
2683 2671
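Note: EmitJavascriptOverflowCheck above tests |result| <= 2^53 with one add and one unsigned compare. The arithmetic, as a C++ sketch:

  bool js_int_overflow(int64_t result) {
    const uint64_t k = 0x20000000000000ULL;  // 2^53
    // Biasing by 2^53 maps the legal range [-2^53, 2^53] onto
    // [0, 2^54]; anything outside compares unsigned-higher (HI).
    return (uint64_t)result + k > (k << 1);
  }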
2684 2672
2685 static void EmitSmiShiftLeft(FlowGraphCompiler* compiler, 2673 static void EmitSmiShiftLeft(FlowGraphCompiler* compiler,
2686 BinarySmiOpInstr* shift_left) { 2674 BinarySmiOpInstr* shift_left) {
2687 const LocationSummary& locs = *shift_left->locs(); 2675 const LocationSummary& locs = *shift_left->locs();
(...skipping 36 matching lines...)
2724 __ CompareRegisters(right, ZR); 2712 __ CompareRegisters(right, ZR);
2725 __ b(deopt, MI); 2713 __ b(deopt, MI);
2726 __ mov(result, ZR); 2714 __ mov(result, ZR);
2727 return; 2715 return;
2728 } 2716 }
2729 const intptr_t max_right = kSmiBits - Utils::HighestBit(left_int); 2717 const intptr_t max_right = kSmiBits - Utils::HighestBit(left_int);
2730 const bool right_needs_check = 2718 const bool right_needs_check =
2731 !RangeUtils::IsWithin(right_range, 0, max_right - 1); 2719 !RangeUtils::IsWithin(right_range, 0, max_right - 1);
2732 if (right_needs_check) { 2720 if (right_needs_check) {
2733 __ CompareImmediate(right, 2721 __ CompareImmediate(right,
2734 reinterpret_cast<int64_t>(Smi::New(max_right)), PP); 2722 reinterpret_cast<int64_t>(Smi::New(max_right)));
2735 __ b(deopt, CS); 2723 __ b(deopt, CS);
2736 } 2724 }
2737 __ SmiUntag(TMP, right); 2725 __ SmiUntag(TMP, right);
2738 __ lslv(result, left, TMP); 2726 __ lslv(result, left, TMP);
2739 } 2727 }
2740 if (FLAG_throw_on_javascript_int_overflow) { 2728 if (FLAG_throw_on_javascript_int_overflow) {
2741 EmitJavascriptOverflowCheck(compiler, shift_left->range(), deopt, result); 2729 EmitJavascriptOverflowCheck(compiler, shift_left->range(), deopt, result);
2742 } 2730 }
2743 return; 2731 return;
2744 } 2732 }
2745 2733
2746 const bool right_needs_check = 2734 const bool right_needs_check =
2747 !RangeUtils::IsWithin(right_range, 0, (Smi::kBits - 1)); 2735 !RangeUtils::IsWithin(right_range, 0, (Smi::kBits - 1));
2748 if (!shift_left->can_overflow()) { 2736 if (!shift_left->can_overflow()) {
2749 if (right_needs_check) { 2737 if (right_needs_check) {
2750 const bool right_may_be_negative = 2738 const bool right_may_be_negative =
2751 (right_range == NULL) || !right_range->IsPositive(); 2739 (right_range == NULL) || !right_range->IsPositive();
2752 if (right_may_be_negative) { 2740 if (right_may_be_negative) {
2753 ASSERT(shift_left->CanDeoptimize()); 2741 ASSERT(shift_left->CanDeoptimize());
2754 __ CompareRegisters(right, ZR); 2742 __ CompareRegisters(right, ZR);
2755 __ b(deopt, MI); 2743 __ b(deopt, MI);
2756 } 2744 }
2757 2745
2758 __ CompareImmediate( 2746 __ CompareImmediate(
2759 right, reinterpret_cast<int64_t>(Smi::New(Smi::kBits)), PP); 2747 right, reinterpret_cast<int64_t>(Smi::New(Smi::kBits)));
2760 __ csel(result, ZR, result, CS); 2748 __ csel(result, ZR, result, CS);
2761 __ SmiUntag(TMP, right); 2749 __ SmiUntag(TMP, right);
2762 __ lslv(TMP, left, TMP); 2750 __ lslv(TMP, left, TMP);
2763 __ csel(result, TMP, result, CC); 2751 __ csel(result, TMP, result, CC);
2764 } else { 2752 } else {
2765 __ SmiUntag(TMP, right); 2753 __ SmiUntag(TMP, right);
2766 __ lslv(result, left, TMP); 2754 __ lslv(result, left, TMP);
2767 } 2755 }
2768 } else { 2756 } else {
2769 if (right_needs_check) { 2757 if (right_needs_check) {
2770 ASSERT(shift_left->CanDeoptimize()); 2758 ASSERT(shift_left->CanDeoptimize());
2771 __ CompareImmediate( 2759 __ CompareImmediate(
2772 right, reinterpret_cast<int64_t>(Smi::New(Smi::kBits)), PP); 2760 right, reinterpret_cast<int64_t>(Smi::New(Smi::kBits)));
2773 __ b(deopt, CS); 2761 __ b(deopt, CS);
2774 } 2762 }
2775 // Left is not a constant. 2763 // Left is not a constant.
2776 // Check if count too large for handling it inlined. 2764 // Check if count too large for handling it inlined.
2777 __ SmiUntag(TMP, right); 2765 __ SmiUntag(TMP, right);
2778 // Overflow test (preserves left, right, and TMP). 2766 // Overflow test (preserves left, right, and TMP).
2779 const Register temp = locs.temp(0).reg(); 2767 const Register temp = locs.temp(0).reg();
2780 __ lslv(temp, left, TMP); 2768 __ lslv(temp, left, TMP);
2781 __ asrv(TMP2, temp, TMP); 2769 __ asrv(TMP2, temp, TMP);
2782 __ CompareRegisters(left, TMP2); 2770 __ CompareRegisters(left, TMP2);
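// The overflow probe above, in scalar form (a sketch, not VM code): a
// left shift loses information exactly when shifting back fails to
// restore the original value.
//
//   bool shl_overflows(int64_t left, int64_t n) {
//     int64_t t = left << n;     // lslv(temp, left, TMP)
//     return (t >> n) != left;   // asrv(TMP2, temp, TMP) + compare
//   }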
(...skipping 58 matching lines...)
2841 deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp); 2829 deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp);
2842 } 2830 }
2843 2831
2844 if (locs()->in(1).IsConstant()) { 2832 if (locs()->in(1).IsConstant()) {
2845 const Object& constant = locs()->in(1).constant(); 2833 const Object& constant = locs()->in(1).constant();
2846 ASSERT(constant.IsSmi()); 2834 ASSERT(constant.IsSmi());
2847 const int64_t imm = reinterpret_cast<int64_t>(constant.raw()); 2835 const int64_t imm = reinterpret_cast<int64_t>(constant.raw());
2848 switch (op_kind()) { 2836 switch (op_kind()) {
2849 case Token::kADD: { 2837 case Token::kADD: {
2850 if (deopt == NULL) { 2838 if (deopt == NULL) {
2851 __ AddImmediate(result, left, imm, PP); 2839 __ AddImmediate(result, left, imm);
2852 } else { 2840 } else {
2853 __ AddImmediateSetFlags(result, left, imm, PP); 2841 __ AddImmediateSetFlags(result, left, imm);
2854 __ b(deopt, VS); 2842 __ b(deopt, VS);
2855 } 2843 }
2856 break; 2844 break;
2857 } 2845 }
2858 case Token::kSUB: { 2846 case Token::kSUB: {
2859 if (deopt == NULL) { 2847 if (deopt == NULL) {
2860 __ AddImmediate(result, left, -imm, PP); 2848 __ AddImmediate(result, left, -imm);
2861 } else { 2849 } else {
2862 // Negating imm and using AddImmediateSetFlags would not detect the 2850 // Negating imm and using AddImmediateSetFlags would not detect the
2863 // overflow when imm == kMinInt64. 2851 // overflow when imm == kMinInt64.
2864 __ SubImmediateSetFlags(result, left, imm, PP); 2852 __ SubImmediateSetFlags(result, left, imm);
2865 __ b(deopt, VS); 2853 __ b(deopt, VS);
2866 } 2854 }
2867 break; 2855 break;
2868 } 2856 }
2869 case Token::kMUL: { 2857 case Token::kMUL: {
2870 // Keep left value tagged and untag right value. 2858 // Keep left value tagged and untag right value.
2871 const intptr_t value = Smi::Cast(constant).Value(); 2859 const intptr_t value = Smi::Cast(constant).Value();
2872 __ LoadImmediate(TMP, value, PP); 2860 __ LoadImmediate(TMP, value);
2873 __ mul(result, left, TMP); 2861 __ mul(result, left, TMP);
2874 if (deopt != NULL) { 2862 if (deopt != NULL) {
2875 __ smulh(TMP, left, TMP); 2863 __ smulh(TMP, left, TMP);
2876 // TMP: result bits 64..127. 2864 // TMP: result bits 64..127.
2877 __ cmp(TMP, Operand(result, ASR, 63)); 2865 __ cmp(TMP, Operand(result, ASR, 63));
2878 __ b(deopt, NE); 2866 __ b(deopt, NE);
2879 } 2867 }
2880 break; 2868 break;
2881 } 2869 }
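// For reference, the overflow test just emitted, as scalar C++ (a
// sketch, not VM code): the product fits in 64 bits exactly when the
// high 64 bits (smulh) equal the sign-extension of the low 64 bits.
//
//   bool mul_overflows(int64_t a, int64_t b) {
//     __int128 p = (__int128)a * b;
//     int64_t lo = (int64_t)p;          // mul   -> result
//     int64_t hi = (int64_t)(p >> 64);  // smulh -> TMP
//     return hi != (lo >> 63);          // cmp TMP, Operand(result, ASR, 63)
//   }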
2882 case Token::kTRUNCDIV: { 2870 case Token::kTRUNCDIV: {
2883 const intptr_t value = Smi::Cast(constant).Value(); 2871 const intptr_t value = Smi::Cast(constant).Value();
2884 ASSERT(Utils::IsPowerOfTwo(Utils::Abs(value))); 2872 ASSERT(Utils::IsPowerOfTwo(Utils::Abs(value)));
2885 const intptr_t shift_count = 2873 const intptr_t shift_count =
2886 Utils::ShiftForPowerOfTwo(Utils::Abs(value)) + kSmiTagSize; 2874 Utils::ShiftForPowerOfTwo(Utils::Abs(value)) + kSmiTagSize;
2887 ASSERT(kSmiTagSize == 1); 2875 ASSERT(kSmiTagSize == 1);
2888 __ AsrImmediate(TMP, left, 63); 2876 __ AsrImmediate(TMP, left, 63);
2889 ASSERT(shift_count > 1); // 1, -1 case handled above. 2877 ASSERT(shift_count > 1); // 1, -1 case handled above.
2890 const Register temp = TMP2; 2878 const Register temp = TMP2;
2891 __ add(temp, left, Operand(TMP, LSR, 64 - shift_count)); 2879 __ add(temp, left, Operand(TMP, LSR, 64 - shift_count));
2892 ASSERT(shift_count > 0); 2880 ASSERT(shift_count > 0);
2893 __ AsrImmediate(result, temp, shift_count); 2881 __ AsrImmediate(result, temp, shift_count);
2894 if (value < 0) { 2882 if (value < 0) {
2895 __ sub(result, ZR, Operand(result)); 2883 __ sub(result, ZR, Operand(result));
2896 } 2884 }
2897 __ SmiTag(result); 2885 __ SmiTag(result);
2898 break; 2886 break;
2899 } 2887 }
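// The shift sequence above, as scalar C++ (a sketch; in the real code
// the Smi tag is folded into shift_count):
//
//   int64_t div_by_pow2(int64_t x, int shift, bool negative_divisor) {
//     int64_t sign = x >> 63;                          // AsrImmediate(TMP, left, 63)
//     x += (int64_t)((uint64_t)sign >> (64 - shift));  // adds divisor-1 when x < 0
//     int64_t q = x >> shift;                          // now truncates toward zero
//     return negative_divisor ? -q : q;                // sub(result, ZR, result)
//   }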
2900 case Token::kBIT_AND: 2888 case Token::kBIT_AND:
2901 // No overflow check. 2889 // No overflow check.
2902 __ AndImmediate(result, left, imm, PP); 2890 __ AndImmediate(result, left, imm);
2903 break; 2891 break;
2904 case Token::kBIT_OR: 2892 case Token::kBIT_OR:
2905 // No overflow check. 2893 // No overflow check.
2906 __ OrImmediate(result, left, imm, PP); 2894 __ OrImmediate(result, left, imm);
2907 break; 2895 break;
2908 case Token::kBIT_XOR: 2896 case Token::kBIT_XOR:
2909 // No overflow check. 2897 // No overflow check.
2910 __ XorImmediate(result, left, imm, PP); 2898 __ XorImmediate(result, left, imm);
2911 break; 2899 break;
2912 case Token::kSHR: { 2900 case Token::kSHR: {
2913 // Asr operation masks the count to 6 bits. 2901 // Asr operation masks the count to 6 bits.
2914 const intptr_t kCountLimit = 0x3F; 2902 const intptr_t kCountLimit = 0x3F;
2915 intptr_t value = Smi::Cast(constant).Value(); 2903 intptr_t value = Smi::Cast(constant).Value();
2916 __ AsrImmediate( 2904 __ AsrImmediate(
2917 result, left, Utils::Minimum(value + kSmiTagSize, kCountLimit)); 2905 result, left, Utils::Minimum(value + kSmiTagSize, kCountLimit));
2918 __ SmiTag(result); 2906 __ SmiTag(result);
2919 break; 2907 break;
2920 } 2908 }
(...skipping 63 matching lines...)
2984 __ b(deopt, EQ); 2972 __ b(deopt, EQ);
2985 } 2973 }
2986 const Register temp = TMP2; 2974 const Register temp = TMP2;
2987 __ SmiUntag(temp, left); 2975 __ SmiUntag(temp, left);
2988 __ SmiUntag(TMP, right); 2976 __ SmiUntag(TMP, right);
2989 2977
2990 __ sdiv(result, temp, TMP); 2978 __ sdiv(result, temp, TMP);
2991 2979
2992 // Check the corner case of dividing 'MIN_SMI' by -1, in which 2980 // Check the corner case of dividing 'MIN_SMI' by -1, in which
2993 // case we cannot tag the result. 2981 // case we cannot tag the result.
2994 __ CompareImmediate(result, 0x4000000000000000LL, kNoPP); 2982 __ CompareImmediate(result, 0x4000000000000000LL);
2995 __ b(deopt, EQ); 2983 __ b(deopt, EQ);
2996 __ SmiTag(result); 2984 __ SmiTag(result);
2997 break; 2985 break;
2998 } 2986 }
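The single compare against 0x4000000000000000 suffices because arm64 smis are 63-bit: the untagged range is [-2^62, 2^62 - 1], and the only quotient of two in-range values that escapes it is MIN_SMI / -1 == 2^62. A sketch of the range predicate (helper name illustrative):

    #include <cstdint>

    bool QuotientFitsInSmi(int64_t quotient) {
      const int64_t kSmiMax = (INT64_C(1) << 62) - 1;
      const int64_t kSmiMin = -(INT64_C(1) << 62);
      return quotient >= kSmiMin && quotient <= kSmiMax;  // only 2^62 fails here
    }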
2999 case Token::kMOD: { 2987 case Token::kMOD: {
3000 if ((right_range == NULL) || right_range->Overlaps(0, 0)) { 2988 if ((right_range == NULL) || right_range->Overlaps(0, 0)) {
3001 // Handle divide by zero in runtime. 2989 // Handle divide by zero in runtime.
3002 __ CompareRegisters(right, ZR); 2990 __ CompareRegisters(right, ZR);
3003 __ b(deopt, EQ); 2991 __ b(deopt, EQ);
3004 } 2992 }
(...skipping 28 matching lines...)
3033 case Token::kSHR: { 3021 case Token::kSHR: {
3034 if (CanDeoptimize()) { 3022 if (CanDeoptimize()) {
3035 __ CompareRegisters(right, ZR); 3023 __ CompareRegisters(right, ZR);
3036 __ b(deopt, LT); 3024 __ b(deopt, LT);
3037 } 3025 }
3038 __ SmiUntag(TMP, right); 3026 __ SmiUntag(TMP, right);
3039 // asrv operation masks the count to 6 bits. 3027 // asrv operation masks the count to 6 bits.
3040 const intptr_t kCountLimit = 0x3F; 3028 const intptr_t kCountLimit = 0x3F;
3041 if ((right_range == NULL) || 3029 if ((right_range == NULL) ||
3042 !right_range->OnlyLessThanOrEqualTo(kCountLimit)) { 3030 !right_range->OnlyLessThanOrEqualTo(kCountLimit)) {
3043 __ LoadImmediate(TMP2, kCountLimit, PP); 3031 __ LoadImmediate(TMP2, kCountLimit);
3044 __ CompareRegisters(TMP, TMP2); 3032 __ CompareRegisters(TMP, TMP2);
3045 __ csel(TMP, TMP2, TMP, GT); 3033 __ csel(TMP, TMP2, TMP, GT);
3046 } 3034 }
3047 const Register temp = locs()->temp(0).reg(); 3035 const Register temp = locs()->temp(0).reg();
3048 __ SmiUntag(temp, left); 3036 __ SmiUntag(temp, left);
3049 __ asrv(result, temp, TMP); 3037 __ asrv(result, temp, TMP);
3050 __ SmiTag(result); 3038 __ SmiTag(result);
3051 break; 3039 break;
3052 } 3040 }
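asrv only uses the low six bits of the count, so counts of 64 and above must be clamped to 63 first, which still produces the mathematically correct result for an arithmetic shift. Scalar sketch of the clamp-then-shift (names illustrative; arithmetic >> on signed values assumed):

    #include <algorithm>
    #include <cstdint>

    int64_t ShiftRightClamped(int64_t value, int64_t count) {
      count = std::min<int64_t>(count, 63);  // LoadImmediate/cmp/csel above
      return value >> count;                 // asrv
    }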
3053 case Token::kDIV: { 3041 case Token::kDIV: {
(...skipping 74 matching lines...)
3128 3116
3129 BoxAllocationSlowPath::Allocate( 3117 BoxAllocationSlowPath::Allocate(
3130 compiler, 3118 compiler,
3131 this, 3119 this,
3132 compiler->BoxClassFor(from_representation()), 3120 compiler->BoxClassFor(from_representation()),
3133 out_reg, 3121 out_reg,
3134 temp_reg); 3122 temp_reg);
3135 3123
3136 switch (from_representation()) { 3124 switch (from_representation()) {
3137 case kUnboxedDouble: 3125 case kUnboxedDouble:
3138 __ StoreDFieldToOffset(value, out_reg, ValueOffset(), PP); 3126 __ StoreDFieldToOffset(value, out_reg, ValueOffset());
3139 break; 3127 break;
3140 case kUnboxedFloat32x4: 3128 case kUnboxedFloat32x4:
3141 case kUnboxedFloat64x2: 3129 case kUnboxedFloat64x2:
3142 case kUnboxedInt32x4: 3130 case kUnboxedInt32x4:
3143 __ StoreQFieldToOffset(value, out_reg, ValueOffset(), PP); 3131 __ StoreQFieldToOffset(value, out_reg, ValueOffset());
3144 break; 3132 break;
3145 default: 3133 default:
3146 UNREACHABLE(); 3134 UNREACHABLE();
3147 break; 3135 break;
3148 } 3136 }
3149 } 3137 }
3150 3138
3151 3139
3152 LocationSummary* UnboxInstr::MakeLocationSummary(Zone* zone, 3140 LocationSummary* UnboxInstr::MakeLocationSummary(Zone* zone,
3153 bool opt) const { 3141 bool opt) const {
(...skipping 11 matching lines...)
3165 const Register box = locs()->in(0).reg(); 3153 const Register box = locs()->in(0).reg();
3166 3154
3167 switch (representation()) { 3155 switch (representation()) {
3168 case kUnboxedMint: { 3156 case kUnboxedMint: {
3169 UNIMPLEMENTED(); 3157 UNIMPLEMENTED();
3170 break; 3158 break;
3171 } 3159 }
3172 3160
3173 case kUnboxedDouble: { 3161 case kUnboxedDouble: {
3174 const VRegister result = locs()->out(0).fpu_reg(); 3162 const VRegister result = locs()->out(0).fpu_reg();
3175 __ LoadDFieldFromOffset(result, box, ValueOffset(), PP); 3163 __ LoadDFieldFromOffset(result, box, ValueOffset());
3176 break; 3164 break;
3177 } 3165 }
3178 3166
3179 case kUnboxedFloat32x4: 3167 case kUnboxedFloat32x4:
3180 case kUnboxedFloat64x2: 3168 case kUnboxedFloat64x2:
3181 case kUnboxedInt32x4: { 3169 case kUnboxedInt32x4: {
3182 const VRegister result = locs()->out(0).fpu_reg(); 3170 const VRegister result = locs()->out(0).fpu_reg();
3183 __ LoadQFieldFromOffset(result, box, ValueOffset(), PP); 3171 __ LoadQFieldFromOffset(result, box, ValueOffset());
3184 break; 3172 break;
3185 } 3173 }
3186 3174
3187 default: 3175 default:
3188 UNREACHABLE(); 3176 UNREACHABLE();
3189 break; 3177 break;
3190 } 3178 }
3191 } 3179 }
3192 3180
3193 3181
(...skipping 29 matching lines...)
3223 } else if (CanConvertSmi() && (value_cid == kSmiCid)) { 3211 } else if (CanConvertSmi() && (value_cid == kSmiCid)) {
3224 EmitSmiConversion(compiler); 3212 EmitSmiConversion(compiler);
3225 } else { 3213 } else {
3226 const Register box = locs()->in(0).reg(); 3214 const Register box = locs()->in(0).reg();
3227 Label* deopt = compiler->AddDeoptStub(GetDeoptId(), 3215 Label* deopt = compiler->AddDeoptStub(GetDeoptId(),
3228 ICData::kDeoptCheckClass); 3216 ICData::kDeoptCheckClass);
3229 Label is_smi; 3217 Label is_smi;
3230 3218
3231 if ((value()->Type()->ToNullableCid() == box_cid) && 3219 if ((value()->Type()->ToNullableCid() == box_cid) &&
3232 value()->Type()->is_nullable()) { 3220 value()->Type()->is_nullable()) {
3233 __ CompareObject(box, Object::null_object(), PP); 3221 __ CompareObject(box, Object::null_object());
3234 __ b(deopt, EQ); 3222 __ b(deopt, EQ);
3235 } else { 3223 } else {
3236 __ tsti(box, Immediate(kSmiTagMask)); 3224 __ tsti(box, Immediate(kSmiTagMask));
3237 __ b(CanConvertSmi() ? &is_smi : deopt, EQ); 3225 __ b(CanConvertSmi() ? &is_smi : deopt, EQ);
3238 __ CompareClassId(box, box_cid, PP); 3226 __ CompareClassId(box, box_cid);
3239 __ b(deopt, NE); 3227 __ b(deopt, NE);
3240 } 3228 }
3241 3229
3242 EmitLoadFromBox(compiler); 3230 EmitLoadFromBox(compiler);
3243 3231
3244 if (is_smi.IsLinked()) { 3232 if (is_smi.IsLinked()) {
3245 Label done; 3233 Label done;
3246 __ b(&done); 3234 __ b(&done);
3247 __ Bind(&is_smi); 3235 __ Bind(&is_smi);
3248 EmitSmiConversion(compiler); 3236 EmitSmiConversion(compiler);
(...skipping 55 matching lines...)
3304 void UnboxInteger32Instr::EmitNativeCode(FlowGraphCompiler* compiler) { 3292 void UnboxInteger32Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
3305 const intptr_t value_cid = value()->Type()->ToCid(); 3293 const intptr_t value_cid = value()->Type()->ToCid();
3306 const Register out = locs()->out(0).reg(); 3294 const Register out = locs()->out(0).reg();
3307 const Register value = locs()->in(0).reg(); 3295 const Register value = locs()->in(0).reg();
3308 Label* deopt = CanDeoptimize() ? 3296 Label* deopt = CanDeoptimize() ?
3309 compiler->AddDeoptStub(GetDeoptId(), ICData::kDeoptUnboxInteger) : NULL; 3297 compiler->AddDeoptStub(GetDeoptId(), ICData::kDeoptUnboxInteger) : NULL;
3310 3298
3311 if (value_cid == kSmiCid) { 3299 if (value_cid == kSmiCid) {
3312 __ SmiUntag(out, value); 3300 __ SmiUntag(out, value);
3313 } else if (value_cid == kMintCid) { 3301 } else if (value_cid == kMintCid) {
3314 __ LoadFieldFromOffset(out, value, Mint::value_offset(), PP); 3302 __ LoadFieldFromOffset(out, value, Mint::value_offset());
3315 } else if (!CanDeoptimize()) { 3303 } else if (!CanDeoptimize()) {
3316 // Type information is not conclusive, but range analysis found 3304 // Type information is not conclusive, but range analysis found
3317 // the value to be in int64 range. Therefore it must be a smi 3305 // the value to be in int64 range. Therefore it must be a smi
3318 // or mint value. 3306 // or mint value.
3319 ASSERT(is_truncating()); 3307 ASSERT(is_truncating());
3320 Label done; 3308 Label done;
3321 __ SmiUntag(out, value); 3309 __ SmiUntag(out, value);
3322 __ TestImmediate(value, kSmiTagMask, PP); 3310 __ TestImmediate(value, kSmiTagMask);
3323 __ b(&done, EQ); 3311 __ b(&done, EQ);
3324 __ LoadFieldFromOffset(out, value, Mint::value_offset(), PP); 3312 __ LoadFieldFromOffset(out, value, Mint::value_offset());
3325 __ Bind(&done); 3313 __ Bind(&done);
3326 } else { 3314 } else {
3327 Label done; 3315 Label done;
3328 __ SmiUntag(out, value); 3316 __ SmiUntag(out, value);
3329 __ TestImmediate(value, kSmiTagMask, PP); 3317 __ TestImmediate(value, kSmiTagMask);
3330 __ b(&done, EQ); 3318 __ b(&done, EQ);
3331 __ CompareClassId(value, kMintCid, PP); 3319 __ CompareClassId(value, kMintCid);
3332 __ b(deopt, NE); 3320 __ b(deopt, NE);
3333 __ LoadFieldFromOffset(out, value, Mint::value_offset(), PP); 3321 __ LoadFieldFromOffset(out, value, Mint::value_offset());
3334 __ Bind(&done); 3322 __ Bind(&done);
3335 } 3323 }
3336 3324
3337 // TODO(vegorov): as it is implemented right now truncating unboxing would 3325 // TODO(vegorov): as it is implemented right now truncating unboxing would
3338 // leave "garbage" in the higher word. 3326 // leave "garbage" in the higher word.
3339 if (!is_truncating() && (deopt != NULL)) { 3327 if (!is_truncating() && (deopt != NULL)) {
3340 ASSERT(representation() == kUnboxedInt32); 3328 ASSERT(representation() == kUnboxedInt32);
3341 __ cmp(out, Operand(out, SXTW, 0)); 3329 __ cmp(out, Operand(out, SXTW, 0));
3342 __ b(deopt, NE); 3330 __ b(deopt, NE);
3343 } 3331 }
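A schematic sketch of the smi-or-mint fast path above, assuming the standard Dart tagging scheme (low bit clear: smi with the payload in the upper 63 bits; low bit set: heap pointer). The heap layout is deliberately simplified to a bare int64 payload; the real code class-checks the object and loads Mint::value_offset():

    #include <cstdint>

    int64_t UnboxInt64(intptr_t raw) {
      if ((raw & 1) == 0) {   // TestImmediate(value, kSmiTagMask); b(&done, EQ)
        return raw >> 1;      // SmiUntag
      }
      return *reinterpret_cast<const int64_t*>(raw - 1);  // simplified mint load
    }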
(...skipping 815 matching lines...)
4159 4147
4160 void Int32x4BoolConstructorInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 4148 void Int32x4BoolConstructorInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
4161 const Register v0 = locs()->in(0).reg(); 4149 const Register v0 = locs()->in(0).reg();
4162 const Register v1 = locs()->in(1).reg(); 4150 const Register v1 = locs()->in(1).reg();
4163 const Register v2 = locs()->in(2).reg(); 4151 const Register v2 = locs()->in(2).reg();
4164 const Register v3 = locs()->in(3).reg(); 4152 const Register v3 = locs()->in(3).reg();
4165 const Register temp = locs()->temp(0).reg(); 4153 const Register temp = locs()->temp(0).reg();
4166 const VRegister result = locs()->out(0).fpu_reg(); 4154 const VRegister result = locs()->out(0).fpu_reg();
4167 4155
4168 __ veor(result, result, result); 4156 __ veor(result, result, result);
4169 __ LoadImmediate(temp, 0xffffffff, PP); 4157 __ LoadImmediate(temp, 0xffffffff);
4170 __ LoadObject(TMP2, Bool::True(), PP); 4158 __ LoadObject(TMP2, Bool::True());
4171 4159
4172 // __ CompareObject(v0, Bool::True(), PP); 4160 // __ CompareObject(v0, Bool::True());
4173 __ CompareRegisters(v0, TMP2); 4161 __ CompareRegisters(v0, TMP2);
4174 __ csel(TMP, temp, ZR, EQ); 4162 __ csel(TMP, temp, ZR, EQ);
4175 __ vinsw(result, 0, TMP); 4163 __ vinsw(result, 0, TMP);
4176 4164
4177 // __ CompareObject(v1, Bool::True(), PP); 4165 // __ CompareObject(v1, Bool::True());
4178 __ CompareRegisters(v1, TMP2); 4166 __ CompareRegisters(v1, TMP2);
4179 __ csel(TMP, temp, ZR, EQ); 4167 __ csel(TMP, temp, ZR, EQ);
4180 __ vinsw(result, 1, TMP); 4168 __ vinsw(result, 1, TMP);
4181 4169
4182 // __ CompareObject(v2, Bool::True(), PP); 4170 // __ CompareObject(v2, Bool::True());
4183 __ CompareRegisters(v2, TMP2); 4171 __ CompareRegisters(v2, TMP2);
4184 __ csel(TMP, temp, ZR, EQ); 4172 __ csel(TMP, temp, ZR, EQ);
4185 __ vinsw(result, 2, TMP); 4173 __ vinsw(result, 2, TMP);
4186 4174
4187 // __ CompareObject(v3, Bool::True(), PP); 4175 // __ CompareObject(v3, Bool::True());
4188 __ CompareRegisters(v3, TMP2); 4176 __ CompareRegisters(v3, TMP2);
4189 __ csel(TMP, temp, ZR, EQ); 4177 __ csel(TMP, temp, ZR, EQ);
4190 __ vinsw(result, 3, TMP); 4178 __ vinsw(result, 3, TMP);
4191 } 4179 }
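Scalar sketch of the lane construction above: each boolean input selects either 0xffffffff or 0 for its 32-bit lane (the csel), and the lanes are inserted one at a time (the vinsw). Types and names are illustrative:

    #include <cstdint>

    struct Int32x4 { uint32_t lanes[4]; };

    Int32x4 MakeBoolMask(bool x, bool y, bool z, bool w) {
      const bool in[4] = {x, y, z, w};
      Int32x4 result = {};                           // veor result, result, result
      for (int i = 0; i < 4; i++) {
        result.lanes[i] = in[i] ? 0xffffffffu : 0u;  // csel TMP, temp, ZR, EQ
      }
      return result;
    }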
4192 4180
4193 4181
4194 LocationSummary* Int32x4GetFlagInstr::MakeLocationSummary(Zone* zone, 4182 LocationSummary* Int32x4GetFlagInstr::MakeLocationSummary(Zone* zone,
4195 bool opt) const { 4183 bool opt) const {
4196 const intptr_t kNumInputs = 1; 4184 const intptr_t kNumInputs = 1;
4197 const intptr_t kNumTemps = 0; 4185 const intptr_t kNumTemps = 0;
(...skipping 19 matching lines...)
4217 case MethodRecognizer::kInt32x4GetFlagZ: 4205 case MethodRecognizer::kInt32x4GetFlagZ:
4218 __ vmovrs(result, value, 2); 4206 __ vmovrs(result, value, 2);
4219 break; 4207 break;
4220 case MethodRecognizer::kInt32x4GetFlagW: 4208 case MethodRecognizer::kInt32x4GetFlagW:
4221 __ vmovrs(result, value, 3); 4209 __ vmovrs(result, value, 3);
4222 break; 4210 break;
4223 default: UNREACHABLE(); 4211 default: UNREACHABLE();
4224 } 4212 }
4225 4213
4226 __ tst(result, Operand(result)); 4214 __ tst(result, Operand(result));
4227 __ LoadObject(result, Bool::True(), PP); 4215 __ LoadObject(result, Bool::True());
4228 __ LoadObject(TMP, Bool::False(), PP); 4216 __ LoadObject(TMP, Bool::False());
4229 __ csel(result, TMP, result, EQ); 4217 __ csel(result, TMP, result, EQ);
4230 } 4218 }
4231 4219
4232 4220
4233 LocationSummary* Int32x4SelectInstr::MakeLocationSummary(Zone* zone, 4221 LocationSummary* Int32x4SelectInstr::MakeLocationSummary(Zone* zone,
4234 bool opt) const { 4222 bool opt) const {
4235 const intptr_t kNumInputs = 3; 4223 const intptr_t kNumInputs = 3;
4236 const intptr_t kNumTemps = 1; 4224 const intptr_t kNumTemps = 1;
4237 LocationSummary* summary = new LocationSummary( 4225 LocationSummary* summary = new LocationSummary(
4238 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); 4226 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
(...skipping 41 matching lines...)
4280 4268
4281 void Int32x4SetFlagInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 4269 void Int32x4SetFlagInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
4282 const VRegister mask = locs()->in(0).fpu_reg(); 4270 const VRegister mask = locs()->in(0).fpu_reg();
4283 const Register flag = locs()->in(1).reg(); 4271 const Register flag = locs()->in(1).reg();
4284 const VRegister result = locs()->out(0).fpu_reg(); 4272 const VRegister result = locs()->out(0).fpu_reg();
4285 4273
4286 if (result != mask) { 4274 if (result != mask) {
4287 __ vmov(result, mask); 4275 __ vmov(result, mask);
4288 } 4276 }
4289 4277
4290 __ CompareObject(flag, Bool::True(), PP); 4278 __ CompareObject(flag, Bool::True());
4291 __ LoadImmediate(TMP, 0xffffffff, PP); 4279 __ LoadImmediate(TMP, 0xffffffff);
4292 __ csel(TMP, TMP, ZR, EQ); 4280 __ csel(TMP, TMP, ZR, EQ);
4293 switch (op_kind()) { 4281 switch (op_kind()) {
4294 case MethodRecognizer::kInt32x4WithFlagX: 4282 case MethodRecognizer::kInt32x4WithFlagX:
4295 __ vinsw(result, 0, TMP); 4283 __ vinsw(result, 0, TMP);
4296 break; 4284 break;
4297 case MethodRecognizer::kInt32x4WithFlagY: 4285 case MethodRecognizer::kInt32x4WithFlagY:
4298 __ vinsw(result, 1, TMP); 4286 __ vinsw(result, 1, TMP);
4299 break; 4287 break;
4300 case MethodRecognizer::kInt32x4WithFlagZ: 4288 case MethodRecognizer::kInt32x4WithFlagZ:
4301 __ vinsw(result, 2, TMP); 4289 __ vinsw(result, 2, TMP);
(...skipping 158 matching lines...)
4460 __ b(&are_equal, EQ); 4448 __ b(&are_equal, EQ);
4461 const Condition double_condition = 4449 const Condition double_condition =
4462 is_min ? TokenKindToDoubleCondition(Token::kLTE) 4450 is_min ? TokenKindToDoubleCondition(Token::kLTE)
4463 : TokenKindToDoubleCondition(Token::kGTE); 4451 : TokenKindToDoubleCondition(Token::kGTE);
4464 ASSERT(left == result); 4452 ASSERT(left == result);
4465 __ b(&done, double_condition); 4453 __ b(&done, double_condition);
4466 __ fmovdd(result, right); 4454 __ fmovdd(result, right);
4467 __ b(&done); 4455 __ b(&done);
4468 4456
4469 __ Bind(&returns_nan); 4457 __ Bind(&returns_nan);
4470 __ LoadDImmediate(result, NAN, PP); 4458 __ LoadDImmediate(result, NAN);
4471 __ b(&done); 4459 __ b(&done);
4472 4460
4473 __ Bind(&are_equal); 4461 __ Bind(&are_equal);
4474 // Check for negative zero: -0.0 is equal 0.0 but min or max must return 4462 // Check for negative zero: -0.0 is equal 0.0 but min or max must return
4475 // -0.0 or 0.0 respectively. 4463 // -0.0 or 0.0 respectively.
4476 // Check for negative zero: -0.0 is equal to 0.0 but min or max must return 4464 // Check for negative zero: -0.0 is equal to 0.0 but min or max must return
4477 // - min -> left is negative ? left : right. 4465 // - min -> left is negative ? left : right.
4478 // - max -> left is negative ? right : left 4466 // - max -> left is negative ? right : left
4479 // Check the sign bit. 4467 // Check the sign bit.
4480 __ fmovrd(TMP, left); // Sign bit is in bit 63 of TMP. 4468 __ fmovrd(TMP, left); // Sign bit is in bit 63 of TMP.
4481 __ CompareImmediate(TMP, 0, PP); 4469 __ CompareImmediate(TMP, 0);
4482 if (is_min) { 4470 if (is_min) {
4483 ASSERT(left == result); 4471 ASSERT(left == result);
4484 __ b(&done, LT); 4472 __ b(&done, LT);
4485 __ fmovdd(result, right); 4473 __ fmovdd(result, right);
4486 } else { 4474 } else {
4487 __ b(&done, GE); 4475 __ b(&done, GE);
4488 __ fmovdd(result, right); 4476 __ fmovdd(result, right);
4489 ASSERT(left == result); 4477 ASSERT(left == result);
4490 } 4478 }
4491 __ Bind(&done); 4479 __ Bind(&done);
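Scalar sketch of the min path above: NaNs propagate, and when the operands compare equal the sign bit (read with fmovrd) distinguishes -0.0 from 0.0, since -0.0 == 0.0 numerically but min must prefer -0.0. Helper name illustrative:

    #include <cmath>
    #include <cstdint>
    #include <cstring>

    double DoubleMin(double left, double right) {
      if (std::isnan(left) || std::isnan(right)) return NAN;  // returns_nan
      if (left == right) {                        // are_equal: maybe -0.0 vs 0.0
        uint64_t bits;
        std::memcpy(&bits, &left, sizeof bits);   // fmovrd TMP, left
        return (static_cast<int64_t>(bits) < 0) ? left : right;  // sign bit test
      }
      return (left <= right) ? left : right;
    }

The max path mirrors this with the comparison and the sign-bit selection inverted.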
(...skipping 132 matching lines...)
4624 result->set_out(0, Location::RegisterLocation(R0)); 4612 result->set_out(0, Location::RegisterLocation(R0));
4625 return result; 4613 return result;
4626 } 4614 }
4627 4615
4628 4616
4629 void DoubleToIntegerInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 4617 void DoubleToIntegerInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
4630 const Register result = locs()->out(0).reg(); 4618 const Register result = locs()->out(0).reg();
4631 const Register value_obj = locs()->in(0).reg(); 4619 const Register value_obj = locs()->in(0).reg();
4632 ASSERT(result == R0); 4620 ASSERT(result == R0);
4633 ASSERT(result != value_obj); 4621 ASSERT(result != value_obj);
4634 __ LoadDFieldFromOffset(VTMP, value_obj, Double::value_offset(), PP); 4622 __ LoadDFieldFromOffset(VTMP, value_obj, Double::value_offset());
4635 4623
4636 Label do_call, done; 4624 Label do_call, done;
4637 // First check for NaN. Checking for minint after the conversion doesn't work 4625 // First check for NaN. Checking for minint after the conversion doesn't work
4638 // on ARM64 because fcvtzds gives 0 for NaN. 4626 // on ARM64 because fcvtzds gives 0 for NaN.
4639 __ fcmpd(VTMP, VTMP); 4627 __ fcmpd(VTMP, VTMP);
4640 __ b(&do_call, VS); 4628 __ b(&do_call, VS);
4641 4629
4642 __ fcvtzds(result, VTMP); 4630 __ fcvtzds(result, VTMP);
4643 // Overflow is signaled with minint. 4631 // Overflow is signaled with minint.
4644 4632
4645 // Check for overflow and that it fits into Smi. 4633 // Check for overflow and that it fits into Smi.
4646 __ CompareImmediate(result, 0xC000000000000000, PP); 4634 __ CompareImmediate(result, 0xC000000000000000);
4647 __ b(&do_call, MI); 4635 __ b(&do_call, MI);
4648 __ SmiTag(result); 4636 __ SmiTag(result);
4649 if (FLAG_throw_on_javascript_int_overflow) { 4637 if (FLAG_throw_on_javascript_int_overflow) {
4650 EmitJavascriptOverflowCheck(compiler, range(), &do_call, result); 4638 EmitJavascriptOverflowCheck(compiler, range(), &do_call, result);
4651 } 4639 }
4652 __ b(&done); 4640 __ b(&done);
4653 __ Bind(&do_call); 4641 __ Bind(&do_call);
4654 __ Push(value_obj); 4642 __ Push(value_obj);
4655 ASSERT(instance_call()->HasICData()); 4643 ASSERT(instance_call()->HasICData());
4656 const ICData& ic_data = *instance_call()->ic_data(); 4644 const ICData& ic_data = *instance_call()->ic_data();
(...skipping 29 matching lines...)
4686 const Register result = locs()->out(0).reg(); 4674 const Register result = locs()->out(0).reg();
4687 const VRegister value = locs()->in(0).fpu_reg(); 4675 const VRegister value = locs()->in(0).fpu_reg();
4688 // First check for NaN. Checking for minint after the conversion doesn't work 4676 // First check for NaN. Checking for minint after the conversion doesn't work
4689 // on ARM64 because fcvtzds gives 0 for NaN. 4677 // on ARM64 because fcvtzds gives 0 for NaN.
4690 // TODO(zra): Check spec that this is true. 4678 // TODO(zra): Check spec that this is true.
4691 __ fcmpd(value, value); 4679 __ fcmpd(value, value);
4692 __ b(deopt, VS); 4680 __ b(deopt, VS);
4693 4681
4694 __ fcvtzds(result, value); 4682 __ fcvtzds(result, value);
4695 // Check for overflow and that it fits into Smi. 4683 // Check for overflow and that it fits into Smi.
4696 __ CompareImmediate(result, 0xC000000000000000, PP); 4684 __ CompareImmediate(result, 0xC000000000000000);
4697 __ b(deopt, MI); 4685 __ b(deopt, MI);
4698 __ SmiTag(result); 4686 __ SmiTag(result);
4699 if (FLAG_throw_on_javascript_int_overflow) { 4687 if (FLAG_throw_on_javascript_int_overflow) {
4700 EmitJavascriptOverflowCheck(compiler, range(), deopt, result); 4688 EmitJavascriptOverflowCheck(compiler, range(), deopt, result);
4701 } 4689 }
4702 } 4690 }
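The overflow test above is compact: after fcvtzds (which yields kMinInt64 on overflow and 0 on NaN, hence the NaN check first), the compare against 0xC000000000000000 followed by b MI branches exactly when result + 2^62 has its sign bit set, i.e. when result lies outside the 63-bit smi range [-2^62, 2^62). A sketch with the bounds written out (names illustrative; std::optional stands in for the deopt):

    #include <cmath>
    #include <cstdint>
    #include <optional>

    std::optional<int64_t> DoubleToSmi(double value) {
      if (std::isnan(value)) return std::nullopt;    // fcmpd + b(deopt, VS)
      if (value >= 9223372036854775808.0 ||          // 2^63: cast would be UB in C++
          value < -9223372036854775808.0) {
        return std::nullopt;
      }
      int64_t result = static_cast<int64_t>(value);  // fcvtzds (truncates toward zero)
      const int64_t kSmiMin = -(INT64_C(1) << 62);
      const int64_t kSmiMax = (INT64_C(1) << 62) - 1;
      if (result < kSmiMin || result > kSmiMax) return std::nullopt;  // b(deopt, MI)
      return result;  // then SmiTag
    }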
4703 4691
4704 4692
4705 LocationSummary* DoubleToDoubleInstr::MakeLocationSummary(Zone* zone, 4693 LocationSummary* DoubleToDoubleInstr::MakeLocationSummary(Zone* zone,
4706 bool opt) const { 4694 bool opt) const {
(...skipping 87 matching lines...)
4794 LocationSummary* locs = instr->locs(); 4782 LocationSummary* locs = instr->locs();
4795 4783
4796 const VRegister base = locs->in(0).fpu_reg(); 4784 const VRegister base = locs->in(0).fpu_reg();
4797 const VRegister exp = locs->in(1).fpu_reg(); 4785 const VRegister exp = locs->in(1).fpu_reg();
4798 const VRegister result = locs->out(0).fpu_reg(); 4786 const VRegister result = locs->out(0).fpu_reg();
4799 const VRegister saved_base = locs->temp(0).fpu_reg(); 4787 const VRegister saved_base = locs->temp(0).fpu_reg();
4800 ASSERT((base == result) && (result != saved_base)); 4788 ASSERT((base == result) && (result != saved_base));
4801 4789
4802 Label skip_call, try_sqrt, check_base, return_nan, do_pow; 4790 Label skip_call, try_sqrt, check_base, return_nan, do_pow;
4803 __ fmovdd(saved_base, base); 4791 __ fmovdd(saved_base, base);
4804 __ LoadDImmediate(result, 1.0, PP); 4792 __ LoadDImmediate(result, 1.0);
4805 // exponent == 0.0 -> return 1.0; 4793 // exponent == 0.0 -> return 1.0;
4806 __ fcmpdz(exp); 4794 __ fcmpdz(exp);
4807 __ b(&check_base, VS); // NaN -> check base. 4795 __ b(&check_base, VS); // NaN -> check base.
4808 __ b(&skip_call, EQ); // exp is 0.0, result is 1.0. 4796 __ b(&skip_call, EQ); // exp is 0.0, result is 1.0.
4809 4797
4810 // exponent == 1.0 ? 4798 // exponent == 1.0 ?
4811 __ fcmpd(exp, result); 4799 __ fcmpd(exp, result);
4812 Label return_base; 4800 Label return_base;
4813 __ b(&return_base, EQ); 4801 __ b(&return_base, EQ);
4814 4802
4815 // exponent == 2.0 ? 4803 // exponent == 2.0 ?
4816 __ LoadDImmediate(VTMP, 2.0, PP); 4804 __ LoadDImmediate(VTMP, 2.0);
4817 __ fcmpd(exp, VTMP); 4805 __ fcmpd(exp, VTMP);
4818 Label return_base_times_2; 4806 Label return_base_times_2;
4819 __ b(&return_base_times_2, EQ); 4807 __ b(&return_base_times_2, EQ);
4820 4808
4821 // exponent == 3.0 ? 4809 // exponent == 3.0 ?
4822 __ LoadDImmediate(VTMP, 3.0, PP); 4810 __ LoadDImmediate(VTMP, 3.0);
4823 __ fcmpd(exp, VTMP); 4811 __ fcmpd(exp, VTMP);
4824 __ b(&check_base, NE); 4812 __ b(&check_base, NE);
4825 4813
4826 // base_times_3. 4814 // base_times_3.
4827 __ fmuld(result, saved_base, saved_base); 4815 __ fmuld(result, saved_base, saved_base);
4828 __ fmuld(result, result, saved_base); 4816 __ fmuld(result, result, saved_base);
4829 __ b(&skip_call); 4817 __ b(&skip_call);
4830 4818
4831 __ Bind(&return_base); 4819 __ Bind(&return_base);
4832 __ fmovdd(result, saved_base); 4820 __ fmovdd(result, saved_base);
4833 __ b(&skip_call); 4821 __ b(&skip_call);
4834 4822
4835 __ Bind(&return_base_times_2); 4823 __ Bind(&return_base_times_2);
4836 __ fmuld(result, saved_base, saved_base); 4824 __ fmuld(result, saved_base, saved_base);
4837 __ b(&skip_call); 4825 __ b(&skip_call);
4838 4826
4839 __ Bind(&check_base); 4827 __ Bind(&check_base);
4840 // Note: 'exp' could be NaN. 4828 // Note: 'exp' could be NaN.
4841 // base == 1.0 -> return 1.0; 4829 // base == 1.0 -> return 1.0;
4842 __ fcmpd(saved_base, result); 4830 __ fcmpd(saved_base, result);
4843 __ b(&return_nan, VS); 4831 __ b(&return_nan, VS);
4844 __ b(&skip_call, EQ); // base is 1.0, result is 1.0. 4832 __ b(&skip_call, EQ); // base is 1.0, result is 1.0.
4845 4833
4846 __ fcmpd(saved_base, exp); 4834 __ fcmpd(saved_base, exp);
4847 __ b(&try_sqrt, VC); // Neither 'exp' nor 'base' is NaN. 4835 __ b(&try_sqrt, VC); // Neither 'exp' nor 'base' is NaN.
4848 4836
4849 __ Bind(&return_nan); 4837 __ Bind(&return_nan);
4850 __ LoadDImmediate(result, NAN, PP); 4838 __ LoadDImmediate(result, NAN);
4851 __ b(&skip_call); 4839 __ b(&skip_call);
4852 4840
4853 Label return_zero; 4841 Label return_zero;
4854 __ Bind(&try_sqrt); 4842 __ Bind(&try_sqrt);
4855 4843
4856 // Before calling pow, check if we could use sqrt instead of pow. 4844 // Before calling pow, check if we could use sqrt instead of pow.
4857 __ LoadDImmediate(result, kNegInfinity, PP); 4845 __ LoadDImmediate(result, kNegInfinity);
4858 4846
4859 // base == -Infinity -> call pow; 4847 // base == -Infinity -> call pow;
4860 __ fcmpd(saved_base, result); 4848 __ fcmpd(saved_base, result);
4861 __ b(&do_pow, EQ); 4849 __ b(&do_pow, EQ);
4862 4850
4863 // exponent == 0.5 ? 4851 // exponent == 0.5 ?
4864 __ LoadDImmediate(result, 0.5, PP); 4852 __ LoadDImmediate(result, 0.5);
4865 __ fcmpd(exp, result); 4853 __ fcmpd(exp, result);
4866 __ b(&do_pow, NE); 4854 __ b(&do_pow, NE);
4867 4855
4868 // base == 0 -> return 0; 4856 // base == 0 -> return 0;
4869 __ fcmpdz(saved_base); 4857 __ fcmpdz(saved_base);
4870 __ b(&return_zero, EQ); 4858 __ b(&return_zero, EQ);
4871 4859
4872 __ fsqrtd(result, saved_base); 4860 __ fsqrtd(result, saved_base);
4873 __ b(&skip_call); 4861 __ b(&skip_call);
4874 4862
4875 __ Bind(&return_zero); 4863 __ Bind(&return_zero);
4876 __ LoadDImmediate(result, 0.0, PP); 4864 __ LoadDImmediate(result, 0.0);
4877 __ b(&skip_call); 4865 __ b(&skip_call);
4878 4866
4879 __ Bind(&do_pow); 4867 __ Bind(&do_pow);
4880 __ fmovdd(base, saved_base); // Restore base. 4868 __ fmovdd(base, saved_base); // Restore base.
4881 4869
4882 __ CallRuntime(instr->TargetFunction(), kInputCount); 4870 __ CallRuntime(instr->TargetFunction(), kInputCount);
4883 __ Bind(&skip_call); 4871 __ Bind(&skip_call);
4884 } 4872 }
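Scalar sketch of the dispatch above: cheap paths for the common exponents, sqrt for exp == 0.5 with its edge cases (pow(-Inf, 0.5) is +Inf but sqrt(-Inf) is NaN, and pow(-0.0, 0.5) is +0.0 while sqrt(-0.0) is -0.0), and a fall-through to the pow runtime call. Helper name illustrative:

    #include <cmath>

    double DartPow(double base, double exp) {
      if (exp == 0.0) return 1.0;                    // also covers exp == -0.0
      if (exp == 1.0) return base;
      if (exp == 2.0) return base * base;
      if (exp == 3.0) return base * base * base;
      if (base == 1.0) return 1.0;                   // even for NaN exponents
      if (std::isnan(base) || std::isnan(exp)) return NAN;
      if (exp == 0.5 && base != -INFINITY) {
        return (base == 0.0) ? 0.0 : std::sqrt(base);
      }
      return std::pow(base, exp);                    // CallRuntime
    }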
4885 4873
4886 4874
(...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after
4981 __ b(deopt, EQ); 4969 __ b(deopt, EQ);
4982 } 4970 }
4983 4971
4984 __ SmiUntag(result_mod, left); 4972 __ SmiUntag(result_mod, left);
4985 __ SmiUntag(TMP, right); 4973 __ SmiUntag(TMP, right);
4986 4974
4987 __ sdiv(result_div, result_mod, TMP); 4975 __ sdiv(result_div, result_mod, TMP);
4988 4976
4989 // Check the corner case of dividing 'MIN_SMI' by -1, in which 4977 // Check the corner case of dividing 'MIN_SMI' by -1, in which
4990 // case we cannot tag the result. 4978 // case we cannot tag the result.
4991 __ CompareImmediate(result_div, 0x4000000000000000, PP); 4979 __ CompareImmediate(result_div, 0x4000000000000000);
4992 __ b(deopt, EQ); 4980 __ b(deopt, EQ);
4993 // result_mod <- left - right * result_div. 4981 // result_mod <- left - right * result_div.
4994 __ msub(result_mod, TMP, result_div, result_mod); 4982 __ msub(result_mod, TMP, result_div, result_mod);
4995 __ SmiTag(result_div); 4983 __ SmiTag(result_div);
4996 __ SmiTag(result_mod); 4984 __ SmiTag(result_mod);
4997 // Correct MOD result: 4985 // Correct MOD result:
4998 // res = left % right; 4986 // res = left % right;
4999 // if (res < 0) { 4987 // if (res < 0) {
5000 // if (right < 0) { 4988 // if (right < 0) {
5001 // res = res - right; 4989 // res = res - right;
(...skipping 58 matching lines...)
5060 } 5048 }
5061 return summary; 5049 return summary;
5062 } 5050 }
5063 5051
5064 5052
5065 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 5053 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
5066 Label* deopt = compiler->AddDeoptStub(deopt_id(), 5054 Label* deopt = compiler->AddDeoptStub(deopt_id(),
5067 ICData::kDeoptCheckClass, 5055 ICData::kDeoptCheckClass,
5068 licm_hoisted_ ? ICData::kHoisted : 0); 5056 licm_hoisted_ ? ICData::kHoisted : 0);
5069 if (IsNullCheck()) { 5057 if (IsNullCheck()) {
5070 __ CompareObject(locs()->in(0).reg(), Object::null_object(), PP); 5058 __ CompareObject(locs()->in(0).reg(), Object::null_object());
5071 ASSERT(DeoptIfNull() || DeoptIfNotNull()); 5059 ASSERT(DeoptIfNull() || DeoptIfNotNull());
5072 Condition cond = DeoptIfNull() ? EQ : NE; 5060 Condition cond = DeoptIfNull() ? EQ : NE;
5073 __ b(deopt, cond); 5061 __ b(deopt, cond);
5074 return; 5062 return;
5075 } 5063 }
5076 5064
5077 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || 5065 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) ||
5078 (unary_checks().NumberOfChecks() > 1)); 5066 (unary_checks().NumberOfChecks() > 1));
5079 const Register value = locs()->in(0).reg(); 5067 const Register value = locs()->in(0).reg();
5080 const Register temp = locs()->temp(0).reg(); 5068 const Register temp = locs()->temp(0).reg();
5081 Label is_ok; 5069 Label is_ok;
5082 if (unary_checks().GetReceiverClassIdAt(0) == kSmiCid) { 5070 if (unary_checks().GetReceiverClassIdAt(0) == kSmiCid) {
5083 __ tsti(value, Immediate(kSmiTagMask)); 5071 __ tsti(value, Immediate(kSmiTagMask));
5084 __ b(&is_ok, EQ); 5072 __ b(&is_ok, EQ);
5085 } else { 5073 } else {
5086 __ tsti(value, Immediate(kSmiTagMask)); 5074 __ tsti(value, Immediate(kSmiTagMask));
5087 __ b(deopt, EQ); 5075 __ b(deopt, EQ);
5088 } 5076 }
5089 __ LoadClassId(temp, value, PP); 5077 __ LoadClassId(temp, value);
5090 5078
5091 if (IsDenseSwitch()) { 5079 if (IsDenseSwitch()) {
5092 ASSERT(cids_[0] < cids_[cids_.length() - 1]); 5080 ASSERT(cids_[0] < cids_[cids_.length() - 1]);
5093 __ AddImmediate(temp, temp, -cids_[0], PP); 5081 __ AddImmediate(temp, temp, -cids_[0]);
5094 __ CompareImmediate(temp, cids_[cids_.length() - 1] - cids_[0], PP); 5082 __ CompareImmediate(temp, cids_[cids_.length() - 1] - cids_[0]);
5095 __ b(deopt, HI); 5083 __ b(deopt, HI);
5096 5084
5097 intptr_t mask = ComputeCidMask(); 5085 intptr_t mask = ComputeCidMask();
5098 if (!IsDenseMask(mask)) { 5086 if (!IsDenseMask(mask)) {
5099 // Only need mask if there are missing numbers in the range. 5087 // Only need mask if there are missing numbers in the range.
5100 ASSERT(cids_.length() > 2); 5088 ASSERT(cids_.length() > 2);
5101 Register mask_reg = locs()->temp(1).reg(); 5089 Register mask_reg = locs()->temp(1).reg();
5102 __ LoadImmediate(mask_reg, 1, PP); 5090 __ LoadImmediate(mask_reg, 1);
5103 __ lslv(mask_reg, mask_reg, temp); 5091 __ lslv(mask_reg, mask_reg, temp);
5104 __ TestImmediate(mask_reg, mask, PP); 5092 __ TestImmediate(mask_reg, mask);
5105 __ b(deopt, EQ); 5093 __ b(deopt, EQ);
5106 } 5094 }
5107 5095
5108 } else { 5096 } else {
5109 GrowableArray<CidTarget> sorted_ic_data; 5097 GrowableArray<CidTarget> sorted_ic_data;
5110 FlowGraphCompiler::SortICDataByCount(unary_checks(), 5098 FlowGraphCompiler::SortICDataByCount(unary_checks(),
5111 &sorted_ic_data, 5099 &sorted_ic_data,
5112 /* drop_smi = */ true); 5100 /* drop_smi = */ true);
5113 const intptr_t num_checks = sorted_ic_data.length(); 5101 const intptr_t num_checks = sorted_ic_data.length();
5114 for (intptr_t i = 0; i < num_checks; i++) { 5102 for (intptr_t i = 0; i < num_checks; i++) {
5115 const intptr_t cid = sorted_ic_data[i].cid; 5103 const intptr_t cid = sorted_ic_data[i].cid;
5116 ASSERT(cid != kSmiCid); 5104 ASSERT(cid != kSmiCid);
5117 __ CompareImmediate(temp, cid, PP); 5105 __ CompareImmediate(temp, cid);
5118 if (i == (num_checks - 1)) { 5106 if (i == (num_checks - 1)) {
5119 __ b(deopt, NE); 5107 __ b(deopt, NE);
5120 } else { 5108 } else {
5121 __ b(&is_ok, EQ); 5109 __ b(&is_ok, EQ);
5122 } 5110 }
5123 } 5111 }
5124 } 5112 }
5125 __ Bind(&is_ok); 5113 __ Bind(&is_ok);
5126 } 5114 }
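For a dense range of class ids, the check above biases the loaded cid by the smallest expected cid so that one unsigned compare (b HI) performs the whole range test, and only falls back to a shifted-bit mask test when the range has holes. A sketch (names illustrative):

    #include <cstdint>

    bool ClassIdMatches(intptr_t cid, intptr_t cid_lo, intptr_t cid_hi,
                        uintptr_t mask) {
      uintptr_t biased = static_cast<uintptr_t>(cid - cid_lo);  // AddImmediate
      if (biased > static_cast<uintptr_t>(cid_hi - cid_lo)) {   // b(deopt, HI)
        return false;
      }
      return ((uintptr_t{1} << biased) & mask) != 0;  // lslv + TestImmediate
    }

With a hole-free range the mask test is skipped entirely, as in the emitted code.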
5127 5115
5128 5116
5129 LocationSummary* CheckClassIdInstr::MakeLocationSummary(Zone* zone, 5117 LocationSummary* CheckClassIdInstr::MakeLocationSummary(Zone* zone,
5130 bool opt) const { 5118 bool opt) const {
5131 const intptr_t kNumInputs = 1; 5119 const intptr_t kNumInputs = 1;
5132 const intptr_t kNumTemps = 0; 5120 const intptr_t kNumTemps = 0;
5133 LocationSummary* summary = new(zone) LocationSummary( 5121 LocationSummary* summary = new(zone) LocationSummary(
5134 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); 5122 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
5135 summary->set_in(0, Location::RequiresRegister()); 5123 summary->set_in(0, Location::RequiresRegister());
5136 return summary; 5124 return summary;
5137 } 5125 }
5138 5126
5139 5127
5140 void CheckClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 5128 void CheckClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
5141 Register value = locs()->in(0).reg(); 5129 Register value = locs()->in(0).reg();
5142 Label* deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptCheckClass); 5130 Label* deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptCheckClass);
5143 __ CompareImmediate(value, Smi::RawValue(cid_), PP); 5131 __ CompareImmediate(value, Smi::RawValue(cid_));
5144 __ b(deopt, NE); 5132 __ b(deopt, NE);
5145 } 5133 }
5146 5134
5147 5135
5148 LocationSummary* CheckSmiInstr::MakeLocationSummary(Zone* zone, 5136 LocationSummary* CheckSmiInstr::MakeLocationSummary(Zone* zone,
5149 bool opt) const { 5137 bool opt) const {
5150 const intptr_t kNumInputs = 1; 5138 const intptr_t kNumInputs = 1;
5151 const intptr_t kNumTemps = 0; 5139 const intptr_t kNumTemps = 0;
5152 LocationSummary* summary = new(zone) LocationSummary( 5140 LocationSummary* summary = new(zone) LocationSummary(
5153 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); 5141 zone, kNumInputs, kNumTemps, LocationSummary::kNoCall);
(...skipping 48 matching lines...)
5202 (Smi::Cast(index_loc.constant()).Value() < 0)); 5190 (Smi::Cast(index_loc.constant()).Value() < 0));
5203 // Unconditionally deoptimize for constant bounds checks because they 5191 // Unconditionally deoptimize for constant bounds checks because they
5204 // only occur when the index is out-of-bounds. 5192 // only occur when the index is out-of-bounds.
5205 __ b(deopt); 5193 __ b(deopt);
5206 return; 5194 return;
5207 } 5195 }
5208 5196
5209 if (index_loc.IsConstant()) { 5197 if (index_loc.IsConstant()) {
5210 const Register length = length_loc.reg(); 5198 const Register length = length_loc.reg();
5211 const Smi& index = Smi::Cast(index_loc.constant()); 5199 const Smi& index = Smi::Cast(index_loc.constant());
5212 __ CompareImmediate(length, reinterpret_cast<int64_t>(index.raw()), PP); 5200 __ CompareImmediate(length, reinterpret_cast<int64_t>(index.raw()));
5213 __ b(deopt, LS); 5201 __ b(deopt, LS);
5214 } else if (length_loc.IsConstant()) { 5202 } else if (length_loc.IsConstant()) {
5215 const Smi& length = Smi::Cast(length_loc.constant()); 5203 const Smi& length = Smi::Cast(length_loc.constant());
5216 const Register index = index_loc.reg(); 5204 const Register index = index_loc.reg();
5217 if (length.Value() == Smi::kMaxValue) { 5205 if (length.Value() == Smi::kMaxValue) {
5218 __ tst(index, Operand(index)); 5206 __ tst(index, Operand(index));
5219 __ b(deopt, MI); 5207 __ b(deopt, MI);
5220 } else { 5208 } else {
5221 __ CompareImmediate(index, reinterpret_cast<int64_t>(length.raw()), PP); 5209 __ CompareImmediate(index, reinterpret_cast<int64_t>(length.raw()));
5222 __ b(deopt, CS); 5210 __ b(deopt, CS);
5223 } 5211 }
5224 } else { 5212 } else {
5225 const Register length = length_loc.reg(); 5213 const Register length = length_loc.reg();
5226 const Register index = index_loc.reg(); 5214 const Register index = index_loc.reg();
5227 __ CompareRegisters(index, length); 5215 __ CompareRegisters(index, length);
5228 __ b(deopt, CS); 5216 __ b(deopt, CS);
5229 } 5217 }
5230 } 5218 }
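In the register/register case a single unsigned compare (b CS when index >= length) covers both bounds: both values carry the same smi tag, and a negative index becomes a huge unsigned value, so it fails the same test. Sketch (names illustrative):

    #include <cstdint>

    bool IndexInBounds(int64_t tagged_index, int64_t tagged_length) {
      return static_cast<uint64_t>(tagged_index) <
             static_cast<uint64_t>(tagged_length);  // CS -> deopt
    }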
5231 5219
(...skipping 295 matching lines...)
5527 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); 5515 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT);
5528 5516
5529 Label is_true, is_false; 5517 Label is_true, is_false;
5530 BranchLabels labels = { &is_true, &is_false, &is_false }; 5518 BranchLabels labels = { &is_true, &is_false, &is_false };
5531 Condition true_condition = EmitComparisonCode(compiler, labels); 5519 Condition true_condition = EmitComparisonCode(compiler, labels);
5532 EmitBranchOnCondition(compiler, true_condition, labels); 5520 EmitBranchOnCondition(compiler, true_condition, labels);
5533 5521
5534 const Register result = locs()->out(0).reg(); 5522 const Register result = locs()->out(0).reg();
5535 Label done; 5523 Label done;
5536 __ Bind(&is_false); 5524 __ Bind(&is_false);
5537 __ LoadObject(result, Bool::False(), PP); 5525 __ LoadObject(result, Bool::False());
5538 __ b(&done); 5526 __ b(&done);
5539 __ Bind(&is_true); 5527 __ Bind(&is_true);
5540 __ LoadObject(result, Bool::True(), PP); 5528 __ LoadObject(result, Bool::True());
5541 __ Bind(&done); 5529 __ Bind(&done);
5542 } 5530 }
5543 5531
5544 5532
5545 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, 5533 void StrictCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler,
5546 BranchInstr* branch) { 5534 BranchInstr* branch) {
5547 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); 5535 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT);
5548 5536
5549 BranchLabels labels = compiler->CreateBranchLabels(branch); 5537 BranchLabels labels = compiler->CreateBranchLabels(branch);
5550 Condition true_condition = EmitComparisonCode(compiler, labels); 5538 Condition true_condition = EmitComparisonCode(compiler, labels);
5551 EmitBranchOnCondition(compiler, true_condition, labels); 5539 EmitBranchOnCondition(compiler, true_condition, labels);
5552 } 5540 }
5553 5541
5554 5542
5555 LocationSummary* BooleanNegateInstr::MakeLocationSummary(Zone* zone, 5543 LocationSummary* BooleanNegateInstr::MakeLocationSummary(Zone* zone,
5556 bool opt) const { 5544 bool opt) const {
5557 return LocationSummary::Make(zone, 5545 return LocationSummary::Make(zone,
5558 1, 5546 1,
5559 Location::RequiresRegister(), 5547 Location::RequiresRegister(),
5560 LocationSummary::kNoCall); 5548 LocationSummary::kNoCall);
5561 } 5549 }
5562 5550
5563 5551
5564 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 5552 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
5565 const Register value = locs()->in(0).reg(); 5553 const Register value = locs()->in(0).reg();
5566 const Register result = locs()->out(0).reg(); 5554 const Register result = locs()->out(0).reg();
5567 5555
5568 __ LoadObject(result, Bool::True(), PP); 5556 __ LoadObject(result, Bool::True());
5569 __ LoadObject(TMP, Bool::False(), PP); 5557 __ LoadObject(TMP, Bool::False());
5570 __ CompareRegisters(result, value); 5558 __ CompareRegisters(result, value);
5571 __ csel(result, TMP, result, EQ); 5559 __ csel(result, TMP, result, EQ);
5572 } 5560 }
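Dart's true and false are canonical objects, so the branchless negation above needs only pointer identity: load True, compare, and select False (csel) when they matched. Sketch with illustrative types, not VM API:

    const void* BoolNegate(const void* value, const void* true_obj,
                           const void* false_obj) {
      // csel result, TMP, result, EQ: select False iff value == True.
      return (value == true_obj) ? false_obj : true_obj;
    }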
5573 5561
5574 5562
5575 LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone, 5563 LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone,
5576 bool opt) const { 5564 bool opt) const {
5577 return MakeCallSummary(zone); 5565 return MakeCallSummary(zone);
5578 } 5566 }
5579 5567
(...skipping 27 matching lines...)
5607 zone, kNumInputs, kNumTemps, LocationSummary::kCall); 5595 zone, kNumInputs, kNumTemps, LocationSummary::kCall);
5608 locs->set_in(0, Location::RegisterLocation(R0)); 5596 locs->set_in(0, Location::RegisterLocation(R0));
5609 locs->set_out(0, Location::RegisterLocation(R0)); 5597 locs->set_out(0, Location::RegisterLocation(R0));
5610 return locs; 5598 return locs;
5611 } 5599 }
5612 5600
5613 5601
5614 void GrowRegExpStackInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 5602 void GrowRegExpStackInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
5615 const Register typed_data = locs()->in(0).reg(); 5603 const Register typed_data = locs()->in(0).reg();
5616 const Register result = locs()->out(0).reg(); 5604 const Register result = locs()->out(0).reg();
5617 __ PushObject(Object::null_object(), PP); 5605 __ PushObject(Object::null_object());
5618 __ Push(typed_data); 5606 __ Push(typed_data);
5619 compiler->GenerateRuntimeCall(Scanner::kNoSourcePos, // No token position. 5607 compiler->GenerateRuntimeCall(Scanner::kNoSourcePos, // No token position.
5620 deopt_id(), 5608 deopt_id(),
5621 kGrowRegExpStackRuntimeEntry, 5609 kGrowRegExpStackRuntimeEntry,
5622 1, 5610 1,
5623 locs()); 5611 locs());
5624 __ Drop(1); 5612 __ Drop(1);
5625 __ Pop(result); 5613 __ Pop(result);
5626 } 5614 }
5627 5615
5628 5616
5629 } // namespace dart 5617 } // namespace dart
5630 5618
5631 #endif // defined TARGET_ARCH_ARM64 5619 #endif // defined TARGET_ARCH_ARM64