| OLD | NEW |
| 1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_DBC. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_DBC. |
| 6 #if defined(TARGET_ARCH_DBC) | 6 #if defined(TARGET_ARCH_DBC) |
| 7 | 7 |
| 8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
| 9 | 9 |
| 10 #include "vm/compiler.h" |
| 10 #include "vm/cpu.h" | 11 #include "vm/cpu.h" |
| 11 #include "vm/compiler.h" | |
| 12 #include "vm/dart_entry.h" | 12 #include "vm/dart_entry.h" |
| 13 #include "vm/flow_graph.h" | 13 #include "vm/flow_graph.h" |
| 14 #include "vm/flow_graph_compiler.h" | 14 #include "vm/flow_graph_compiler.h" |
| 15 #include "vm/flow_graph_range_analysis.h" | 15 #include "vm/flow_graph_range_analysis.h" |
| 16 #include "vm/locations.h" | 16 #include "vm/locations.h" |
| 17 #include "vm/object_store.h" | 17 #include "vm/object_store.h" |
| 18 #include "vm/parser.h" | 18 #include "vm/parser.h" |
| 19 #include "vm/simulator.h" | 19 #include "vm/simulator.h" |
| 20 #include "vm/stack_frame.h" | 20 #include "vm/stack_frame.h" |
| 21 #include "vm/stub_code.h" | 21 #include "vm/stub_code.h" |
| (...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 111 for (intptr_t i = 0; i < num_temps; i++) { | 111 for (intptr_t i = 0; i < num_temps; i++) { |
| 112 locs->set_temp(i, Location::RequiresRegister()); | 112 locs->set_temp(i, Location::RequiresRegister()); |
| 113 } | 113 } |
| 114 if (!output.IsInvalid()) { | 114 if (!output.IsInvalid()) { |
| 115 // For instructions that call we default to returning result in R0. | 115 // For instructions that call we default to returning result in R0. |
| 116 locs->set_out(0, output); | 116 locs->set_out(0, output); |
| 117 } | 117 } |
| 118 return locs; | 118 return locs; |
| 119 } | 119 } |
| 120 | 120 |
| 121 | |
| 122 #define DEFINE_MAKE_LOCATION_SUMMARY(Name, ...) \ | 121 #define DEFINE_MAKE_LOCATION_SUMMARY(Name, ...) \ |
| 123 LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \ | 122 LocationSummary* Name##Instr::MakeLocationSummary(Zone* zone, bool opt) \ |
| 124 const { \ | 123 const { \ |
| 125 return CreateLocationSummary(zone, __VA_ARGS__); \ | 124 return CreateLocationSummary(zone, __VA_ARGS__); \ |
| 126 } | 125 } |
| 127 | 126 |
| 128 #define EMIT_NATIVE_CODE(Name, ...) \ | 127 #define EMIT_NATIVE_CODE(Name, ...) \ |
| 129 DEFINE_MAKE_LOCATION_SUMMARY(Name, __VA_ARGS__); \ | 128 DEFINE_MAKE_LOCATION_SUMMARY(Name, __VA_ARGS__); \ |
| 130 void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) | 129 void Name##Instr::EmitNativeCode(FlowGraphCompiler* compiler) |
| 131 | 130 |
| (...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 172 #undef DEFINE_UNIMPLEMENTED | 171 #undef DEFINE_UNIMPLEMENTED |
| 173 | 172 |
| 174 #define DEFINE_UNREACHABLE(Name) \ | 173 #define DEFINE_UNREACHABLE(Name) \ |
| 175 DEFINE_UNREACHABLE_MAKE_LOCATION_SUMMARY(Name) \ | 174 DEFINE_UNREACHABLE_MAKE_LOCATION_SUMMARY(Name) \ |
| 176 DEFINE_UNREACHABLE_EMIT_NATIVE_CODE(Name) | 175 DEFINE_UNREACHABLE_EMIT_NATIVE_CODE(Name) |
| 177 | 176 |
| 178 FOR_EACH_UNREACHABLE_INSTRUCTION(DEFINE_UNREACHABLE) | 177 FOR_EACH_UNREACHABLE_INSTRUCTION(DEFINE_UNREACHABLE) |
| 179 | 178 |
| 180 #undef DEFINE_UNREACHABLE | 179 #undef DEFINE_UNREACHABLE |
| 181 | 180 |
| 182 | |
| 183 // Only used in AOT compilation. | 181 // Only used in AOT compilation. |
| 184 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(CheckedSmiComparison) | 182 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(CheckedSmiComparison) |
| 185 | 183 |
| 186 | |
| 187 EMIT_NATIVE_CODE(InstanceOf, | 184 EMIT_NATIVE_CODE(InstanceOf, |
| 188 3, | 185 3, |
| 189 Location::SameAsFirstInput(), | 186 Location::SameAsFirstInput(), |
| 190 LocationSummary::kCall) { | 187 LocationSummary::kCall) { |
| 191 SubtypeTestCache& test_cache = SubtypeTestCache::Handle(); | 188 SubtypeTestCache& test_cache = SubtypeTestCache::Handle(); |
| 192 if (!type().IsVoidType() && type().IsInstantiated()) { | 189 if (!type().IsVoidType() && type().IsInstantiated()) { |
| 193 test_cache = SubtypeTestCache::New(); | 190 test_cache = SubtypeTestCache::New(); |
| 194 } | 191 } |
| 195 | 192 |
| 196 if (compiler->is_optimizing()) { | 193 if (compiler->is_optimizing()) { |
| 197 __ Push(locs()->in(0).reg()); // Value. | 194 __ Push(locs()->in(0).reg()); // Value. |
| 198 __ Push(locs()->in(1).reg()); // Instantiator type arguments. | 195 __ Push(locs()->in(1).reg()); // Instantiator type arguments. |
| 199 __ Push(locs()->in(2).reg()); // Function type arguments. | 196 __ Push(locs()->in(2).reg()); // Function type arguments. |
| 200 } | 197 } |
| 201 | 198 |
| 202 __ PushConstant(type()); | 199 __ PushConstant(type()); |
| 203 __ PushConstant(test_cache); | 200 __ PushConstant(test_cache); |
| 204 __ InstanceOf(); | 201 __ InstanceOf(); |
| 205 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(), | 202 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(), |
| 206 token_pos()); | 203 token_pos()); |
| 207 compiler->RecordAfterCall(this, FlowGraphCompiler::kHasResult); | 204 compiler->RecordAfterCall(this, FlowGraphCompiler::kHasResult); |
| 208 if (compiler->is_optimizing()) { | 205 if (compiler->is_optimizing()) { |
| 209 __ PopLocal(locs()->out(0).reg()); | 206 __ PopLocal(locs()->out(0).reg()); |
| 210 } | 207 } |
| 211 } | 208 } |
| 212 | 209 |
| 213 | |
| 214 DEFINE_MAKE_LOCATION_SUMMARY(AssertAssignable, | 210 DEFINE_MAKE_LOCATION_SUMMARY(AssertAssignable, |
| 215 3, | 211 3, |
| 216 Location::SameAsFirstInput(), | 212 Location::SameAsFirstInput(), |
| 217 LocationSummary::kCall); | 213 LocationSummary::kCall); |
| 218 | 214 |
| 219 | |
| 220 EMIT_NATIVE_CODE(AssertBoolean, | 215 EMIT_NATIVE_CODE(AssertBoolean, |
| 221 1, | 216 1, |
| 222 Location::SameAsFirstInput(), | 217 Location::SameAsFirstInput(), |
| 223 LocationSummary::kCall) { | 218 LocationSummary::kCall) { |
| 224 if (compiler->is_optimizing()) { | 219 if (compiler->is_optimizing()) { |
| 225 __ Push(locs()->in(0).reg()); | 220 __ Push(locs()->in(0).reg()); |
| 226 } | 221 } |
| 227 __ AssertBoolean(Isolate::Current()->type_checks() ? 1 : 0); | 222 __ AssertBoolean(Isolate::Current()->type_checks() ? 1 : 0); |
| 228 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(), | 223 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(), |
| 229 token_pos()); | 224 token_pos()); |
| 230 compiler->RecordAfterCall(this, FlowGraphCompiler::kHasResult); | 225 compiler->RecordAfterCall(this, FlowGraphCompiler::kHasResult); |
| 231 if (compiler->is_optimizing()) { | 226 if (compiler->is_optimizing()) { |
| 232 __ Drop1(); | 227 __ Drop1(); |
| 233 } | 228 } |
| 234 } | 229 } |
| 235 | 230 |
| 236 | |
| 237 EMIT_NATIVE_CODE(PolymorphicInstanceCall, | 231 EMIT_NATIVE_CODE(PolymorphicInstanceCall, |
| 238 0, | 232 0, |
| 239 Location::RegisterLocation(0), | 233 Location::RegisterLocation(0), |
| 240 LocationSummary::kCall) { | 234 LocationSummary::kCall) { |
| 241 const Array& arguments_descriptor = | 235 const Array& arguments_descriptor = |
| 242 Array::Handle(instance_call()->GetArgumentsDescriptor()); | 236 Array::Handle(instance_call()->GetArgumentsDescriptor()); |
| 243 const intptr_t argdesc_kidx = __ AddConstant(arguments_descriptor); | 237 const intptr_t argdesc_kidx = __ AddConstant(arguments_descriptor); |
| 244 | 238 |
| 245 // Push the target onto the stack. | 239 // Push the target onto the stack. |
| 246 const intptr_t length = targets_.length(); | 240 const intptr_t length = targets_.length(); |
| (...skipping 28 matching lines...) Expand all Loading... |
| 275 0); | 269 0); |
| 276 | 270 |
| 277 // Call the function. | 271 // Call the function. |
| 278 __ StaticCall(instance_call()->ArgumentCount(), argdesc_kidx); | 272 __ StaticCall(instance_call()->ArgumentCount(), argdesc_kidx); |
| 279 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(), | 273 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(), |
| 280 instance_call()->token_pos()); | 274 instance_call()->token_pos()); |
| 281 compiler->RecordAfterCall(this, FlowGraphCompiler::kHasResult); | 275 compiler->RecordAfterCall(this, FlowGraphCompiler::kHasResult); |
| 282 __ PopLocal(locs()->out(0).reg()); | 276 __ PopLocal(locs()->out(0).reg()); |
| 283 } | 277 } |
| 284 | 278 |
| 285 | |
| 286 EMIT_NATIVE_CODE(Stop, 0) { | 279 EMIT_NATIVE_CODE(Stop, 0) { |
| 287 __ Stop(message()); | 280 __ Stop(message()); |
| 288 } | 281 } |
| 289 | 282 |
| 290 | |
| 291 EMIT_NATIVE_CODE(CheckStackOverflow, | 283 EMIT_NATIVE_CODE(CheckStackOverflow, |
| 292 0, | 284 0, |
| 293 Location::NoLocation(), | 285 Location::NoLocation(), |
| 294 LocationSummary::kCall) { | 286 LocationSummary::kCall) { |
| 295 if (compiler->ForceSlowPathForStackOverflow()) { | 287 if (compiler->ForceSlowPathForStackOverflow()) { |
| 296 __ CheckStackAlwaysExit(); | 288 __ CheckStackAlwaysExit(); |
| 297 } else { | 289 } else { |
| 298 __ CheckStack(); | 290 __ CheckStack(); |
| 299 } | 291 } |
| 300 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(), | 292 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(), |
| 301 token_pos()); | 293 token_pos()); |
| 302 compiler->RecordAfterCall(this, FlowGraphCompiler::kNoResult); | 294 compiler->RecordAfterCall(this, FlowGraphCompiler::kNoResult); |
| 303 } | 295 } |
| 304 | 296 |
| 305 | |
| 306 EMIT_NATIVE_CODE(PushArgument, 1) { | 297 EMIT_NATIVE_CODE(PushArgument, 1) { |
| 307 if (compiler->is_optimizing()) { | 298 if (compiler->is_optimizing()) { |
| 308 __ Push(locs()->in(0).reg()); | 299 __ Push(locs()->in(0).reg()); |
| 309 } | 300 } |
| 310 } | 301 } |
| 311 | 302 |
| 312 | |
| 313 EMIT_NATIVE_CODE(LoadLocal, 0) { | 303 EMIT_NATIVE_CODE(LoadLocal, 0) { |
| 314 ASSERT(!compiler->is_optimizing()); | 304 ASSERT(!compiler->is_optimizing()); |
| 315 ASSERT(local().index() != 0); | 305 ASSERT(local().index() != 0); |
| 316 __ Push((local().index() > 0) ? (-local().index()) : (-local().index() - 1)); | 306 __ Push((local().index() > 0) ? (-local().index()) : (-local().index() - 1)); |
| 317 } | 307 } |
| 318 | 308 |
| 319 | |
| 320 EMIT_NATIVE_CODE(StoreLocal, 0) { | 309 EMIT_NATIVE_CODE(StoreLocal, 0) { |
| 321 ASSERT(!compiler->is_optimizing()); | 310 ASSERT(!compiler->is_optimizing()); |
| 322 ASSERT(local().index() != 0); | 311 ASSERT(local().index() != 0); |
| 323 if (HasTemp()) { | 312 if (HasTemp()) { |
| 324 __ StoreLocal((local().index() > 0) ? (-local().index()) | 313 __ StoreLocal((local().index() > 0) ? (-local().index()) |
| 325 : (-local().index() - 1)); | 314 : (-local().index() - 1)); |
| 326 } else { | 315 } else { |
| 327 __ PopLocal((local().index() > 0) ? (-local().index()) | 316 __ PopLocal((local().index() > 0) ? (-local().index()) |
| 328 : (-local().index() - 1)); | 317 : (-local().index() - 1)); |
| 329 } | 318 } |
| 330 } | 319 } |
| 331 | 320 |
| 332 | |
| 333 EMIT_NATIVE_CODE(LoadClassId, 1, Location::RequiresRegister()) { | 321 EMIT_NATIVE_CODE(LoadClassId, 1, Location::RequiresRegister()) { |
| 334 if (compiler->is_optimizing()) { | 322 if (compiler->is_optimizing()) { |
| 335 __ LoadClassId(locs()->out(0).reg(), locs()->in(0).reg()); | 323 __ LoadClassId(locs()->out(0).reg(), locs()->in(0).reg()); |
| 336 } else { | 324 } else { |
| 337 __ LoadClassIdTOS(); | 325 __ LoadClassIdTOS(); |
| 338 } | 326 } |
| 339 } | 327 } |
| 340 | 328 |
| 341 | |
| 342 EMIT_NATIVE_CODE(Constant, 0, Location::RequiresRegister()) { | 329 EMIT_NATIVE_CODE(Constant, 0, Location::RequiresRegister()) { |
| 343 if (compiler->is_optimizing()) { | 330 if (compiler->is_optimizing()) { |
| 344 if (locs()->out(0).IsRegister()) { | 331 if (locs()->out(0).IsRegister()) { |
| 345 __ LoadConstant(locs()->out(0).reg(), value()); | 332 __ LoadConstant(locs()->out(0).reg(), value()); |
| 346 } | 333 } |
| 347 } else { | 334 } else { |
| 348 __ PushConstant(value()); | 335 __ PushConstant(value()); |
| 349 } | 336 } |
| 350 } | 337 } |
| 351 | 338 |
| 352 | |
| 353 EMIT_NATIVE_CODE(UnboxedConstant, 0, Location::RequiresRegister()) { | 339 EMIT_NATIVE_CODE(UnboxedConstant, 0, Location::RequiresRegister()) { |
| 354 // The register allocator drops constant definitions that have no uses. | 340 // The register allocator drops constant definitions that have no uses. |
| 355 if (locs()->out(0).IsInvalid()) { | 341 if (locs()->out(0).IsInvalid()) { |
| 356 return; | 342 return; |
| 357 } | 343 } |
| 358 if (representation_ != kUnboxedDouble) { | 344 if (representation_ != kUnboxedDouble) { |
| 359 Unsupported(compiler); | 345 Unsupported(compiler); |
| 360 UNREACHABLE(); | 346 UNREACHABLE(); |
| 361 } | 347 } |
| 362 const Register result = locs()->out(0).reg(); | 348 const Register result = locs()->out(0).reg(); |
| 363 if (Utils::DoublesBitEqual(Double::Cast(value()).value(), 0.0)) { | 349 if (Utils::DoublesBitEqual(Double::Cast(value()).value(), 0.0)) { |
| 364 __ BitXor(result, result, result); | 350 __ BitXor(result, result, result); |
| 365 } else { | 351 } else { |
| 366 __ LoadConstant(result, value()); | 352 __ LoadConstant(result, value()); |
| 367 __ UnboxDouble(result, result); | 353 __ UnboxDouble(result, result); |
| 368 } | 354 } |
| 369 } | 355 } |
| 370 | 356 |
| 371 | |
| 372 EMIT_NATIVE_CODE(Return, 1) { | 357 EMIT_NATIVE_CODE(Return, 1) { |
| 373 if (compiler->is_optimizing()) { | 358 if (compiler->is_optimizing()) { |
| 374 __ Return(locs()->in(0).reg()); | 359 __ Return(locs()->in(0).reg()); |
| 375 } else { | 360 } else { |
| 376 __ ReturnTOS(); | 361 __ ReturnTOS(); |
| 377 } | 362 } |
| 378 } | 363 } |
| 379 | 364 |
| 380 | |
| 381 LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(Zone* zone, | 365 LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(Zone* zone, |
| 382 bool opt) const { | 366 bool opt) const { |
| 383 const intptr_t kNumInputs = 1; | 367 const intptr_t kNumInputs = 1; |
| 384 const intptr_t kNumTemps = 1; | 368 const intptr_t kNumTemps = 1; |
| 385 LocationSummary* locs = new (zone) | 369 LocationSummary* locs = new (zone) |
| 386 LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 370 LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 387 for (intptr_t i = 0; i < kNumInputs; i++) { | 371 for (intptr_t i = 0; i < kNumInputs; i++) { |
| 388 locs->set_in(i, Location::RequiresRegister()); | 372 locs->set_in(i, Location::RequiresRegister()); |
| 389 } | 373 } |
| 390 for (intptr_t i = 0; i < kNumTemps; i++) { | 374 for (intptr_t i = 0; i < kNumTemps; i++) { |
| 391 locs->set_temp(i, Location::RequiresRegister()); | 375 locs->set_temp(i, Location::RequiresRegister()); |
| 392 } | 376 } |
| 393 return locs; | 377 return locs; |
| 394 } | 378 } |
| 395 | 379 |
| 396 | |
| 397 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 380 void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 398 if (compiler->is_optimizing()) { | 381 if (compiler->is_optimizing()) { |
| 399 __ LoadConstant(locs()->temp(0).reg(), | 382 __ LoadConstant(locs()->temp(0).reg(), |
| 400 Field::ZoneHandle(field().Original())); | 383 Field::ZoneHandle(field().Original())); |
| 401 __ StoreField(locs()->temp(0).reg(), | 384 __ StoreField(locs()->temp(0).reg(), |
| 402 Field::static_value_offset() / kWordSize, | 385 Field::static_value_offset() / kWordSize, |
| 403 locs()->in(0).reg()); | 386 locs()->in(0).reg()); |
| 404 } else { | 387 } else { |
| 405 const intptr_t kidx = __ AddConstant(field()); | 388 const intptr_t kidx = __ AddConstant(field()); |
| 406 __ StoreStaticTOS(kidx); | 389 __ StoreStaticTOS(kidx); |
| 407 } | 390 } |
| 408 } | 391 } |
| 409 | 392 |
| 410 | |
| 411 EMIT_NATIVE_CODE(LoadStaticField, 1, Location::RequiresRegister()) { | 393 EMIT_NATIVE_CODE(LoadStaticField, 1, Location::RequiresRegister()) { |
| 412 if (compiler->is_optimizing()) { | 394 if (compiler->is_optimizing()) { |
| 413 __ LoadField(locs()->out(0).reg(), locs()->in(0).reg(), | 395 __ LoadField(locs()->out(0).reg(), locs()->in(0).reg(), |
| 414 Field::static_value_offset() / kWordSize); | 396 Field::static_value_offset() / kWordSize); |
| 415 } else { | 397 } else { |
| 416 const intptr_t kidx = __ AddConstant(StaticField()); | 398 const intptr_t kidx = __ AddConstant(StaticField()); |
| 417 __ PushStatic(kidx); | 399 __ PushStatic(kidx); |
| 418 } | 400 } |
| 419 } | 401 } |
| 420 | 402 |
| 421 | |
| 422 EMIT_NATIVE_CODE(InitStaticField, | 403 EMIT_NATIVE_CODE(InitStaticField, |
| 423 1, | 404 1, |
| 424 Location::NoLocation(), | 405 Location::NoLocation(), |
| 425 LocationSummary::kCall) { | 406 LocationSummary::kCall) { |
| 426 if (compiler->is_optimizing()) { | 407 if (compiler->is_optimizing()) { |
| 427 __ Push(locs()->in(0).reg()); | 408 __ Push(locs()->in(0).reg()); |
| 428 __ InitStaticTOS(); | 409 __ InitStaticTOS(); |
| 429 } else { | 410 } else { |
| 430 __ InitStaticTOS(); | 411 __ InitStaticTOS(); |
| 431 } | 412 } |
| 432 compiler->RecordAfterCall(this, FlowGraphCompiler::kNoResult); | 413 compiler->RecordAfterCall(this, FlowGraphCompiler::kNoResult); |
| 433 } | 414 } |
| 434 | 415 |
| 435 | |
| 436 EMIT_NATIVE_CODE(ClosureCall, | 416 EMIT_NATIVE_CODE(ClosureCall, |
| 437 1, | 417 1, |
| 438 Location::RegisterLocation(0), | 418 Location::RegisterLocation(0), |
| 439 LocationSummary::kCall) { | 419 LocationSummary::kCall) { |
| 440 if (compiler->is_optimizing()) { | 420 if (compiler->is_optimizing()) { |
| 441 __ Push(locs()->in(0).reg()); | 421 __ Push(locs()->in(0).reg()); |
| 442 } | 422 } |
| 443 | 423 |
| 444 const Array& arguments_descriptor = | 424 const Array& arguments_descriptor = |
| 445 Array::ZoneHandle(GetArgumentsDescriptor()); | 425 Array::ZoneHandle(GetArgumentsDescriptor()); |
| 446 const intptr_t argdesc_kidx = | 426 const intptr_t argdesc_kidx = |
| 447 compiler->assembler()->AddConstant(arguments_descriptor); | 427 compiler->assembler()->AddConstant(arguments_descriptor); |
| 448 __ StaticCall(ArgumentCount(), argdesc_kidx); | 428 __ StaticCall(ArgumentCount(), argdesc_kidx); |
| 449 compiler->RecordAfterCall(this, FlowGraphCompiler::kHasResult); | 429 compiler->RecordAfterCall(this, FlowGraphCompiler::kHasResult); |
| 450 if (compiler->is_optimizing()) { | 430 if (compiler->is_optimizing()) { |
| 451 __ PopLocal(locs()->out(0).reg()); | 431 __ PopLocal(locs()->out(0).reg()); |
| 452 } | 432 } |
| 453 } | 433 } |
| 454 | 434 |
| 455 | |
| 456 static void EmitBranchOnCondition(FlowGraphCompiler* compiler, | 435 static void EmitBranchOnCondition(FlowGraphCompiler* compiler, |
| 457 Condition true_condition, | 436 Condition true_condition, |
| 458 BranchLabels labels) { | 437 BranchLabels labels) { |
| 459 if (true_condition == NEXT_IS_TRUE) { | 438 if (true_condition == NEXT_IS_TRUE) { |
| 460 // NEXT_IS_TRUE indicates that the preceeding test expects the true case | 439 // NEXT_IS_TRUE indicates that the preceeding test expects the true case |
| 461 // to be in the subsequent instruction, which it skips if the test fails. | 440 // to be in the subsequent instruction, which it skips if the test fails. |
| 462 __ Jump(labels.true_label); | 441 __ Jump(labels.true_label); |
| 463 if (labels.fall_through != labels.false_label) { | 442 if (labels.fall_through != labels.false_label) { |
| 464 // The preceeding Jump instruction will be skipped if the test fails. | 443 // The preceeding Jump instruction will be skipped if the test fails. |
| 465 // If we aren't falling through to the false case, then we have to do | 444 // If we aren't falling through to the false case, then we have to do |
| 466 // a Jump to it here. | 445 // a Jump to it here. |
| 467 __ Jump(labels.false_label); | 446 __ Jump(labels.false_label); |
| 468 } | 447 } |
| 469 } else { | 448 } else { |
| 470 ASSERT(true_condition == NEXT_IS_FALSE); | 449 ASSERT(true_condition == NEXT_IS_FALSE); |
| 471 // NEXT_IS_FALSE indicates that the preceeding test has been flipped and | 450 // NEXT_IS_FALSE indicates that the preceeding test has been flipped and |
| 472 // expects the false case to be in the subsequent instruction, which it | 451 // expects the false case to be in the subsequent instruction, which it |
| 473 // skips if the test succeeds. | 452 // skips if the test succeeds. |
| 474 __ Jump(labels.false_label); | 453 __ Jump(labels.false_label); |
| 475 if (labels.fall_through != labels.true_label) { | 454 if (labels.fall_through != labels.true_label) { |
| 476 // The preceeding Jump instruction will be skipped if the test succeeds. | 455 // The preceeding Jump instruction will be skipped if the test succeeds. |
| 477 // If we aren't falling through to the true case, then we have to do | 456 // If we aren't falling through to the true case, then we have to do |
| 478 // a Jump to it here. | 457 // a Jump to it here. |
| 479 __ Jump(labels.true_label); | 458 __ Jump(labels.true_label); |
| 480 } | 459 } |
| 481 } | 460 } |
| 482 } | 461 } |
| 483 | 462 |
| 484 | |
| 485 Condition StrictCompareInstr::GetNextInstructionCondition( | 463 Condition StrictCompareInstr::GetNextInstructionCondition( |
| 486 FlowGraphCompiler* compiler, | 464 FlowGraphCompiler* compiler, |
| 487 BranchLabels labels) { | 465 BranchLabels labels) { |
| 488 return (labels.fall_through == labels.false_label) ? NEXT_IS_TRUE | 466 return (labels.fall_through == labels.false_label) ? NEXT_IS_TRUE |
| 489 : NEXT_IS_FALSE; | 467 : NEXT_IS_FALSE; |
| 490 } | 468 } |
| 491 | 469 |
| 492 | |
| 493 Condition StrictCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler, | 470 Condition StrictCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler, |
| 494 BranchLabels labels) { | 471 BranchLabels labels) { |
| 495 ASSERT((kind() == Token::kNE_STRICT) || (kind() == Token::kEQ_STRICT)); | 472 ASSERT((kind() == Token::kNE_STRICT) || (kind() == Token::kEQ_STRICT)); |
| 496 | 473 |
| 497 Token::Kind comparison; | 474 Token::Kind comparison; |
| 498 Condition condition; | 475 Condition condition; |
| 499 if (labels.fall_through == labels.false_label) { | 476 if (labels.fall_through == labels.false_label) { |
| 500 condition = NEXT_IS_TRUE; | 477 condition = NEXT_IS_TRUE; |
| 501 comparison = kind(); | 478 comparison = kind(); |
| 502 } else { | 479 } else { |
| (...skipping 22 matching lines...) Expand all Loading... |
| 525 | 502 |
| 526 if (needs_number_check() && token_pos().IsReal()) { | 503 if (needs_number_check() && token_pos().IsReal()) { |
| 527 compiler->RecordSafepoint(locs()); | 504 compiler->RecordSafepoint(locs()); |
| 528 compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, deopt_id_, | 505 compiler->AddCurrentDescriptor(RawPcDescriptors::kRuntimeCall, deopt_id_, |
| 529 token_pos()); | 506 token_pos()); |
| 530 } | 507 } |
| 531 | 508 |
| 532 return condition; | 509 return condition; |
| 533 } | 510 } |
| 534 | 511 |
| 535 | |
| 536 DEFINE_MAKE_LOCATION_SUMMARY(StrictCompare, | 512 DEFINE_MAKE_LOCATION_SUMMARY(StrictCompare, |
| 537 2, | 513 2, |
| 538 Location::RequiresRegister(), | 514 Location::RequiresRegister(), |
| 539 needs_number_check() ? LocationSummary::kCall | 515 needs_number_check() ? LocationSummary::kCall |
| 540 : LocationSummary::kNoCall) | 516 : LocationSummary::kNoCall) |
| 541 | 517 |
| 542 | |
| 543 void ComparisonInstr::EmitBranchCode(FlowGraphCompiler* compiler, | 518 void ComparisonInstr::EmitBranchCode(FlowGraphCompiler* compiler, |
| 544 BranchInstr* branch) { | 519 BranchInstr* branch) { |
| 545 BranchLabels labels = compiler->CreateBranchLabels(branch); | 520 BranchLabels labels = compiler->CreateBranchLabels(branch); |
| 546 Condition true_condition = EmitComparisonCode(compiler, labels); | 521 Condition true_condition = EmitComparisonCode(compiler, labels); |
| 547 if (true_condition != INVALID_CONDITION) { | 522 if (true_condition != INVALID_CONDITION) { |
| 548 EmitBranchOnCondition(compiler, true_condition, labels); | 523 EmitBranchOnCondition(compiler, true_condition, labels); |
| 549 } | 524 } |
| 550 } | 525 } |
| 551 | 526 |
| 552 | |
| 553 void ComparisonInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 527 void ComparisonInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 554 Label is_true, is_false; | 528 Label is_true, is_false; |
| 555 BranchLabels labels = {&is_true, &is_false, &is_false}; | 529 BranchLabels labels = {&is_true, &is_false, &is_false}; |
| 556 Condition true_condition = | 530 Condition true_condition = |
| 557 this->GetNextInstructionCondition(compiler, labels); | 531 this->GetNextInstructionCondition(compiler, labels); |
| 558 if (true_condition == INVALID_CONDITION || !compiler->is_optimizing() || | 532 if (true_condition == INVALID_CONDITION || !compiler->is_optimizing() || |
| 559 is_true.IsLinked() || is_false.IsLinked()) { | 533 is_true.IsLinked() || is_false.IsLinked()) { |
| 560 Condition actual_condition = EmitComparisonCode(compiler, labels); | 534 Condition actual_condition = EmitComparisonCode(compiler, labels); |
| 561 ASSERT(actual_condition == true_condition); | 535 ASSERT(actual_condition == true_condition); |
| 562 if (true_condition != INVALID_CONDITION) { | 536 if (true_condition != INVALID_CONDITION) { |
| (...skipping 28 matching lines...) Expand all Loading... |
| 591 (!next_is_true && is_true.IsLinked())) { | 565 (!next_is_true && is_true.IsLinked())) { |
| 592 Label done; | 566 Label done; |
| 593 __ Jump(&done); | 567 __ Jump(&done); |
| 594 __ Bind(next_is_true ? &is_false : &is_true); | 568 __ Bind(next_is_true ? &is_false : &is_true); |
| 595 __ LoadConstant(result, Bool::Get(!next_is_true)); | 569 __ LoadConstant(result, Bool::Get(!next_is_true)); |
| 596 __ Bind(&done); | 570 __ Bind(&done); |
| 597 } | 571 } |
| 598 } | 572 } |
| 599 } | 573 } |
| 600 | 574 |
| 601 | |
| 602 LocationSummary* BranchInstr::MakeLocationSummary(Zone* zone, bool opt) const { | 575 LocationSummary* BranchInstr::MakeLocationSummary(Zone* zone, bool opt) const { |
| 603 comparison()->InitializeLocationSummary(zone, opt); | 576 comparison()->InitializeLocationSummary(zone, opt); |
| 604 if (!comparison()->HasLocs()) { | 577 if (!comparison()->HasLocs()) { |
| 605 return NULL; | 578 return NULL; |
| 606 } | 579 } |
| 607 // Branches don't produce a result. | 580 // Branches don't produce a result. |
| 608 comparison()->locs()->set_out(0, Location::NoLocation()); | 581 comparison()->locs()->set_out(0, Location::NoLocation()); |
| 609 return comparison()->locs(); | 582 return comparison()->locs(); |
| 610 } | 583 } |
| 611 | 584 |
| 612 | |
| 613 void BranchInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 585 void BranchInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 614 comparison()->EmitBranchCode(compiler, this); | 586 comparison()->EmitBranchCode(compiler, this); |
| 615 } | 587 } |
| 616 | 588 |
| 617 | |
| 618 EMIT_NATIVE_CODE(Goto, 0) { | 589 EMIT_NATIVE_CODE(Goto, 0) { |
| 619 if (!compiler->is_optimizing()) { | 590 if (!compiler->is_optimizing()) { |
| 620 // Add a deoptimization descriptor for deoptimizing instructions that | 591 // Add a deoptimization descriptor for deoptimizing instructions that |
| 621 // may be inserted before this instruction. | 592 // may be inserted before this instruction. |
| 622 compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt, GetDeoptId(), | 593 compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt, GetDeoptId(), |
| 623 TokenPosition::kNoSource); | 594 TokenPosition::kNoSource); |
| 624 } | 595 } |
| 625 if (HasParallelMove()) { | 596 if (HasParallelMove()) { |
| 626 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 597 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
| 627 } | 598 } |
| 628 // We can fall through if the successor is the next block in the list. | 599 // We can fall through if the successor is the next block in the list. |
| 629 // Otherwise, we need a jump. | 600 // Otherwise, we need a jump. |
| 630 if (!compiler->CanFallThroughTo(successor())) { | 601 if (!compiler->CanFallThroughTo(successor())) { |
| 631 __ Jump(compiler->GetJumpLabel(successor())); | 602 __ Jump(compiler->GetJumpLabel(successor())); |
| 632 } | 603 } |
| 633 } | 604 } |
| 634 | 605 |
| 635 | |
| 636 Condition TestSmiInstr::GetNextInstructionCondition(FlowGraphCompiler* compiler, | 606 Condition TestSmiInstr::GetNextInstructionCondition(FlowGraphCompiler* compiler, |
| 637 BranchLabels labels) { | 607 BranchLabels labels) { |
| 638 ASSERT((kind() == Token::kEQ) || (kind() == Token::kNE)); | 608 ASSERT((kind() == Token::kEQ) || (kind() == Token::kNE)); |
| 639 return (kind() == Token::kEQ) ? NEXT_IS_TRUE : NEXT_IS_FALSE; | 609 return (kind() == Token::kEQ) ? NEXT_IS_TRUE : NEXT_IS_FALSE; |
| 640 } | 610 } |
| 641 | 611 |
| 642 | |
| 643 Condition TestSmiInstr::EmitComparisonCode(FlowGraphCompiler* compiler, | 612 Condition TestSmiInstr::EmitComparisonCode(FlowGraphCompiler* compiler, |
| 644 BranchLabels labels) { | 613 BranchLabels labels) { |
| 645 ASSERT((kind() == Token::kEQ) || (kind() == Token::kNE)); | 614 ASSERT((kind() == Token::kEQ) || (kind() == Token::kNE)); |
| 646 Register left = locs()->in(0).reg(); | 615 Register left = locs()->in(0).reg(); |
| 647 Register right = locs()->in(1).reg(); | 616 Register right = locs()->in(1).reg(); |
| 648 __ TestSmi(left, right); | 617 __ TestSmi(left, right); |
| 649 return (kind() == Token::kEQ) ? NEXT_IS_TRUE : NEXT_IS_FALSE; | 618 return (kind() == Token::kEQ) ? NEXT_IS_TRUE : NEXT_IS_FALSE; |
| 650 } | 619 } |
| 651 | 620 |
| 652 | |
| 653 DEFINE_MAKE_LOCATION_SUMMARY(TestSmi, | 621 DEFINE_MAKE_LOCATION_SUMMARY(TestSmi, |
| 654 2, | 622 2, |
| 655 Location::RequiresRegister(), | 623 Location::RequiresRegister(), |
| 656 LocationSummary::kNoCall) | 624 LocationSummary::kNoCall) |
| 657 | 625 |
| 658 | |
| 659 Condition TestCidsInstr::EmitComparisonCode(FlowGraphCompiler* compiler, | 626 Condition TestCidsInstr::EmitComparisonCode(FlowGraphCompiler* compiler, |
| 660 BranchLabels labels) { | 627 BranchLabels labels) { |
| 661 ASSERT((kind() == Token::kIS) || (kind() == Token::kISNOT)); | 628 ASSERT((kind() == Token::kIS) || (kind() == Token::kISNOT)); |
| 662 const Register value = locs()->in(0).reg(); | 629 const Register value = locs()->in(0).reg(); |
| 663 const intptr_t true_result = (kind() == Token::kIS) ? 1 : 0; | 630 const intptr_t true_result = (kind() == Token::kIS) ? 1 : 0; |
| 664 | 631 |
| 665 const ZoneGrowableArray<intptr_t>& data = cid_results(); | 632 const ZoneGrowableArray<intptr_t>& data = cid_results(); |
| 666 const intptr_t num_cases = data.length() / 2; | 633 const intptr_t num_cases = data.length() / 2; |
| 667 ASSERT(num_cases <= 255); | 634 ASSERT(num_cases <= 255); |
| 668 __ TestCids(value, num_cases); | 635 __ TestCids(value, num_cases); |
| (...skipping 13 matching lines...) Expand all Loading... |
| 682 // If the cid is not in the list, jump to the opposite label from the cids | 649 // If the cid is not in the list, jump to the opposite label from the cids |
| 683 // that are in the list. These must be all the same (see asserts in the | 650 // that are in the list. These must be all the same (see asserts in the |
| 684 // constructor). | 651 // constructor). |
| 685 Label* target = result ? labels.false_label : labels.true_label; | 652 Label* target = result ? labels.false_label : labels.true_label; |
| 686 __ Jump(target); | 653 __ Jump(target); |
| 687 } | 654 } |
| 688 | 655 |
| 689 return NEXT_IS_TRUE; | 656 return NEXT_IS_TRUE; |
| 690 } | 657 } |
| 691 | 658 |
| 692 | |
// The comparison emitted by EmitComparisonCode is arranged so that the
// instruction immediately following it corresponds to the true outcome.
Condition TestCidsInstr::GetNextInstructionCondition(
    FlowGraphCompiler* compiler,
    BranchLabels labels) {
  return NEXT_IS_TRUE;
}
| 698 | 664 |
| 699 | |
// Location summary for TestCids: one register input (the tested value),
// register output, no runtime call.
DEFINE_MAKE_LOCATION_SUMMARY(TestCids,
                             1,
                             Location::RequiresRegister(),
                             LocationSummary::kNoCall)
| 704 | 669 |
| 705 | |
// Allocates a new array. Inputs: element type arguments (kElementTypePos)
// and length (kLengthPos); output: the allocated array.
EMIT_NATIVE_CODE(CreateArray,
                 2,
                 Location::RequiresRegister(),
                 LocationSummary::kCall) {
  if (compiler->is_optimizing()) {
    const Register length = locs()->in(kLengthPos).reg();
    const Register type_arguments = locs()->in(kElementTypePos).reg();
    const Register out = locs()->out(0).reg();
    // Emit the register-based allocation first, then the stack-based
    // fallback sequence. NOTE(review): CreateArrayOpt presumably skips the
    // fallback when the inline allocation succeeds -- confirm against the
    // DBC interpreter.
    __ CreateArrayOpt(out, length, type_arguments);
    __ Push(type_arguments);
    __ Push(length);
    __ CreateArrayTOS();
    compiler->RecordAfterCall(this, FlowGraphCompiler::kHasResult);
    __ PopLocal(out);
  } else {
    // Unoptimized code: both operands are already on the expression stack
    // and the result is left on top of the stack.
    __ CreateArrayTOS();
    compiler->RecordAfterCall(this, FlowGraphCompiler::kHasResult);
  }
}
| 725 | 689 |
| 726 | |
| 727 EMIT_NATIVE_CODE(StoreIndexed, | 690 EMIT_NATIVE_CODE(StoreIndexed, |
| 728 3, | 691 3, |
| 729 Location::NoLocation(), | 692 Location::NoLocation(), |
| 730 LocationSummary::kNoCall, | 693 LocationSummary::kNoCall, |
| 731 1) { | 694 1) { |
| 732 if (!compiler->is_optimizing()) { | 695 if (!compiler->is_optimizing()) { |
| 733 ASSERT(class_id() == kArrayCid); | 696 ASSERT(class_id() == kArrayCid); |
| 734 __ StoreIndexedTOS(); | 697 __ StoreIndexedTOS(); |
| 735 return; | 698 return; |
| 736 } | 699 } |
| (...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 799 __ StoreIndexedFloat64(array, temp, value); | 762 __ StoreIndexedFloat64(array, temp, value); |
| 800 } | 763 } |
| 801 break; | 764 break; |
| 802 default: | 765 default: |
| 803 Unsupported(compiler); | 766 Unsupported(compiler); |
| 804 UNREACHABLE(); | 767 UNREACHABLE(); |
| 805 break; | 768 break; |
| 806 } | 769 } |
| 807 } | 770 } |
| 808 | 771 |
| 809 | |
| 810 EMIT_NATIVE_CODE(LoadIndexed, | 772 EMIT_NATIVE_CODE(LoadIndexed, |
| 811 2, | 773 2, |
| 812 Location::RequiresRegister(), | 774 Location::RequiresRegister(), |
| 813 LocationSummary::kNoCall, | 775 LocationSummary::kNoCall, |
| 814 1) { | 776 1) { |
| 815 ASSERT(compiler->is_optimizing()); | 777 ASSERT(compiler->is_optimizing()); |
| 816 const Register array = locs()->in(0).reg(); | 778 const Register array = locs()->in(0).reg(); |
| 817 const Register index = locs()->in(1).reg(); | 779 const Register index = locs()->in(1).reg(); |
| 818 const Register temp = locs()->temp(0).reg(); | 780 const Register temp = locs()->temp(0).reg(); |
| 819 const Register result = locs()->out(0).reg(); | 781 const Register result = locs()->out(0).reg(); |
| (...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 912 __ LoadIndexedFloat64(result, array, temp); | 874 __ LoadIndexedFloat64(result, array, temp); |
| 913 } | 875 } |
| 914 break; | 876 break; |
| 915 default: | 877 default: |
| 916 Unsupported(compiler); | 878 Unsupported(compiler); |
| 917 UNREACHABLE(); | 879 UNREACHABLE(); |
| 918 break; | 880 break; |
| 919 } | 881 } |
| 920 } | 882 } |
| 921 | 883 |
| 922 | |
// Implements string interpolation by statically calling the interpolation
// function with the argument array (input 0). Result is in register 0.
EMIT_NATIVE_CODE(StringInterpolate,
                 1,
                 Location::RegisterLocation(0),
                 LocationSummary::kCall) {
  if (compiler->is_optimizing()) {
    // Optimized code keeps the argument in a register; move it to the stack
    // for the call.
    __ Push(locs()->in(0).reg());
  }
  const intptr_t kTypeArgsLen = 0;
  const intptr_t kArgumentCount = 1;
  const Array& arguments_descriptor = Array::Handle(ArgumentsDescriptor::New(
      kTypeArgsLen, kArgumentCount, Object::null_array()));
  __ PushConstant(CallFunction());
  const intptr_t argdesc_kidx = __ AddConstant(arguments_descriptor);
  __ StaticCall(kArgumentCount, argdesc_kidx);
  // Note: can't use RecordAfterCall here because
  // StringInterpolateInstr::ArgumentCount() is 0. However
  // internally it does a call with 1 argument which needs to
  // be reflected in the lazy deoptimization environment.
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
                                 token_pos());
  compiler->RecordAfterCallHelper(token_pos(), deopt_id(), kArgumentCount,
                                  FlowGraphCompiler::kHasResult, locs());
  if (compiler->is_optimizing()) {
    __ PopLocal(locs()->out(0).reg());
  }
}
| 949 | 910 |
| 950 | |
// Calls a native (C) function. The native entry point and the argc tag are
// materialized as object-pool constants and pushed, then the call bytecode
// matching the native's scope handling is emitted.
EMIT_NATIVE_CODE(NativeCall,
                 0,
                 Location::NoLocation(),
                 LocationSummary::kCall) {
  SetupNative();

  const intptr_t argc_tag = NativeArguments::ComputeArgcTag(function());

  // Lazy linking of natives is not used on this path.
  ASSERT(!link_lazily());
  const ExternalLabel label(reinterpret_cast<uword>(native_c_function()));
  const intptr_t target_kidx =
      __ object_pool_wrapper().FindNativeEntry(&label, kNotPatchable);
  const intptr_t argc_tag_kidx =
      __ object_pool_wrapper().FindImmediate(static_cast<uword>(argc_tag));
  __ PushConstant(target_kidx);
  __ PushConstant(argc_tag_kidx);
  // Pick the call bytecode based on how the native manages API scopes.
  if (is_bootstrap_native()) {
    __ NativeBootstrapCall();
  } else if (is_auto_scope()) {
    __ NativeAutoScopeCall();
  } else {
    __ NativeNoScopeCall();
  }
  compiler->RecordSafepoint(locs());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
                                 token_pos());
}
| 978 | 938 |
| 979 | |
| 980 EMIT_NATIVE_CODE(OneByteStringFromCharCode, | 939 EMIT_NATIVE_CODE(OneByteStringFromCharCode, |
| 981 1, | 940 1, |
| 982 Location::RequiresRegister(), | 941 Location::RequiresRegister(), |
| 983 LocationSummary::kNoCall) { | 942 LocationSummary::kNoCall) { |
| 984 ASSERT(compiler->is_optimizing()); | 943 ASSERT(compiler->is_optimizing()); |
| 985 const Register char_code = locs()->in(0).reg(); // Char code is a smi. | 944 const Register char_code = locs()->in(0).reg(); // Char code is a smi. |
| 986 const Register result = locs()->out(0).reg(); | 945 const Register result = locs()->out(0).reg(); |
| 987 __ OneByteStringFromCharCode(result, char_code); | 946 __ OneByteStringFromCharCode(result, char_code); |
| 988 } | 947 } |
| 989 | 948 |
| 990 | |
| 991 EMIT_NATIVE_CODE(StringToCharCode, | 949 EMIT_NATIVE_CODE(StringToCharCode, |
| 992 1, | 950 1, |
| 993 Location::RequiresRegister(), | 951 Location::RequiresRegister(), |
| 994 LocationSummary::kNoCall) { | 952 LocationSummary::kNoCall) { |
| 995 ASSERT(cid_ == kOneByteStringCid); | 953 ASSERT(cid_ == kOneByteStringCid); |
| 996 const Register str = locs()->in(0).reg(); | 954 const Register str = locs()->in(0).reg(); |
| 997 const Register result = locs()->out(0).reg(); // Result char code is a smi. | 955 const Register result = locs()->out(0).reg(); // Result char code is a smi. |
| 998 __ StringToCharCode(result, str); | 956 __ StringToCharCode(result, str); |
| 999 } | 957 } |
| 1000 | 958 |
| 1001 | |
| 1002 EMIT_NATIVE_CODE(AllocateObject, | 959 EMIT_NATIVE_CODE(AllocateObject, |
| 1003 0, | 960 0, |
| 1004 Location::RequiresRegister(), | 961 Location::RequiresRegister(), |
| 1005 LocationSummary::kCall) { | 962 LocationSummary::kCall) { |
| 1006 if (ArgumentCount() == 1) { | 963 if (ArgumentCount() == 1) { |
| 1007 // Allocate with type arguments. | 964 // Allocate with type arguments. |
| 1008 if (compiler->is_optimizing()) { | 965 if (compiler->is_optimizing()) { |
| 1009 // If we're optimizing, try a streamlined fastpath. | 966 // If we're optimizing, try a streamlined fastpath. |
| 1010 const intptr_t instance_size = cls().instance_size(); | 967 const intptr_t instance_size = cls().instance_size(); |
| 1011 Isolate* isolate = Isolate::Current(); | 968 Isolate* isolate = Isolate::Current(); |
| (...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1059 __ PopLocal(locs()->out(0).reg()); | 1016 __ PopLocal(locs()->out(0).reg()); |
| 1060 } else { | 1017 } else { |
| 1061 const intptr_t kidx = __ AddConstant(cls()); | 1018 const intptr_t kidx = __ AddConstant(cls()); |
| 1062 __ Allocate(kidx); | 1019 __ Allocate(kidx); |
| 1063 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId, | 1020 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId, |
| 1064 token_pos()); | 1021 token_pos()); |
| 1065 compiler->RecordSafepoint(locs()); | 1022 compiler->RecordSafepoint(locs()); |
| 1066 } | 1023 } |
| 1067 } | 1024 } |
| 1068 | 1025 |
| 1069 | |
// Stores a value (input 1) into a word-aligned field of an instance
// (input 0).
EMIT_NATIVE_CODE(StoreInstanceField, 2) {
  ASSERT(!HasTemp());
  ASSERT(offset_in_bytes() % kWordSize == 0);
  if (compiler->is_optimizing()) {
    const Register value = locs()->in(1).reg();
    const Register instance = locs()->in(0).reg();
    if (Utils::IsInt(8, offset_in_bytes() / kWordSize)) {
      // Word-sized slot index fits in the instruction's immediate field.
      __ StoreField(instance, offset_in_bytes() / kWordSize, value);
    } else {
      // Wide slot index: carried by a trailing Nop operand.
      __ StoreFieldExt(instance, value);
      __ Nop(offset_in_bytes() / kWordSize);
    }
  } else {
    // Unoptimized: instance and value are on the expression stack.
    __ StoreFieldTOS(offset_in_bytes() / kWordSize);
  }
}
| 1086 | 1042 |
| 1087 | |
| 1088 EMIT_NATIVE_CODE(LoadField, 1, Location::RequiresRegister()) { | 1043 EMIT_NATIVE_CODE(LoadField, 1, Location::RequiresRegister()) { |
| 1089 ASSERT(offset_in_bytes() % kWordSize == 0); | 1044 ASSERT(offset_in_bytes() % kWordSize == 0); |
| 1090 if (compiler->is_optimizing()) { | 1045 if (compiler->is_optimizing()) { |
| 1091 const Register result = locs()->out(0).reg(); | 1046 const Register result = locs()->out(0).reg(); |
| 1092 const Register instance = locs()->in(0).reg(); | 1047 const Register instance = locs()->in(0).reg(); |
| 1093 if (Utils::IsInt(8, offset_in_bytes() / kWordSize)) { | 1048 if (Utils::IsInt(8, offset_in_bytes() / kWordSize)) { |
| 1094 __ LoadField(result, instance, offset_in_bytes() / kWordSize); | 1049 __ LoadField(result, instance, offset_in_bytes() / kWordSize); |
| 1095 } else { | 1050 } else { |
| 1096 __ LoadFieldExt(result, instance); | 1051 __ LoadFieldExt(result, instance); |
| 1097 __ Nop(offset_in_bytes() / kWordSize); | 1052 __ Nop(offset_in_bytes() / kWordSize); |
| 1098 } | 1053 } |
| 1099 } else { | 1054 } else { |
| 1100 __ LoadFieldTOS(offset_in_bytes() / kWordSize); | 1055 __ LoadFieldTOS(offset_in_bytes() / kWordSize); |
| 1101 } | 1056 } |
| 1102 } | 1057 } |
| 1103 | 1058 |
| 1104 | |
| 1105 EMIT_NATIVE_CODE(LoadUntagged, 1, Location::RequiresRegister()) { | 1059 EMIT_NATIVE_CODE(LoadUntagged, 1, Location::RequiresRegister()) { |
| 1106 const Register obj = locs()->in(0).reg(); | 1060 const Register obj = locs()->in(0).reg(); |
| 1107 const Register result = locs()->out(0).reg(); | 1061 const Register result = locs()->out(0).reg(); |
| 1108 if (object()->definition()->representation() == kUntagged) { | 1062 if (object()->definition()->representation() == kUntagged) { |
| 1109 __ LoadUntagged(result, obj, offset() / kWordSize); | 1063 __ LoadUntagged(result, obj, offset() / kWordSize); |
| 1110 } else { | 1064 } else { |
| 1111 ASSERT(object()->definition()->representation() == kTagged); | 1065 ASSERT(object()->definition()->representation() == kTagged); |
| 1112 __ LoadField(result, obj, offset() / kWordSize); | 1066 __ LoadField(result, obj, offset() / kWordSize); |
| 1113 } | 1067 } |
| 1114 } | 1068 } |
| 1115 | 1069 |
| 1116 | |
| 1117 EMIT_NATIVE_CODE(BooleanNegate, 1, Location::RequiresRegister()) { | 1070 EMIT_NATIVE_CODE(BooleanNegate, 1, Location::RequiresRegister()) { |
| 1118 if (compiler->is_optimizing()) { | 1071 if (compiler->is_optimizing()) { |
| 1119 __ BooleanNegate(locs()->out(0).reg(), locs()->in(0).reg()); | 1072 __ BooleanNegate(locs()->out(0).reg(), locs()->in(0).reg()); |
| 1120 } else { | 1073 } else { |
| 1121 __ BooleanNegateTOS(); | 1074 __ BooleanNegateTOS(); |
| 1122 } | 1075 } |
| 1123 } | 1076 } |
| 1124 | 1077 |
| 1125 | |
// Allocates a new Context with num_context_variables() slots via the
// runtime (unoptimized code only).
EMIT_NATIVE_CODE(AllocateContext,
                 0,
                 Location::RequiresRegister(),
                 LocationSummary::kCall) {
  ASSERT(!compiler->is_optimizing());
  __ AllocateContext(num_context_variables());
  compiler->RecordSafepoint(locs());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
                                 token_pos());
}
| 1136 | 1088 |
| 1137 | |
// Allocates a Context without initializing its variables (optimized code
// only). Emits an inline allocation into the result register followed by
// the slow AllocateContext/PopLocal sequence. NOTE(review): the inline
// bytecode presumably skips the slow path on success -- confirm against
// the DBC interpreter.
EMIT_NATIVE_CODE(AllocateUninitializedContext,
                 0,
                 Location::RequiresRegister(),
                 LocationSummary::kCall) {
  ASSERT(compiler->is_optimizing());
  __ AllocateUninitializedContext(locs()->out(0).reg(),
                                  num_context_variables());
  __ AllocateContext(num_context_variables());
  compiler->RecordSafepoint(locs());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
                                 token_pos());
  __ PopLocal(locs()->out(0).reg());
}
| 1151 | 1102 |
| 1152 | |
// Clones the context in input 0; the clone is the result. In unoptimized
// code the operand and result stay on the expression stack.
EMIT_NATIVE_CODE(CloneContext,
                 1,
                 Location::RequiresRegister(),
                 LocationSummary::kCall) {
  if (compiler->is_optimizing()) {
    __ Push(locs()->in(0).reg());
  }
  __ CloneContext();
  compiler->RecordSafepoint(locs());
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId,
                                 token_pos());
  if (compiler->is_optimizing()) {
    __ PopLocal(locs()->out(0).reg());
  }
}
| 1168 | 1118 |
| 1169 | |
| 1170 EMIT_NATIVE_CODE(CatchBlockEntry, 0) { | 1119 EMIT_NATIVE_CODE(CatchBlockEntry, 0) { |
| 1171 __ Bind(compiler->GetJumpLabel(this)); | 1120 __ Bind(compiler->GetJumpLabel(this)); |
| 1172 compiler->AddExceptionHandler(catch_try_index(), try_index(), | 1121 compiler->AddExceptionHandler(catch_try_index(), try_index(), |
| 1173 compiler->assembler()->CodeSize(), | 1122 compiler->assembler()->CodeSize(), |
| 1174 handler_token_pos(), is_generated(), | 1123 handler_token_pos(), is_generated(), |
| 1175 catch_handler_types_, needs_stacktrace()); | 1124 catch_handler_types_, needs_stacktrace()); |
| 1176 // On lazy deoptimization we patch the optimized code here to enter the | 1125 // On lazy deoptimization we patch the optimized code here to enter the |
| 1177 // deoptimization stub. | 1126 // deoptimization stub. |
| 1178 const intptr_t deopt_id = Thread::ToDeoptAfter(GetDeoptId()); | 1127 const intptr_t deopt_id = Thread::ToDeoptAfter(GetDeoptId()); |
| 1179 if (compiler->is_optimizing()) { | 1128 if (compiler->is_optimizing()) { |
| (...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1243 } else { | 1192 } else { |
| 1244 __ MoveSpecial(LocalVarIndex(0, exception_var().index()), | 1193 __ MoveSpecial(LocalVarIndex(0, exception_var().index()), |
| 1245 Simulator::kExceptionSpecialIndex); | 1194 Simulator::kExceptionSpecialIndex); |
| 1246 __ MoveSpecial(LocalVarIndex(0, stacktrace_var().index()), | 1195 __ MoveSpecial(LocalVarIndex(0, stacktrace_var().index()), |
| 1247 Simulator::kStackTraceSpecialIndex); | 1196 Simulator::kStackTraceSpecialIndex); |
| 1248 } | 1197 } |
| 1249 } | 1198 } |
| 1250 __ SetFrame(compiler->StackSize()); | 1199 __ SetFrame(compiler->StackSize()); |
| 1251 } | 1200 } |
| 1252 | 1201 |
| 1253 | |
// Throws the exception on top of the stack. Control does not return; the
// trailing Trap marks the unreachable fall-through.
EMIT_NATIVE_CODE(Throw, 0, Location::NoLocation(), LocationSummary::kCall) {
  __ Throw(0);
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
                                 token_pos());
  compiler->RecordAfterCall(this, FlowGraphCompiler::kNoResult);
  __ Trap();
}
| 1261 | 1209 |
| 1262 | |
// Rethrows an exception, preserving the original stack trace. Control does
// not return; the trailing Trap marks the unreachable fall-through.
EMIT_NATIVE_CODE(ReThrow, 0, Location::NoLocation(), LocationSummary::kCall) {
  // The enclosing catch handler needs a stack trace slot.
  compiler->SetNeedsStackTrace(catch_try_index());
  __ Throw(1);
  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(),
                                 token_pos());
  compiler->RecordAfterCall(this, FlowGraphCompiler::kNoResult);
  __ Trap();
}
| 1271 | 1218 |
| 1272 EMIT_NATIVE_CODE(InstantiateType, | 1219 EMIT_NATIVE_CODE(InstantiateType, |
| (...skipping 25 matching lines...) Expand all Loading... |
| 1298 type_arguments().IsRawWhenInstantiatedFromRaw(type_arguments().Length()), | 1245 type_arguments().IsRawWhenInstantiatedFromRaw(type_arguments().Length()), |
| 1299 __ AddConstant(type_arguments())); | 1246 __ AddConstant(type_arguments())); |
| 1300 compiler->RecordSafepoint(locs()); | 1247 compiler->RecordSafepoint(locs()); |
| 1301 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(), | 1248 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id(), |
| 1302 token_pos()); | 1249 token_pos()); |
| 1303 if (compiler->is_optimizing()) { | 1250 if (compiler->is_optimizing()) { |
| 1304 __ PopLocal(locs()->out(0).reg()); | 1251 __ PopLocal(locs()->out(0).reg()); |
| 1305 } | 1252 } |
| 1306 } | 1253 } |
| 1307 | 1254 |
| 1308 | |
// Emits a debugger single-step check and records its PC descriptor so the
// debugger can locate this stop point.
void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  __ DebugStep();
  compiler->AddCurrentDescriptor(stub_kind_, deopt_id_, token_pos());
}
| 1313 | 1259 |
| 1314 | |
| 1315 void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1260 void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 1316 if (!compiler->CanFallThroughTo(normal_entry())) { | 1261 if (!compiler->CanFallThroughTo(normal_entry())) { |
| 1317 __ Jump(compiler->GetJumpLabel(normal_entry())); | 1262 __ Jump(compiler->GetJumpLabel(normal_entry())); |
| 1318 } | 1263 } |
| 1319 } | 1264 } |
| 1320 | 1265 |
| 1321 | |
| 1322 LocationSummary* Instruction::MakeCallSummary(Zone* zone) { | 1266 LocationSummary* Instruction::MakeCallSummary(Zone* zone) { |
| 1323 LocationSummary* result = | 1267 LocationSummary* result = |
| 1324 new (zone) LocationSummary(zone, 0, 0, LocationSummary::kCall); | 1268 new (zone) LocationSummary(zone, 0, 0, LocationSummary::kCall); |
| 1325 // TODO(vegorov) support allocating out registers for calls. | 1269 // TODO(vegorov) support allocating out registers for calls. |
| 1326 // Currently we require them to be fixed. | 1270 // Currently we require them to be fixed. |
| 1327 result->set_out(0, Location::RegisterLocation(0)); | 1271 result->set_out(0, Location::RegisterLocation(0)); |
| 1328 return result; | 1272 return result; |
| 1329 } | 1273 } |
| 1330 | 1274 |
| 1331 | |
// Unsigned 32-bit binary ops always produce an integer.
CompileType BinaryUint32OpInstr::ComputeType() const {
  return CompileType::Int();
}
| 1335 | 1278 |
| 1336 | |
// Unsigned 32-bit shifts always produce an integer.
CompileType ShiftUint32OpInstr::ComputeType() const {
  return CompileType::Int();
}
| 1340 | 1282 |
| 1341 | |
// Unsigned 32-bit unary ops always produce an integer.
CompileType UnaryUint32OpInstr::ComputeType() const {
  return CompileType::Int();
}
| 1345 | 1286 |
| 1346 | |
| 1347 CompileType LoadIndexedInstr::ComputeType() const { | 1287 CompileType LoadIndexedInstr::ComputeType() const { |
| 1348 switch (class_id_) { | 1288 switch (class_id_) { |
| 1349 case kArrayCid: | 1289 case kArrayCid: |
| 1350 case kImmutableArrayCid: | 1290 case kImmutableArrayCid: |
| 1351 return CompileType::Dynamic(); | 1291 return CompileType::Dynamic(); |
| 1352 | 1292 |
| 1353 case kTypedDataFloat32ArrayCid: | 1293 case kTypedDataFloat32ArrayCid: |
| 1354 case kTypedDataFloat64ArrayCid: | 1294 case kTypedDataFloat64ArrayCid: |
| 1355 return CompileType::FromCid(kDoubleCid); | 1295 return CompileType::FromCid(kDoubleCid); |
| 1356 case kTypedDataFloat32x4ArrayCid: | 1296 case kTypedDataFloat32x4ArrayCid: |
| (...skipping 19 matching lines...) Expand all Loading... |
| 1376 case kTypedDataInt32ArrayCid: | 1316 case kTypedDataInt32ArrayCid: |
| 1377 case kTypedDataUint32ArrayCid: | 1317 case kTypedDataUint32ArrayCid: |
| 1378 return CompileType::Int(); | 1318 return CompileType::Int(); |
| 1379 | 1319 |
| 1380 default: | 1320 default: |
| 1381 UNREACHABLE(); | 1321 UNREACHABLE(); |
| 1382 return CompileType::Dynamic(); | 1322 return CompileType::Dynamic(); |
| 1383 } | 1323 } |
| 1384 } | 1324 } |
| 1385 | 1325 |
| 1386 | |
| 1387 Representation LoadIndexedInstr::representation() const { | 1326 Representation LoadIndexedInstr::representation() const { |
| 1388 switch (class_id_) { | 1327 switch (class_id_) { |
| 1389 case kArrayCid: | 1328 case kArrayCid: |
| 1390 case kImmutableArrayCid: | 1329 case kImmutableArrayCid: |
| 1391 case kTypedDataInt8ArrayCid: | 1330 case kTypedDataInt8ArrayCid: |
| 1392 case kTypedDataUint8ArrayCid: | 1331 case kTypedDataUint8ArrayCid: |
| 1393 case kTypedDataUint8ClampedArrayCid: | 1332 case kTypedDataUint8ClampedArrayCid: |
| 1394 case kExternalTypedDataUint8ArrayCid: | 1333 case kExternalTypedDataUint8ArrayCid: |
| 1395 case kExternalTypedDataUint8ClampedArrayCid: | 1334 case kExternalTypedDataUint8ClampedArrayCid: |
| 1396 case kTypedDataInt16ArrayCid: | 1335 case kTypedDataInt16ArrayCid: |
| (...skipping 15 matching lines...) Expand all Loading... |
| 1412 case kTypedDataFloat32x4ArrayCid: | 1351 case kTypedDataFloat32x4ArrayCid: |
| 1413 return kUnboxedFloat32x4; | 1352 return kUnboxedFloat32x4; |
| 1414 case kTypedDataFloat64x2ArrayCid: | 1353 case kTypedDataFloat64x2ArrayCid: |
| 1415 return kUnboxedFloat64x2; | 1354 return kUnboxedFloat64x2; |
| 1416 default: | 1355 default: |
| 1417 UNREACHABLE(); | 1356 UNREACHABLE(); |
| 1418 return kTagged; | 1357 return kTagged; |
| 1419 } | 1358 } |
| 1420 } | 1359 } |
| 1421 | 1360 |
| 1422 | |
| 1423 Representation StoreIndexedInstr::RequiredInputRepresentation( | 1361 Representation StoreIndexedInstr::RequiredInputRepresentation( |
| 1424 intptr_t idx) const { | 1362 intptr_t idx) const { |
| 1425 // Array can be a Dart object or a pointer to external data. | 1363 // Array can be a Dart object or a pointer to external data. |
| 1426 if (idx == 0) { | 1364 if (idx == 0) { |
| 1427 return kNoRepresentation; // Flexible input representation. | 1365 return kNoRepresentation; // Flexible input representation. |
| 1428 } | 1366 } |
| 1429 if (idx == 1) { | 1367 if (idx == 1) { |
| 1430 return kTagged; // Index is a smi. | 1368 return kTagged; // Index is a smi. |
| 1431 } | 1369 } |
| 1432 ASSERT(idx == 2); | 1370 ASSERT(idx == 2); |
| (...skipping 23 matching lines...) Expand all Loading... |
| 1456 case kTypedDataInt32x4ArrayCid: | 1394 case kTypedDataInt32x4ArrayCid: |
| 1457 return kUnboxedInt32x4; | 1395 return kUnboxedInt32x4; |
| 1458 case kTypedDataFloat64x2ArrayCid: | 1396 case kTypedDataFloat64x2ArrayCid: |
| 1459 return kUnboxedFloat64x2; | 1397 return kUnboxedFloat64x2; |
| 1460 default: | 1398 default: |
| 1461 UNREACHABLE(); | 1399 UNREACHABLE(); |
| 1462 return kTagged; | 1400 return kTagged; |
| 1463 } | 1401 } |
| 1464 } | 1402 } |
| 1465 | 1403 |
| 1466 | |
// Removes the trailing argc values from this deoptimization environment
// (used after the pushed call arguments have been consumed).
void Environment::DropArguments(intptr_t argc) {
#if defined(DEBUG)
  // Check that we are in the backend - register allocation has been run.
  ASSERT(locations_ != NULL);

  // Check that we are only dropping a valid number of instructions from the
  // environment.
  ASSERT(argc <= values_.length());
#endif
  values_.TruncateTo(values_.length() - argc);
}
| 1478 | 1415 |
| 1479 | |
// Deoptimizes if the value in input 0 is not a Smi.
EMIT_NATIVE_CODE(CheckSmi, 1) {
  __ CheckSmi(locs()->in(0).reg());
  // Tag the deopt reason when this check was hoisted out of a loop by LICM.
  compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckSmi,
                      licm_hoisted_ ? ICData::kHoisted : 0);
}
| 1485 | 1421 |
| 1486 | |
| 1487 EMIT_NATIVE_CODE(CheckEitherNonSmi, 2) { | 1422 EMIT_NATIVE_CODE(CheckEitherNonSmi, 2) { |
| 1488 const Register left = locs()->in(0).reg(); | 1423 const Register left = locs()->in(0).reg(); |
| 1489 const Register right = locs()->in(1).reg(); | 1424 const Register right = locs()->in(1).reg(); |
| 1490 __ CheckEitherNonSmi(left, right); | 1425 __ CheckEitherNonSmi(left, right); |
| 1491 compiler->EmitDeopt(deopt_id(), ICData::kDeoptBinaryDoubleOp, | 1426 compiler->EmitDeopt(deopt_id(), ICData::kDeoptBinaryDoubleOp, |
| 1492 licm_hoisted_ ? ICData::kHoisted : 0); | 1427 licm_hoisted_ ? ICData::kHoisted : 0); |
| 1493 } | 1428 } |
| 1494 | 1429 |
| 1495 | |
// Deoptimizes unless the class id of the value in input 0 matches the
// expected cid (or cid range).
EMIT_NATIVE_CODE(CheckClassId, 1) {
  if (cids_.IsSingleCid()) {
    __ CheckClassId(locs()->in(0).reg(),
                    compiler->ToEmbeddableCid(cids_.cid_start, this));
  } else {
    // Range check: the range extent is carried by a trailing Nop operand.
    __ CheckClassIdRange(locs()->in(0).reg(),
                         compiler->ToEmbeddableCid(cids_.cid_start, this));
    __ Nop(compiler->ToEmbeddableCid(cids_.Extent(), this));
  }
  compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckClass);
}
| 1507 | 1441 |
| 1508 | |
| 1509 EMIT_NATIVE_CODE(CheckClass, 1) { | 1442 EMIT_NATIVE_CODE(CheckClass, 1) { |
| 1510 const Register value = locs()->in(0).reg(); | 1443 const Register value = locs()->in(0).reg(); |
| 1511 if (IsNullCheck()) { | 1444 if (IsNullCheck()) { |
| 1512 ASSERT(IsDeoptIfNull() || IsDeoptIfNotNull()); | 1445 ASSERT(IsDeoptIfNull() || IsDeoptIfNotNull()); |
| 1513 if (IsDeoptIfNull()) { | 1446 if (IsDeoptIfNull()) { |
| 1514 __ IfEqNull(value); | 1447 __ IfEqNull(value); |
| 1515 } else { | 1448 } else { |
| 1516 __ IfNeNull(value); | 1449 __ IfNeNull(value); |
| 1517 } | 1450 } |
| 1518 } else { | 1451 } else { |
| (...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1563 if (using_ranges) { | 1496 if (using_ranges) { |
| 1564 __ Nop(compiler->ToEmbeddableCid(1 + cids_[i].Extent(), this)); | 1497 __ Nop(compiler->ToEmbeddableCid(1 + cids_[i].Extent(), this)); |
| 1565 } | 1498 } |
| 1566 } | 1499 } |
| 1567 } | 1500 } |
| 1568 } | 1501 } |
| 1569 compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckClass, | 1502 compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckClass, |
| 1570 licm_hoisted_ ? ICData::kHoisted : 0); | 1503 licm_hoisted_ ? ICData::kHoisted : 0); |
| 1571 } | 1504 } |
| 1572 | 1505 |
| 1573 | |
| 1574 EMIT_NATIVE_CODE(BinarySmiOp, 2, Location::RequiresRegister()) { | 1506 EMIT_NATIVE_CODE(BinarySmiOp, 2, Location::RequiresRegister()) { |
| 1575 const Register left = locs()->in(0).reg(); | 1507 const Register left = locs()->in(0).reg(); |
| 1576 const Register right = locs()->in(1).reg(); | 1508 const Register right = locs()->in(1).reg(); |
| 1577 const Register out = locs()->out(0).reg(); | 1509 const Register out = locs()->out(0).reg(); |
| 1578 const bool can_deopt = CanDeoptimize(); | 1510 const bool can_deopt = CanDeoptimize(); |
| 1579 bool needs_nop = false; | 1511 bool needs_nop = false; |
| 1580 switch (op_kind()) { | 1512 switch (op_kind()) { |
| 1581 case Token::kADD: | 1513 case Token::kADD: |
| 1582 __ Add(out, left, right); | 1514 __ Add(out, left, right); |
| 1583 needs_nop = true; | 1515 needs_nop = true; |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1621 default: | 1553 default: |
| 1622 UNREACHABLE(); | 1554 UNREACHABLE(); |
| 1623 } | 1555 } |
| 1624 if (can_deopt) { | 1556 if (can_deopt) { |
| 1625 compiler->EmitDeopt(deopt_id(), ICData::kDeoptBinarySmiOp); | 1557 compiler->EmitDeopt(deopt_id(), ICData::kDeoptBinarySmiOp); |
| 1626 } else if (needs_nop) { | 1558 } else if (needs_nop) { |
| 1627 __ Nop(0); | 1559 __ Nop(0); |
| 1628 } | 1560 } |
| 1629 } | 1561 } |
| 1630 | 1562 |
| 1631 | |
| 1632 EMIT_NATIVE_CODE(UnarySmiOp, 1, Location::RequiresRegister()) { | 1563 EMIT_NATIVE_CODE(UnarySmiOp, 1, Location::RequiresRegister()) { |
| 1633 switch (op_kind()) { | 1564 switch (op_kind()) { |
| 1634 case Token::kNEGATE: { | 1565 case Token::kNEGATE: { |
| 1635 __ Neg(locs()->out(0).reg(), locs()->in(0).reg()); | 1566 __ Neg(locs()->out(0).reg(), locs()->in(0).reg()); |
| 1636 compiler->EmitDeopt(deopt_id(), ICData::kDeoptUnaryOp); | 1567 compiler->EmitDeopt(deopt_id(), ICData::kDeoptUnaryOp); |
| 1637 break; | 1568 break; |
| 1638 } | 1569 } |
| 1639 case Token::kBIT_NOT: | 1570 case Token::kBIT_NOT: |
| 1640 __ BitNot(locs()->out(0).reg(), locs()->in(0).reg()); | 1571 __ BitNot(locs()->out(0).reg(), locs()->in(0).reg()); |
| 1641 break; | 1572 break; |
| 1642 default: | 1573 default: |
| 1643 UNREACHABLE(); | 1574 UNREACHABLE(); |
| 1644 break; | 1575 break; |
| 1645 } | 1576 } |
| 1646 } | 1577 } |
| 1647 | 1578 |
| 1648 | |
| 1649 EMIT_NATIVE_CODE(Box, 1, Location::RequiresRegister(), LocationSummary::kCall) { | 1579 EMIT_NATIVE_CODE(Box, 1, Location::RequiresRegister(), LocationSummary::kCall) { |
| 1650 ASSERT(from_representation() == kUnboxedDouble); | 1580 ASSERT(from_representation() == kUnboxedDouble); |
| 1651 const Register value = locs()->in(0).reg(); | 1581 const Register value = locs()->in(0).reg(); |
| 1652 const Register out = locs()->out(0).reg(); | 1582 const Register out = locs()->out(0).reg(); |
| 1653 const intptr_t instance_size = compiler->double_class().instance_size(); | 1583 const intptr_t instance_size = compiler->double_class().instance_size(); |
| 1654 Isolate* isolate = Isolate::Current(); | 1584 Isolate* isolate = Isolate::Current(); |
| 1655 ASSERT(Heap::IsAllocatableInNewSpace(instance_size)); | 1585 ASSERT(Heap::IsAllocatableInNewSpace(instance_size)); |
| 1656 if (!compiler->double_class().TraceAllocation(isolate)) { | 1586 if (!compiler->double_class().TraceAllocation(isolate)) { |
| 1657 uword tags = 0; | 1587 uword tags = 0; |
| 1658 tags = RawObject::SizeTag::update(instance_size, tags); | 1588 tags = RawObject::SizeTag::update(instance_size, tags); |
| 1659 tags = RawObject::ClassIdTag::update(compiler->double_class().id(), tags); | 1589 tags = RawObject::ClassIdTag::update(compiler->double_class().id(), tags); |
| 1660 // tags also has the initial zero hash code on 64 bit. | 1590 // tags also has the initial zero hash code on 64 bit. |
| 1661 if (Smi::IsValid(tags)) { | 1591 if (Smi::IsValid(tags)) { |
| 1662 const intptr_t tags_kidx = __ AddConstant(Smi::Handle(Smi::New(tags))); | 1592 const intptr_t tags_kidx = __ AddConstant(Smi::Handle(Smi::New(tags))); |
| 1663 __ AllocateOpt(out, tags_kidx); | 1593 __ AllocateOpt(out, tags_kidx); |
| 1664 } | 1594 } |
| 1665 } | 1595 } |
| 1666 const intptr_t kidx = __ AddConstant(compiler->double_class()); | 1596 const intptr_t kidx = __ AddConstant(compiler->double_class()); |
| 1667 __ Allocate(kidx); | 1597 __ Allocate(kidx); |
| 1668 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId, | 1598 compiler->AddCurrentDescriptor(RawPcDescriptors::kOther, Thread::kNoDeoptId, |
| 1669 token_pos()); | 1599 token_pos()); |
| 1670 compiler->RecordSafepoint(locs()); | 1600 compiler->RecordSafepoint(locs()); |
| 1671 __ PopLocal(out); | 1601 __ PopLocal(out); |
| 1672 __ WriteIntoDouble(out, value); | 1602 __ WriteIntoDouble(out, value); |
| 1673 } | 1603 } |
| 1674 | 1604 |
| 1675 | |
| 1676 EMIT_NATIVE_CODE(Unbox, 1, Location::RequiresRegister()) { | 1605 EMIT_NATIVE_CODE(Unbox, 1, Location::RequiresRegister()) { |
| 1677 ASSERT(representation() == kUnboxedDouble); | 1606 ASSERT(representation() == kUnboxedDouble); |
| 1678 const intptr_t value_cid = value()->Type()->ToCid(); | 1607 const intptr_t value_cid = value()->Type()->ToCid(); |
| 1679 const intptr_t box_cid = BoxCid(); | 1608 const intptr_t box_cid = BoxCid(); |
| 1680 const Register box = locs()->in(0).reg(); | 1609 const Register box = locs()->in(0).reg(); |
| 1681 const Register result = locs()->out(0).reg(); | 1610 const Register result = locs()->out(0).reg(); |
| 1682 if (value_cid == box_cid) { | 1611 if (value_cid == box_cid) { |
| 1683 __ UnboxDouble(result, box); | 1612 __ UnboxDouble(result, box); |
| 1684 } else if (CanConvertSmi() && (value_cid == kSmiCid)) { | 1613 } else if (CanConvertSmi() && (value_cid == kSmiCid)) { |
| 1685 __ SmiToDouble(result, box); | 1614 __ SmiToDouble(result, box); |
| 1686 } else if ((value()->Type()->ToNullableCid() == box_cid) && | 1615 } else if ((value()->Type()->ToNullableCid() == box_cid) && |
| 1687 value()->Type()->is_nullable()) { | 1616 value()->Type()->is_nullable()) { |
| 1688 __ IfEqNull(box); | 1617 __ IfEqNull(box); |
| 1689 compiler->EmitDeopt(GetDeoptId(), ICData::kDeoptCheckClass); | 1618 compiler->EmitDeopt(GetDeoptId(), ICData::kDeoptCheckClass); |
| 1690 __ UnboxDouble(result, box); | 1619 __ UnboxDouble(result, box); |
| 1691 } else { | 1620 } else { |
| 1692 __ CheckedUnboxDouble(result, box); | 1621 __ CheckedUnboxDouble(result, box); |
| 1693 compiler->EmitDeopt(GetDeoptId(), ICData::kDeoptCheckClass); | 1622 compiler->EmitDeopt(GetDeoptId(), ICData::kDeoptCheckClass); |
| 1694 } | 1623 } |
| 1695 } | 1624 } |
| 1696 | 1625 |
| 1697 | |
| 1698 EMIT_NATIVE_CODE(UnboxInteger32, 1, Location::RequiresRegister()) { | 1626 EMIT_NATIVE_CODE(UnboxInteger32, 1, Location::RequiresRegister()) { |
| 1699 #if defined(ARCH_IS_64_BIT) | 1627 #if defined(ARCH_IS_64_BIT) |
| 1700 const Register out = locs()->out(0).reg(); | 1628 const Register out = locs()->out(0).reg(); |
| 1701 const Register value = locs()->in(0).reg(); | 1629 const Register value = locs()->in(0).reg(); |
| 1702 const bool may_truncate = is_truncating() || !CanDeoptimize(); | 1630 const bool may_truncate = is_truncating() || !CanDeoptimize(); |
| 1703 __ UnboxInt32(out, value, may_truncate); | 1631 __ UnboxInt32(out, value, may_truncate); |
| 1704 if (CanDeoptimize()) { | 1632 if (CanDeoptimize()) { |
| 1705 compiler->EmitDeopt(GetDeoptId(), ICData::kDeoptUnboxInteger); | 1633 compiler->EmitDeopt(GetDeoptId(), ICData::kDeoptUnboxInteger); |
| 1706 } else { | 1634 } else { |
| 1707 __ Nop(0); | 1635 __ Nop(0); |
| 1708 } | 1636 } |
| 1709 #else | 1637 #else |
| 1710 Unsupported(compiler); | 1638 Unsupported(compiler); |
| 1711 UNREACHABLE(); | 1639 UNREACHABLE(); |
| 1712 #endif // defined(ARCH_IS_64_BIT) | 1640 #endif // defined(ARCH_IS_64_BIT) |
| 1713 } | 1641 } |
| 1714 | 1642 |
| 1715 | |
| 1716 EMIT_NATIVE_CODE(BoxInteger32, 1, Location::RequiresRegister()) { | 1643 EMIT_NATIVE_CODE(BoxInteger32, 1, Location::RequiresRegister()) { |
| 1717 #if defined(ARCH_IS_64_BIT) | 1644 #if defined(ARCH_IS_64_BIT) |
| 1718 const Register out = locs()->out(0).reg(); | 1645 const Register out = locs()->out(0).reg(); |
| 1719 const Register value = locs()->in(0).reg(); | 1646 const Register value = locs()->in(0).reg(); |
| 1720 if (from_representation() == kUnboxedInt32) { | 1647 if (from_representation() == kUnboxedInt32) { |
| 1721 __ BoxInt32(out, value); | 1648 __ BoxInt32(out, value); |
| 1722 } else { | 1649 } else { |
| 1723 ASSERT(from_representation() == kUnboxedUint32); | 1650 ASSERT(from_representation() == kUnboxedUint32); |
| 1724 __ BoxUint32(out, value); | 1651 __ BoxUint32(out, value); |
| 1725 } | 1652 } |
| 1726 #else | 1653 #else |
| 1727 Unsupported(compiler); | 1654 Unsupported(compiler); |
| 1728 UNREACHABLE(); | 1655 UNREACHABLE(); |
| 1729 #endif // defined(ARCH_IS_64_BIT) | 1656 #endif // defined(ARCH_IS_64_BIT) |
| 1730 } | 1657 } |
| 1731 | 1658 |
| 1732 | |
| 1733 EMIT_NATIVE_CODE(DoubleToSmi, 1, Location::RequiresRegister()) { | 1659 EMIT_NATIVE_CODE(DoubleToSmi, 1, Location::RequiresRegister()) { |
| 1734 const Register value = locs()->in(0).reg(); | 1660 const Register value = locs()->in(0).reg(); |
| 1735 const Register result = locs()->out(0).reg(); | 1661 const Register result = locs()->out(0).reg(); |
| 1736 __ DoubleToSmi(result, value); | 1662 __ DoubleToSmi(result, value); |
| 1737 compiler->EmitDeopt(deopt_id(), ICData::kDeoptDoubleToSmi); | 1663 compiler->EmitDeopt(deopt_id(), ICData::kDeoptDoubleToSmi); |
| 1738 } | 1664 } |
| 1739 | 1665 |
| 1740 | |
| 1741 EMIT_NATIVE_CODE(SmiToDouble, 1, Location::RequiresRegister()) { | 1666 EMIT_NATIVE_CODE(SmiToDouble, 1, Location::RequiresRegister()) { |
| 1742 const Register value = locs()->in(0).reg(); | 1667 const Register value = locs()->in(0).reg(); |
| 1743 const Register result = locs()->out(0).reg(); | 1668 const Register result = locs()->out(0).reg(); |
| 1744 __ SmiToDouble(result, value); | 1669 __ SmiToDouble(result, value); |
| 1745 } | 1670 } |
| 1746 | 1671 |
| 1747 | |
| 1748 EMIT_NATIVE_CODE(BinaryDoubleOp, 2, Location::RequiresRegister()) { | 1672 EMIT_NATIVE_CODE(BinaryDoubleOp, 2, Location::RequiresRegister()) { |
| 1749 const Register left = locs()->in(0).reg(); | 1673 const Register left = locs()->in(0).reg(); |
| 1750 const Register right = locs()->in(1).reg(); | 1674 const Register right = locs()->in(1).reg(); |
| 1751 const Register result = locs()->out(0).reg(); | 1675 const Register result = locs()->out(0).reg(); |
| 1752 switch (op_kind()) { | 1676 switch (op_kind()) { |
| 1753 case Token::kADD: | 1677 case Token::kADD: |
| 1754 __ DAdd(result, left, right); | 1678 __ DAdd(result, left, right); |
| 1755 break; | 1679 break; |
| 1756 case Token::kSUB: | 1680 case Token::kSUB: |
| 1757 __ DSub(result, left, right); | 1681 __ DSub(result, left, right); |
| 1758 break; | 1682 break; |
| 1759 case Token::kMUL: | 1683 case Token::kMUL: |
| 1760 __ DMul(result, left, right); | 1684 __ DMul(result, left, right); |
| 1761 break; | 1685 break; |
| 1762 case Token::kDIV: | 1686 case Token::kDIV: |
| 1763 __ DDiv(result, left, right); | 1687 __ DDiv(result, left, right); |
| 1764 break; | 1688 break; |
| 1765 default: | 1689 default: |
| 1766 UNREACHABLE(); | 1690 UNREACHABLE(); |
| 1767 } | 1691 } |
| 1768 } | 1692 } |
| 1769 | 1693 |
| 1770 | |
| 1771 Condition DoubleTestOpInstr::EmitComparisonCode(FlowGraphCompiler* compiler, | 1694 Condition DoubleTestOpInstr::EmitComparisonCode(FlowGraphCompiler* compiler, |
| 1772 BranchLabels labels) { | 1695 BranchLabels labels) { |
| 1773 ASSERT(compiler->is_optimizing()); | 1696 ASSERT(compiler->is_optimizing()); |
| 1774 const Register value = locs()->in(0).reg(); | 1697 const Register value = locs()->in(0).reg(); |
| 1775 switch (op_kind()) { | 1698 switch (op_kind()) { |
| 1776 case MethodRecognizer::kDouble_getIsNaN: | 1699 case MethodRecognizer::kDouble_getIsNaN: |
| 1777 __ DoubleIsNaN(value); | 1700 __ DoubleIsNaN(value); |
| 1778 break; | 1701 break; |
| 1779 case MethodRecognizer::kDouble_getIsInfinite: | 1702 case MethodRecognizer::kDouble_getIsInfinite: |
| 1780 __ DoubleIsInfinite(value); | 1703 __ DoubleIsInfinite(value); |
| 1781 break; | 1704 break; |
| 1782 default: | 1705 default: |
| 1783 UNREACHABLE(); | 1706 UNREACHABLE(); |
| 1784 } | 1707 } |
| 1785 const bool is_negated = kind() != Token::kEQ; | 1708 const bool is_negated = kind() != Token::kEQ; |
| 1786 return is_negated ? NEXT_IS_FALSE : NEXT_IS_TRUE; | 1709 return is_negated ? NEXT_IS_FALSE : NEXT_IS_TRUE; |
| 1787 } | 1710 } |
| 1788 | 1711 |
| 1789 | |
| 1790 Condition DoubleTestOpInstr::GetNextInstructionCondition( | 1712 Condition DoubleTestOpInstr::GetNextInstructionCondition( |
| 1791 FlowGraphCompiler* compiler, | 1713 FlowGraphCompiler* compiler, |
| 1792 BranchLabels labels) { | 1714 BranchLabels labels) { |
| 1793 const bool is_negated = kind() != Token::kEQ; | 1715 const bool is_negated = kind() != Token::kEQ; |
| 1794 return is_negated ? NEXT_IS_FALSE : NEXT_IS_TRUE; | 1716 return is_negated ? NEXT_IS_FALSE : NEXT_IS_TRUE; |
| 1795 } | 1717 } |
| 1796 | 1718 |
| 1797 | |
| 1798 DEFINE_MAKE_LOCATION_SUMMARY(DoubleTestOp, 1, Location::RequiresRegister()) | 1719 DEFINE_MAKE_LOCATION_SUMMARY(DoubleTestOp, 1, Location::RequiresRegister()) |
| 1799 | 1720 |
| 1800 | |
| 1801 EMIT_NATIVE_CODE(UnaryDoubleOp, 1, Location::RequiresRegister()) { | 1721 EMIT_NATIVE_CODE(UnaryDoubleOp, 1, Location::RequiresRegister()) { |
| 1802 const Register value = locs()->in(0).reg(); | 1722 const Register value = locs()->in(0).reg(); |
| 1803 const Register result = locs()->out(0).reg(); | 1723 const Register result = locs()->out(0).reg(); |
| 1804 __ DNeg(result, value); | 1724 __ DNeg(result, value); |
| 1805 } | 1725 } |
| 1806 | 1726 |
| 1807 | |
| 1808 EMIT_NATIVE_CODE(MathUnary, 1, Location::RequiresRegister()) { | 1727 EMIT_NATIVE_CODE(MathUnary, 1, Location::RequiresRegister()) { |
| 1809 const Register value = locs()->in(0).reg(); | 1728 const Register value = locs()->in(0).reg(); |
| 1810 const Register result = locs()->out(0).reg(); | 1729 const Register result = locs()->out(0).reg(); |
| 1811 if (kind() == MathUnaryInstr::kSqrt) { | 1730 if (kind() == MathUnaryInstr::kSqrt) { |
| 1812 __ DSqrt(result, value); | 1731 __ DSqrt(result, value); |
| 1813 } else if (kind() == MathUnaryInstr::kDoubleSquare) { | 1732 } else if (kind() == MathUnaryInstr::kDoubleSquare) { |
| 1814 __ DMul(result, value, value); | 1733 __ DMul(result, value, value); |
| 1815 } else { | 1734 } else { |
| 1816 Unsupported(compiler); | 1735 Unsupported(compiler); |
| 1817 UNREACHABLE(); | 1736 UNREACHABLE(); |
| 1818 } | 1737 } |
| 1819 } | 1738 } |
| 1820 | 1739 |
| 1821 | |
| 1822 EMIT_NATIVE_CODE(DoubleToDouble, 1, Location::RequiresRegister()) { | 1740 EMIT_NATIVE_CODE(DoubleToDouble, 1, Location::RequiresRegister()) { |
| 1823 const Register in = locs()->in(0).reg(); | 1741 const Register in = locs()->in(0).reg(); |
| 1824 const Register result = locs()->out(0).reg(); | 1742 const Register result = locs()->out(0).reg(); |
| 1825 switch (recognized_kind()) { | 1743 switch (recognized_kind()) { |
| 1826 case MethodRecognizer::kDoubleTruncate: | 1744 case MethodRecognizer::kDoubleTruncate: |
| 1827 __ DTruncate(result, in); | 1745 __ DTruncate(result, in); |
| 1828 break; | 1746 break; |
| 1829 case MethodRecognizer::kDoubleFloor: | 1747 case MethodRecognizer::kDoubleFloor: |
| 1830 __ DFloor(result, in); | 1748 __ DFloor(result, in); |
| 1831 break; | 1749 break; |
| 1832 case MethodRecognizer::kDoubleCeil: | 1750 case MethodRecognizer::kDoubleCeil: |
| 1833 __ DCeil(result, in); | 1751 __ DCeil(result, in); |
| 1834 break; | 1752 break; |
| 1835 default: | 1753 default: |
| 1836 UNREACHABLE(); | 1754 UNREACHABLE(); |
| 1837 } | 1755 } |
| 1838 } | 1756 } |
| 1839 | 1757 |
| 1840 | |
| 1841 EMIT_NATIVE_CODE(DoubleToFloat, 1, Location::RequiresRegister()) { | 1758 EMIT_NATIVE_CODE(DoubleToFloat, 1, Location::RequiresRegister()) { |
| 1842 const Register in = locs()->in(0).reg(); | 1759 const Register in = locs()->in(0).reg(); |
| 1843 const Register result = locs()->out(0).reg(); | 1760 const Register result = locs()->out(0).reg(); |
| 1844 __ DoubleToFloat(result, in); | 1761 __ DoubleToFloat(result, in); |
| 1845 } | 1762 } |
| 1846 | 1763 |
| 1847 | |
| 1848 EMIT_NATIVE_CODE(FloatToDouble, 1, Location::RequiresRegister()) { | 1764 EMIT_NATIVE_CODE(FloatToDouble, 1, Location::RequiresRegister()) { |
| 1849 const Register in = locs()->in(0).reg(); | 1765 const Register in = locs()->in(0).reg(); |
| 1850 const Register result = locs()->out(0).reg(); | 1766 const Register result = locs()->out(0).reg(); |
| 1851 __ FloatToDouble(result, in); | 1767 __ FloatToDouble(result, in); |
| 1852 } | 1768 } |
| 1853 | 1769 |
| 1854 | |
| 1855 EMIT_NATIVE_CODE(InvokeMathCFunction, | 1770 EMIT_NATIVE_CODE(InvokeMathCFunction, |
| 1856 InputCount(), | 1771 InputCount(), |
| 1857 Location::RequiresRegister()) { | 1772 Location::RequiresRegister()) { |
| 1858 const Register left = locs()->in(0).reg(); | 1773 const Register left = locs()->in(0).reg(); |
| 1859 const Register result = locs()->out(0).reg(); | 1774 const Register result = locs()->out(0).reg(); |
| 1860 if (recognized_kind() == MethodRecognizer::kMathDoublePow) { | 1775 if (recognized_kind() == MethodRecognizer::kMathDoublePow) { |
| 1861 const Register right = locs()->in(1).reg(); | 1776 const Register right = locs()->in(1).reg(); |
| 1862 __ DPow(result, left, right); | 1777 __ DPow(result, left, right); |
| 1863 } else if (recognized_kind() == MethodRecognizer::kDoubleMod) { | 1778 } else if (recognized_kind() == MethodRecognizer::kDoubleMod) { |
| 1864 const Register right = locs()->in(1).reg(); | 1779 const Register right = locs()->in(1).reg(); |
| 1865 __ DMod(result, left, right); | 1780 __ DMod(result, left, right); |
| 1866 } else if (recognized_kind() == MethodRecognizer::kMathSin) { | 1781 } else if (recognized_kind() == MethodRecognizer::kMathSin) { |
| 1867 __ DSin(result, left); | 1782 __ DSin(result, left); |
| 1868 } else if (recognized_kind() == MethodRecognizer::kMathCos) { | 1783 } else if (recognized_kind() == MethodRecognizer::kMathCos) { |
| 1869 __ DCos(result, left); | 1784 __ DCos(result, left); |
| 1870 } else { | 1785 } else { |
| 1871 Unsupported(compiler); | 1786 Unsupported(compiler); |
| 1872 UNREACHABLE(); | 1787 UNREACHABLE(); |
| 1873 } | 1788 } |
| 1874 } | 1789 } |
| 1875 | 1790 |
| 1876 | |
| 1877 EMIT_NATIVE_CODE(MathMinMax, 2, Location::RequiresRegister()) { | 1791 EMIT_NATIVE_CODE(MathMinMax, 2, Location::RequiresRegister()) { |
| 1878 ASSERT((op_kind() == MethodRecognizer::kMathMin) || | 1792 ASSERT((op_kind() == MethodRecognizer::kMathMin) || |
| 1879 (op_kind() == MethodRecognizer::kMathMax)); | 1793 (op_kind() == MethodRecognizer::kMathMax)); |
| 1880 const Register left = locs()->in(0).reg(); | 1794 const Register left = locs()->in(0).reg(); |
| 1881 const Register right = locs()->in(1).reg(); | 1795 const Register right = locs()->in(1).reg(); |
| 1882 const Register result = locs()->out(0).reg(); | 1796 const Register result = locs()->out(0).reg(); |
| 1883 if (result_cid() == kDoubleCid) { | 1797 if (result_cid() == kDoubleCid) { |
| 1884 if (op_kind() == MethodRecognizer::kMathMin) { | 1798 if (op_kind() == MethodRecognizer::kMathMin) { |
| 1885 __ DMin(result, left, right); | 1799 __ DMin(result, left, right); |
| 1886 } else { | 1800 } else { |
| 1887 __ DMax(result, left, right); | 1801 __ DMax(result, left, right); |
| 1888 } | 1802 } |
| 1889 } else { | 1803 } else { |
| 1890 ASSERT(result_cid() == kSmiCid); | 1804 ASSERT(result_cid() == kSmiCid); |
| 1891 if (op_kind() == MethodRecognizer::kMathMin) { | 1805 if (op_kind() == MethodRecognizer::kMathMin) { |
| 1892 __ Min(result, left, right); | 1806 __ Min(result, left, right); |
| 1893 } else { | 1807 } else { |
| 1894 __ Max(result, left, right); | 1808 __ Max(result, left, right); |
| 1895 } | 1809 } |
| 1896 } | 1810 } |
| 1897 } | 1811 } |
| 1898 | 1812 |
| 1899 | |
| 1900 static Token::Kind FlipCondition(Token::Kind kind) { | 1813 static Token::Kind FlipCondition(Token::Kind kind) { |
| 1901 switch (kind) { | 1814 switch (kind) { |
| 1902 case Token::kEQ: | 1815 case Token::kEQ: |
| 1903 return Token::kNE; | 1816 return Token::kNE; |
| 1904 case Token::kNE: | 1817 case Token::kNE: |
| 1905 return Token::kEQ; | 1818 return Token::kEQ; |
| 1906 case Token::kLT: | 1819 case Token::kLT: |
| 1907 return Token::kGTE; | 1820 return Token::kGTE; |
| 1908 case Token::kGT: | 1821 case Token::kGT: |
| 1909 return Token::kLTE; | 1822 return Token::kLTE; |
| 1910 case Token::kLTE: | 1823 case Token::kLTE: |
| 1911 return Token::kGT; | 1824 return Token::kGT; |
| 1912 case Token::kGTE: | 1825 case Token::kGTE: |
| 1913 return Token::kLT; | 1826 return Token::kLT; |
| 1914 default: | 1827 default: |
| 1915 UNREACHABLE(); | 1828 UNREACHABLE(); |
| 1916 return Token::kNE; | 1829 return Token::kNE; |
| 1917 } | 1830 } |
| 1918 } | 1831 } |
| 1919 | 1832 |
| 1920 | |
| 1921 static Bytecode::Opcode OpcodeForSmiCondition(Token::Kind kind) { | 1833 static Bytecode::Opcode OpcodeForSmiCondition(Token::Kind kind) { |
| 1922 switch (kind) { | 1834 switch (kind) { |
| 1923 case Token::kEQ: | 1835 case Token::kEQ: |
| 1924 return Bytecode::kIfEqStrict; | 1836 return Bytecode::kIfEqStrict; |
| 1925 case Token::kNE: | 1837 case Token::kNE: |
| 1926 return Bytecode::kIfNeStrict; | 1838 return Bytecode::kIfNeStrict; |
| 1927 case Token::kLT: | 1839 case Token::kLT: |
| 1928 return Bytecode::kIfLt; | 1840 return Bytecode::kIfLt; |
| 1929 case Token::kGT: | 1841 case Token::kGT: |
| 1930 return Bytecode::kIfGt; | 1842 return Bytecode::kIfGt; |
| 1931 case Token::kLTE: | 1843 case Token::kLTE: |
| 1932 return Bytecode::kIfLe; | 1844 return Bytecode::kIfLe; |
| 1933 case Token::kGTE: | 1845 case Token::kGTE: |
| 1934 return Bytecode::kIfGe; | 1846 return Bytecode::kIfGe; |
| 1935 default: | 1847 default: |
| 1936 UNREACHABLE(); | 1848 UNREACHABLE(); |
| 1937 return Bytecode::kTrap; | 1849 return Bytecode::kTrap; |
| 1938 } | 1850 } |
| 1939 } | 1851 } |
| 1940 | 1852 |
| 1941 | |
| 1942 static Bytecode::Opcode OpcodeForDoubleCondition(Token::Kind kind) { | 1853 static Bytecode::Opcode OpcodeForDoubleCondition(Token::Kind kind) { |
| 1943 switch (kind) { | 1854 switch (kind) { |
| 1944 case Token::kEQ: | 1855 case Token::kEQ: |
| 1945 return Bytecode::kIfDEq; | 1856 return Bytecode::kIfDEq; |
| 1946 case Token::kNE: | 1857 case Token::kNE: |
| 1947 return Bytecode::kIfDNe; | 1858 return Bytecode::kIfDNe; |
| 1948 case Token::kLT: | 1859 case Token::kLT: |
| 1949 return Bytecode::kIfDLt; | 1860 return Bytecode::kIfDLt; |
| 1950 case Token::kGT: | 1861 case Token::kGT: |
| 1951 return Bytecode::kIfDGt; | 1862 return Bytecode::kIfDGt; |
| 1952 case Token::kLTE: | 1863 case Token::kLTE: |
| 1953 return Bytecode::kIfDLe; | 1864 return Bytecode::kIfDLe; |
| 1954 case Token::kGTE: | 1865 case Token::kGTE: |
| 1955 return Bytecode::kIfDGe; | 1866 return Bytecode::kIfDGe; |
| 1956 default: | 1867 default: |
| 1957 UNREACHABLE(); | 1868 UNREACHABLE(); |
| 1958 return Bytecode::kTrap; | 1869 return Bytecode::kTrap; |
| 1959 } | 1870 } |
| 1960 } | 1871 } |
| 1961 | 1872 |
| 1962 | |
| 1963 static Condition EmitSmiComparisonOp(FlowGraphCompiler* compiler, | 1873 static Condition EmitSmiComparisonOp(FlowGraphCompiler* compiler, |
| 1964 LocationSummary* locs, | 1874 LocationSummary* locs, |
| 1965 Token::Kind kind, | 1875 Token::Kind kind, |
| 1966 BranchLabels labels) { | 1876 BranchLabels labels) { |
| 1967 const Register left = locs->in(0).reg(); | 1877 const Register left = locs->in(0).reg(); |
| 1968 const Register right = locs->in(1).reg(); | 1878 const Register right = locs->in(1).reg(); |
| 1969 Token::Kind comparison = kind; | 1879 Token::Kind comparison = kind; |
| 1970 Condition condition = NEXT_IS_TRUE; | 1880 Condition condition = NEXT_IS_TRUE; |
| 1971 if (labels.fall_through != labels.false_label) { | 1881 if (labels.fall_through != labels.false_label) { |
| 1972 // If we aren't falling through to the false label, we can save a Jump | 1882 // If we aren't falling through to the false label, we can save a Jump |
| 1973 // instruction in the case that the true case is the fall through by | 1883 // instruction in the case that the true case is the fall through by |
| 1974 // flipping the sense of the test such that the instruction following the | 1884 // flipping the sense of the test such that the instruction following the |
| 1975 // test is the Jump to the false label. In the case where both labels are | 1885 // test is the Jump to the false label. In the case where both labels are |
| 1976 // null we don't flip the sense of the test. | 1886 // null we don't flip the sense of the test. |
| 1977 condition = NEXT_IS_FALSE; | 1887 condition = NEXT_IS_FALSE; |
| 1978 comparison = FlipCondition(kind); | 1888 comparison = FlipCondition(kind); |
| 1979 } | 1889 } |
| 1980 __ Emit(Bytecode::Encode(OpcodeForSmiCondition(comparison), left, right)); | 1890 __ Emit(Bytecode::Encode(OpcodeForSmiCondition(comparison), left, right)); |
| 1981 return condition; | 1891 return condition; |
| 1982 } | 1892 } |
| 1983 | 1893 |
| 1984 | |
| 1985 static Condition EmitDoubleComparisonOp(FlowGraphCompiler* compiler, | 1894 static Condition EmitDoubleComparisonOp(FlowGraphCompiler* compiler, |
| 1986 LocationSummary* locs, | 1895 LocationSummary* locs, |
| 1987 Token::Kind kind) { | 1896 Token::Kind kind) { |
| 1988 const Register left = locs->in(0).reg(); | 1897 const Register left = locs->in(0).reg(); |
| 1989 const Register right = locs->in(1).reg(); | 1898 const Register right = locs->in(1).reg(); |
| 1990 Token::Kind comparison = kind; | 1899 Token::Kind comparison = kind; |
| 1991 // For double comparisons we can't flip the condition like with smi | 1900 // For double comparisons we can't flip the condition like with smi |
| 1992 // comparisons because of NaN which will compare false for all except != | 1901 // comparisons because of NaN which will compare false for all except != |
| 1993 // operations. | 1902 // operations. |
| 1994 // TODO(fschneider): Change the block order instead in DBC so that the | 1903 // TODO(fschneider): Change the block order instead in DBC so that the |
| 1995 // false block in always the fall-through block. | 1904 // false block in always the fall-through block. |
| 1996 Condition condition = NEXT_IS_TRUE; | 1905 Condition condition = NEXT_IS_TRUE; |
| 1997 __ Emit(Bytecode::Encode(OpcodeForDoubleCondition(comparison), left, right)); | 1906 __ Emit(Bytecode::Encode(OpcodeForDoubleCondition(comparison), left, right)); |
| 1998 return condition; | 1907 return condition; |
| 1999 } | 1908 } |
| 2000 | 1909 |
| 2001 | |
| 2002 Condition EqualityCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler, | 1910 Condition EqualityCompareInstr::EmitComparisonCode(FlowGraphCompiler* compiler, |
| 2003 BranchLabels labels) { | 1911 BranchLabels labels) { |
| 2004 if (operation_cid() == kSmiCid) { | 1912 if (operation_cid() == kSmiCid) { |
| 2005 return EmitSmiComparisonOp(compiler, locs(), kind(), labels); | 1913 return EmitSmiComparisonOp(compiler, locs(), kind(), labels); |
| 2006 } else { | 1914 } else { |
| 2007 ASSERT(operation_cid() == kDoubleCid); | 1915 ASSERT(operation_cid() == kDoubleCid); |
| 2008 return EmitDoubleComparisonOp(compiler, locs(), kind()); | 1916 return EmitDoubleComparisonOp(compiler, locs(), kind()); |
| 2009 } | 1917 } |
| 2010 } | 1918 } |
| 2011 | 1919 |
| 2012 | |
| 2013 Condition EqualityCompareInstr::GetNextInstructionCondition( | 1920 Condition EqualityCompareInstr::GetNextInstructionCondition( |
| 2014 FlowGraphCompiler* compiler, | 1921 FlowGraphCompiler* compiler, |
| 2015 BranchLabels labels) { | 1922 BranchLabels labels) { |
| 2016 if (operation_cid() == kSmiCid) { | 1923 if (operation_cid() == kSmiCid) { |
| 2017 return (labels.fall_through != labels.false_label) ? NEXT_IS_FALSE | 1924 return (labels.fall_through != labels.false_label) ? NEXT_IS_FALSE |
| 2018 : NEXT_IS_TRUE; | 1925 : NEXT_IS_TRUE; |
| 2019 } else { | 1926 } else { |
| 2020 ASSERT(operation_cid() == kDoubleCid); | 1927 ASSERT(operation_cid() == kDoubleCid); |
| 2021 return NEXT_IS_TRUE; | 1928 return NEXT_IS_TRUE; |
| 2022 } | 1929 } |
| 2023 } | 1930 } |
| 2024 | 1931 |
| 2025 | |
| 2026 DEFINE_MAKE_LOCATION_SUMMARY(EqualityCompare, 2, Location::RequiresRegister()); | 1932 DEFINE_MAKE_LOCATION_SUMMARY(EqualityCompare, 2, Location::RequiresRegister()); |
| 2027 | 1933 |
| 2028 | |
| 2029 Condition RelationalOpInstr::EmitComparisonCode(FlowGraphCompiler* compiler, | 1934 Condition RelationalOpInstr::EmitComparisonCode(FlowGraphCompiler* compiler, |
| 2030 BranchLabels labels) { | 1935 BranchLabels labels) { |
| 2031 if (operation_cid() == kSmiCid) { | 1936 if (operation_cid() == kSmiCid) { |
| 2032 return EmitSmiComparisonOp(compiler, locs(), kind(), labels); | 1937 return EmitSmiComparisonOp(compiler, locs(), kind(), labels); |
| 2033 } else { | 1938 } else { |
| 2034 ASSERT(operation_cid() == kDoubleCid); | 1939 ASSERT(operation_cid() == kDoubleCid); |
| 2035 return EmitDoubleComparisonOp(compiler, locs(), kind()); | 1940 return EmitDoubleComparisonOp(compiler, locs(), kind()); |
| 2036 } | 1941 } |
| 2037 } | 1942 } |
| 2038 | 1943 |
| 2039 | |
| 2040 Condition RelationalOpInstr::GetNextInstructionCondition( | 1944 Condition RelationalOpInstr::GetNextInstructionCondition( |
| 2041 FlowGraphCompiler* compiler, | 1945 FlowGraphCompiler* compiler, |
| 2042 BranchLabels labels) { | 1946 BranchLabels labels) { |
| 2043 if (operation_cid() == kSmiCid) { | 1947 if (operation_cid() == kSmiCid) { |
| 2044 return (labels.fall_through != labels.false_label) ? NEXT_IS_FALSE | 1948 return (labels.fall_through != labels.false_label) ? NEXT_IS_FALSE |
| 2045 : NEXT_IS_TRUE; | 1949 : NEXT_IS_TRUE; |
| 2046 } else { | 1950 } else { |
| 2047 ASSERT(operation_cid() == kDoubleCid); | 1951 ASSERT(operation_cid() == kDoubleCid); |
| 2048 return NEXT_IS_TRUE; | 1952 return NEXT_IS_TRUE; |
| 2049 } | 1953 } |
| 2050 } | 1954 } |
| 2051 | 1955 |
| 2052 | |
| 2053 DEFINE_MAKE_LOCATION_SUMMARY(RelationalOp, 2, Location::RequiresRegister()) | 1956 DEFINE_MAKE_LOCATION_SUMMARY(RelationalOp, 2, Location::RequiresRegister()) |
| 2054 | 1957 |
| 2055 | |
| 2056 EMIT_NATIVE_CODE(CheckArrayBound, 2) { | 1958 EMIT_NATIVE_CODE(CheckArrayBound, 2) { |
| 2057 const Register length = locs()->in(kLengthPos).reg(); | 1959 const Register length = locs()->in(kLengthPos).reg(); |
| 2058 const Register index = locs()->in(kIndexPos).reg(); | 1960 const Register index = locs()->in(kIndexPos).reg(); |
| 2059 const intptr_t index_cid = this->index()->Type()->ToCid(); | 1961 const intptr_t index_cid = this->index()->Type()->ToCid(); |
| 2060 if (index_cid != kSmiCid) { | 1962 if (index_cid != kSmiCid) { |
| 2061 __ CheckSmi(index); | 1963 __ CheckSmi(index); |
| 2062 compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckArrayBound, | 1964 compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckArrayBound, |
| 2063 (generalized_ ? ICData::kGeneralized : 0) | | 1965 (generalized_ ? ICData::kGeneralized : 0) | |
| 2064 (licm_hoisted_ ? ICData::kHoisted : 0)); | 1966 (licm_hoisted_ ? ICData::kHoisted : 0)); |
| 2065 } | 1967 } |
| 2066 __ IfULe(length, index); | 1968 __ IfULe(length, index); |
| 2067 compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckArrayBound, | 1969 compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckArrayBound, |
| 2068 (generalized_ ? ICData::kGeneralized : 0) | | 1970 (generalized_ ? ICData::kGeneralized : 0) | |
| 2069 (licm_hoisted_ ? ICData::kHoisted : 0)); | 1971 (licm_hoisted_ ? ICData::kHoisted : 0)); |
| 2070 } | 1972 } |
| 2071 | 1973 |
| 2072 } // namespace dart | 1974 } // namespace dart |
| 2073 | 1975 |
| 2074 #endif // defined TARGET_ARCH_DBC | 1976 #endif // defined TARGET_ARCH_DBC |
| OLD | NEW |