| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 411 matching lines...) |
| 422 } else if (op->IsConstantOperand()) { | 422 } else if (op->IsConstantOperand()) { |
| 423 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op)); | 423 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op)); |
| 424 int src_index = DefineDeoptimizationLiteral(literal); | 424 int src_index = DefineDeoptimizationLiteral(literal); |
| 425 translation->StoreLiteral(src_index); | 425 translation->StoreLiteral(src_index); |
| 426 } else { | 426 } else { |
| 427 UNREACHABLE(); | 427 UNREACHABLE(); |
| 428 } | 428 } |
| 429 } | 429 } |
| 430 | 430 |
| 431 | 431 |
| 432 void LCodeGen::CallCode(Handle<Code> code, | 432 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
| 433 RelocInfo::Mode mode, | 433 RelocInfo::Mode mode, |
| 434 LInstruction* instr) { | 434 LInstruction* instr, |
| 435 SafepointMode safepoint_mode, |
| 436 int argc) { |
| 435 ASSERT(instr != NULL); | 437 ASSERT(instr != NULL); |
| 436 LPointerMap* pointers = instr->pointer_map(); | 438 LPointerMap* pointers = instr->pointer_map(); |
| 437 RecordPosition(pointers->position()); | 439 RecordPosition(pointers->position()); |
| 438 __ call(code, mode); | 440 __ call(code, mode); |
| 439 RegisterLazyDeoptimization(instr); | 441 RegisterLazyDeoptimization(instr, safepoint_mode, argc); |
| 440 | 442 |
| 441 // Signal that we don't inline smi code before these stubs in the | 443 // Signal that we don't inline smi code before these stubs in the |
| 442 // optimizing code generator. | 444 // optimizing code generator. |
| 443 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || | 445 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || |
| 444 code->kind() == Code::COMPARE_IC) { | 446 code->kind() == Code::COMPARE_IC) { |
| 445 __ nop(); | 447 __ nop(); |
| 446 } | 448 } |
| 447 } | 449 } |
| 448 | 450 |
| 449 | 451 |
| 452 void LCodeGen::CallCode(Handle<Code> code, |
| 453 RelocInfo::Mode mode, |
| 454 LInstruction* instr) { |
| 455 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, 0); |
| 456 } |
| 457 |
| 458 |
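
For reference, the `SafepointMode` values and the `CallCodeGeneric` overload introduced above are declared outside this hunk (presumably in lithium-codegen-x64.h). A minimal sketch of what that declaration looks like, using only names that appear in this patch:

```cpp
// Sketch only; the real declarations live in the LCodeGen class in
// lithium-codegen-x64.h, which is not part of this diff.
enum SafepointMode {
  RECORD_SIMPLE_SAFEPOINT,          // plain safepoint, no registers saved
  RECORD_SAFEPOINT_WITH_REGISTERS   // safepoint recorded with registers pushed
};

void CallCodeGeneric(Handle<Code> code,
                     RelocInfo::Mode mode,
                     LInstruction* instr,
                     SafepointMode safepoint_mode,
                     int argc);
```

The existing `CallCode` then becomes a thin wrapper that forwards `RECORD_SIMPLE_SAFEPOINT` and `argc == 0`, as the new definition above shows.
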
| 450 void LCodeGen::CallRuntime(Runtime::Function* function, | 459 void LCodeGen::CallRuntime(Runtime::Function* function, |
| 451 int num_arguments, | 460 int num_arguments, |
| 452 LInstruction* instr) { | 461 LInstruction* instr) { |
| 453 ASSERT(instr != NULL); | 462 ASSERT(instr != NULL); |
| 454 ASSERT(instr->HasPointerMap()); | 463 ASSERT(instr->HasPointerMap()); |
| 455 LPointerMap* pointers = instr->pointer_map(); | 464 LPointerMap* pointers = instr->pointer_map(); |
| 456 RecordPosition(pointers->position()); | 465 RecordPosition(pointers->position()); |
| 457 | 466 |
| 458 __ CallRuntime(function, num_arguments); | 467 __ CallRuntime(function, num_arguments); |
| 459 RegisterLazyDeoptimization(instr); | 468 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT, 0); |
| 460 } | 469 } |
| 461 | 470 |
| 462 | 471 |
| 463 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { | 472 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, |
| 473 int argc, |
| 474 LInstruction* instr) { |
| 475 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 476 __ CallRuntimeSaveDoubles(id); |
| 477 RecordSafepointWithRegisters( |
| 478 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex); |
| 479 } |
| 480 |
| 481 |
| 482 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, |
| 483 SafepointMode safepoint_mode, |
| 484 int argc) { |
| 464 // Create the environment to bailout to. If the call has side effects | 485 // Create the environment to bailout to. If the call has side effects |
| 465 // execution has to continue after the call otherwise execution can continue | 486 // execution has to continue after the call otherwise execution can continue |
| 466 // from a previous bailout point repeating the call. | 487 // from a previous bailout point repeating the call. |
| 467 LEnvironment* deoptimization_environment; | 488 LEnvironment* deoptimization_environment; |
| 468 if (instr->HasDeoptimizationEnvironment()) { | 489 if (instr->HasDeoptimizationEnvironment()) { |
| 469 deoptimization_environment = instr->deoptimization_environment(); | 490 deoptimization_environment = instr->deoptimization_environment(); |
| 470 } else { | 491 } else { |
| 471 deoptimization_environment = instr->environment(); | 492 deoptimization_environment = instr->environment(); |
| 472 } | 493 } |
| 473 | 494 |
| 474 RegisterEnvironmentForDeoptimization(deoptimization_environment); | 495 RegisterEnvironmentForDeoptimization(deoptimization_environment); |
| 475 RecordSafepoint(instr->pointer_map(), | 496 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) { |
| 476 deoptimization_environment->deoptimization_index()); | 497 ASSERT(argc == 0); |
| 498 RecordSafepoint(instr->pointer_map(), |
| 499 deoptimization_environment->deoptimization_index()); |
| 500 } else { |
| 501 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS); |
| 502 RecordSafepointWithRegisters( |
| 503 instr->pointer_map(), |
| 504 argc, |
| 505 deoptimization_environment->deoptimization_index()); |
| 506 } |
| 477 } | 507 } |
| 478 | 508 |
| 479 | 509 |
| 480 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { | 510 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) { |
| 481 if (!environment->HasBeenRegistered()) { | 511 if (!environment->HasBeenRegistered()) { |
| 482 // Physical stack frame layout: | 512 // Physical stack frame layout: |
| 483 // -x ............. -4 0 ..................................... y | 513 // -x ............. -4 0 ..................................... y |
| 484 // [incoming arguments] [spill slots] [pushed outgoing arguments] | 514 // [incoming arguments] [spill slots] [pushed outgoing arguments] |
| 485 | 515 |
| 486 // Layout of the environment: | 516 // Layout of the environment: |
| (...skipping 104 matching lines...) |
| 591 | 621 |
| 592 inlined_function_count_ = deoptimization_literals_.length(); | 622 inlined_function_count_ = deoptimization_literals_.length(); |
| 593 } | 623 } |
| 594 | 624 |
| 595 | 625 |
| 596 void LCodeGen::RecordSafepoint( | 626 void LCodeGen::RecordSafepoint( |
| 597 LPointerMap* pointers, | 627 LPointerMap* pointers, |
| 598 Safepoint::Kind kind, | 628 Safepoint::Kind kind, |
| 599 int arguments, | 629 int arguments, |
| 600 int deoptimization_index) { | 630 int deoptimization_index) { |
| 631 ASSERT(kind == expected_safepoint_kind_); |
| 632 |
| 601 const ZoneList<LOperand*>* operands = pointers->operands(); | 633 const ZoneList<LOperand*>* operands = pointers->operands(); |
| 602 | 634 |
| 603 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | 635 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), |
| 604 kind, arguments, deoptimization_index); | 636 kind, arguments, deoptimization_index); |
| 605 for (int i = 0; i < operands->length(); i++) { | 637 for (int i = 0; i < operands->length(); i++) { |
| 606 LOperand* pointer = operands->at(i); | 638 LOperand* pointer = operands->at(i); |
| 607 if (pointer->IsStackSlot()) { | 639 if (pointer->IsStackSlot()) { |
| 608 safepoint.DefinePointerSlot(pointer->index()); | 640 safepoint.DefinePointerSlot(pointer->index()); |
| 609 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { | 641 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
| 610 safepoint.DefinePointerRegister(ToRegister(pointer)); | 642 safepoint.DefinePointerRegister(ToRegister(pointer)); |
| (...skipping 642 matching lines...) |
| 1253 __ jmp(deferred_stack_check->entry()); | 1285 __ jmp(deferred_stack_check->entry()); |
| 1254 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); | 1286 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block)); |
| 1255 } else { | 1287 } else { |
| 1256 __ jmp(chunk_->GetAssemblyLabel(block)); | 1288 __ jmp(chunk_->GetAssemblyLabel(block)); |
| 1257 } | 1289 } |
| 1258 } | 1290 } |
| 1259 } | 1291 } |
| 1260 | 1292 |
| 1261 | 1293 |
| 1262 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { | 1294 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { |
| 1263 __ Pushad(); | 1295 PushSafepointRegistersScope scope(this); |
| 1264 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 1296 CallRuntimeFromDeferred(Runtime::kStackGuard, 0, instr); |
| 1265 RecordSafepointWithRegisters( | |
| 1266 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | |
| 1267 __ Popad(); | |
| 1268 } | 1297 } |
| 1269 | 1298 |
| 1270 | 1299 |
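
`PushSafepointRegistersScope` replaces the explicit `Pushad()` / `Popad()` and `PushSafepointRegisters()` / `PopSafepointRegisters()` pairs in the deferred-code paths. Its definition is not part of this hunk; a plausible sketch, assuming it is an RAII helper with access to LCodeGen that pushes the safepoint registers on construction, pops them on destruction, and maintains the `expected_safepoint_kind_` field asserted in `RecordSafepoint` above:

```cpp
// Hypothetical sketch; the real helper is declared alongside LCodeGen in the
// (unshown) header. Using Safepoint::kSimple as the default kind is an
// assumption here -- only Safepoint::kWithRegisters appears in this patch.
class PushSafepointRegistersScope {
 public:
  explicit PushSafepointRegistersScope(LCodeGen* codegen)
      : codegen_(codegen) {
    codegen_->masm()->PushSafepointRegisters();
    codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
  }
  ~PushSafepointRegistersScope() {
    codegen_->masm()->PopSafepointRegisters();
    codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
  }
 private:
  LCodeGen* codegen_;
};
```

This keeps the push/pop balanced automatically, which is presumably why the patch wraps the InstanceOfKnownGlobal and NumberTagD deferred paths in explicit `{ ... }` blocks: the registers are restored when the scope closes, before the result stashed in `kScratchRegister` is consumed.
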
| 1271 void LCodeGen::DoGoto(LGoto* instr) { | 1300 void LCodeGen::DoGoto(LGoto* instr) { |
| 1272 class DeferredStackCheck: public LDeferredCode { | 1301 class DeferredStackCheck: public LDeferredCode { |
| 1273 public: | 1302 public: |
| 1274 DeferredStackCheck(LCodeGen* codegen, LGoto* instr) | 1303 DeferredStackCheck(LCodeGen* codegen, LGoto* instr) |
| 1275 : LDeferredCode(codegen), instr_(instr) { } | 1304 : LDeferredCode(codegen), instr_(instr) { } |
| 1276 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } | 1305 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } |
| 1277 private: | 1306 private: |
| (...skipping 542 matching lines...) |
| 1820 __ JumpIfNotString(object, kScratchRegister, deferred->entry()); | 1849 __ JumpIfNotString(object, kScratchRegister, deferred->entry()); |
| 1821 | 1850 |
| 1822 __ bind(&false_result); | 1851 __ bind(&false_result); |
| 1823 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); | 1852 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); |
| 1824 | 1853 |
| 1825 __ bind(deferred->exit()); | 1854 __ bind(deferred->exit()); |
| 1826 } | 1855 } |
| 1827 | 1856 |
| 1828 | 1857 |
| 1829 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 1858 void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
| 1830 __ PushSafepointRegisters(); | 1859 { |
| 1860 PushSafepointRegistersScope scope(this); |
| 1831 | 1861 |
| 1832 InstanceofStub stub(InstanceofStub::kNoFlags); | 1862 InstanceofStub stub(InstanceofStub::kNoFlags); |
| 1833 | 1863 |
| 1834 __ push(ToRegister(instr->InputAt(0))); | 1864 __ push(ToRegister(instr->InputAt(0))); |
| 1835 __ Push(instr->function()); | 1865 __ Push(instr->function()); |
| 1836 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 1866 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 1837 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1867 CallCodeGeneric(stub.GetCode(), |
| 1838 __ movq(kScratchRegister, rax); | 1868 RelocInfo::CODE_TARGET, |
| 1839 __ PopSafepointRegisters(); | 1869 instr, |
| 1870 RECORD_SAFEPOINT_WITH_REGISTERS, |
| 1871 2); |
| 1872 __ movq(kScratchRegister, rax); |
| 1873 } |
| 1840 __ testq(kScratchRegister, kScratchRegister); | 1874 __ testq(kScratchRegister, kScratchRegister); |
| 1841 Label load_false; | 1875 Label load_false; |
| 1842 Label done; | 1876 Label done; |
| 1843 __ j(not_zero, &load_false); | 1877 __ j(not_zero, &load_false); |
| 1844 __ LoadRoot(rax, Heap::kTrueValueRootIndex); | 1878 __ LoadRoot(rax, Heap::kTrueValueRootIndex); |
| 1845 __ jmp(&done); | 1879 __ jmp(&done); |
| 1846 __ bind(&load_false); | 1880 __ bind(&load_false); |
| 1847 __ LoadRoot(rax, Heap::kFalseValueRootIndex); | 1881 __ LoadRoot(rax, Heap::kFalseValueRootIndex); |
| 1848 __ bind(&done); | 1882 __ bind(&done); |
| 1849 } | 1883 } |
| (...skipping 435 matching lines...) |
| 2285 RecordPosition(pointers->position()); | 2319 RecordPosition(pointers->position()); |
| 2286 | 2320 |
| 2287 // Invoke function. | 2321 // Invoke function. |
| 2288 if (*function == *graph()->info()->closure()) { | 2322 if (*function == *graph()->info()->closure()) { |
| 2289 __ CallSelf(); | 2323 __ CallSelf(); |
| 2290 } else { | 2324 } else { |
| 2291 __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset)); | 2325 __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset)); |
| 2292 } | 2326 } |
| 2293 | 2327 |
| 2294 // Setup deoptimization. | 2328 // Setup deoptimization. |
| 2295 RegisterLazyDeoptimization(instr); | 2329 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT, 0); |
| 2296 | 2330 |
| 2297 // Restore context. | 2331 // Restore context. |
| 2298 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2332 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 2299 } | 2333 } |
| 2300 | 2334 |
| 2301 | 2335 |
| 2302 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 2336 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
| 2303 ASSERT(ToRegister(instr->result()).is(rax)); | 2337 ASSERT(ToRegister(instr->result()).is(rax)); |
| 2304 __ Move(rdi, instr->function()); | 2338 __ Move(rdi, instr->function()); |
| 2305 CallKnownFunction(instr->function(), instr->arity(), instr); | 2339 CallKnownFunction(instr->function(), instr->arity(), instr); |
| 2306 } | 2340 } |
| 2307 | 2341 |
| 2308 | 2342 |
| 2309 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { | 2343 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { |
| 2310 Register input_reg = ToRegister(instr->InputAt(0)); | 2344 Register input_reg = ToRegister(instr->InputAt(0)); |
| 2311 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), | 2345 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 2312 Heap::kHeapNumberMapRootIndex); | 2346 Heap::kHeapNumberMapRootIndex); |
| 2313 DeoptimizeIf(not_equal, instr->environment()); | 2347 DeoptimizeIf(not_equal, instr->environment()); |
| 2314 | 2348 |
| 2315 Label done; | 2349 Label done; |
| 2316 Register tmp = input_reg.is(rax) ? rcx : rax; | 2350 Register tmp = input_reg.is(rax) ? rcx : rax; |
| 2317 Register tmp2 = tmp.is(rcx) ? rdx : input_reg.is(rcx) ? rdx : rcx; | 2351 Register tmp2 = tmp.is(rcx) ? rdx : input_reg.is(rcx) ? rdx : rcx; |
| 2318 | 2352 |
| 2319 // Preserve the value of all registers. | 2353 // Preserve the value of all registers. |
| 2320 __ PushSafepointRegisters(); | 2354 PushSafepointRegistersScope scope(this); |
| 2321 | 2355 |
| 2322 Label negative; | 2356 Label negative; |
| 2323 __ movl(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); | 2357 __ movl(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset)); |
| 2324 // Check the sign of the argument. If the argument is positive, just | 2358 // Check the sign of the argument. If the argument is positive, just |
| 2325 // return it. We do not need to patch the stack since |input| and | 2359 // return it. We do not need to patch the stack since |input| and |
| 2326 // |result| are the same register and |input| will be restored | 2360 // |result| are the same register and |input| will be restored |
| 2327 // unchanged by popping safepoint registers. | 2361 // unchanged by popping safepoint registers. |
| 2328 __ testl(tmp, Immediate(HeapNumber::kSignMask)); | 2362 __ testl(tmp, Immediate(HeapNumber::kSignMask)); |
| 2329 __ j(not_zero, &negative); | 2363 __ j(not_zero, &negative); |
| 2330 __ jmp(&done); | 2364 __ jmp(&done); |
| 2331 | 2365 |
| 2332 __ bind(&negative); | 2366 __ bind(&negative); |
| 2333 | 2367 |
| 2334 Label allocated, slow; | 2368 Label allocated, slow; |
| 2335 __ AllocateHeapNumber(tmp, tmp2, &slow); | 2369 __ AllocateHeapNumber(tmp, tmp2, &slow); |
| 2336 __ jmp(&allocated); | 2370 __ jmp(&allocated); |
| 2337 | 2371 |
| 2338 // Slow case: Call the runtime system to do the number allocation. | 2372 // Slow case: Call the runtime system to do the number allocation. |
| 2339 __ bind(&slow); | 2373 __ bind(&slow); |
| 2340 | 2374 |
| 2341 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 2375 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); |
| 2342 RecordSafepointWithRegisters( | |
| 2343 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | |
| 2344 // Set the pointer to the new heap number in tmp. | 2376 // Set the pointer to the new heap number in tmp. |
| 2345 if (!tmp.is(rax)) { | 2377 if (!tmp.is(rax)) { |
| 2346 __ movq(tmp, rax); | 2378 __ movq(tmp, rax); |
| 2347 } | 2379 } |
| 2348 | 2380 |
| 2349 // Restore input_reg after call to runtime. | 2381 // Restore input_reg after call to runtime. |
| 2350 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); | 2382 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); |
| 2351 | 2383 |
| 2352 __ bind(&allocated); | 2384 __ bind(&allocated); |
| 2353 __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 2385 __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 2354 __ shl(tmp2, Immediate(1)); | 2386 __ shl(tmp2, Immediate(1)); |
| 2355 __ shr(tmp2, Immediate(1)); | 2387 __ shr(tmp2, Immediate(1)); |
| 2356 __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2); | 2388 __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2); |
| 2357 __ StoreToSafepointRegisterSlot(input_reg, tmp); | 2389 __ StoreToSafepointRegisterSlot(input_reg, tmp); |
| 2358 | 2390 |
| 2359 __ bind(&done); | 2391 __ bind(&done); |
| 2360 __ PopSafepointRegisters(); | |
| 2361 } | 2392 } |
| 2362 | 2393 |
| 2363 | 2394 |
| 2364 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) { | 2395 void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) { |
| 2365 Register input_reg = ToRegister(instr->InputAt(0)); | 2396 Register input_reg = ToRegister(instr->InputAt(0)); |
| 2366 __ testl(input_reg, input_reg); | 2397 __ testl(input_reg, input_reg); |
| 2367 Label is_positive; | 2398 Label is_positive; |
| 2368 __ j(not_sign, &is_positive); | 2399 __ j(not_sign, &is_positive); |
| 2369 __ negl(input_reg); // Sets flags. | 2400 __ negl(input_reg); // Sets flags. |
| 2370 DeoptimizeIf(negative, instr->environment()); | 2401 DeoptimizeIf(negative, instr->environment()); |
| (...skipping 506 matching lines...) |
| 2877 | 2908 |
| 2878 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) { | 2909 void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) { |
| 2879 Register string = ToRegister(instr->string()); | 2910 Register string = ToRegister(instr->string()); |
| 2880 Register result = ToRegister(instr->result()); | 2911 Register result = ToRegister(instr->result()); |
| 2881 | 2912 |
| 2882 // TODO(3095996): Get rid of this. For now, we need to make the | 2913 // TODO(3095996): Get rid of this. For now, we need to make the |
| 2883 // result register contain a valid pointer because it is already | 2914 // result register contain a valid pointer because it is already |
| 2884 // contained in the register pointer map. | 2915 // contained in the register pointer map. |
| 2885 __ Set(result, 0); | 2916 __ Set(result, 0); |
| 2886 | 2917 |
| 2887 __ PushSafepointRegisters(); | 2918 PushSafepointRegistersScope scope(this); |
| 2888 __ push(string); | 2919 __ push(string); |
| 2889 // Push the index as a smi. This is safe because of the checks in | 2920 // Push the index as a smi. This is safe because of the checks in |
| 2890 // DoStringCharCodeAt above. | 2921 // DoStringCharCodeAt above. |
| 2891 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); | 2922 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue); |
| 2892 if (instr->index()->IsConstantOperand()) { | 2923 if (instr->index()->IsConstantOperand()) { |
| 2893 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); | 2924 int const_index = ToInteger32(LConstantOperand::cast(instr->index())); |
| 2894 __ Push(Smi::FromInt(const_index)); | 2925 __ Push(Smi::FromInt(const_index)); |
| 2895 } else { | 2926 } else { |
| 2896 Register index = ToRegister(instr->index()); | 2927 Register index = ToRegister(instr->index()); |
| 2897 __ Integer32ToSmi(index, index); | 2928 __ Integer32ToSmi(index, index); |
| 2898 __ push(index); | 2929 __ push(index); |
| 2899 } | 2930 } |
| 2900 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2931 CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2, instr); |
| 2901 __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt); | |
| 2902 RecordSafepointWithRegisters( | |
| 2903 instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex); | |
| 2904 if (FLAG_debug_code) { | 2932 if (FLAG_debug_code) { |
| 2905 __ AbortIfNotSmi(rax); | 2933 __ AbortIfNotSmi(rax); |
| 2906 } | 2934 } |
| 2907 __ SmiToInteger32(rax, rax); | 2935 __ SmiToInteger32(rax, rax); |
| 2908 __ StoreToSafepointRegisterSlot(result, rax); | 2936 __ StoreToSafepointRegisterSlot(result, rax); |
| 2909 __ PopSafepointRegisters(); | |
| 2910 } | 2937 } |
| 2911 | 2938 |
| 2912 | 2939 |
| 2913 void LCodeGen::DoStringLength(LStringLength* instr) { | 2940 void LCodeGen::DoStringLength(LStringLength* instr) { |
| 2914 Register string = ToRegister(instr->string()); | 2941 Register string = ToRegister(instr->string()); |
| 2915 Register result = ToRegister(instr->result()); | 2942 Register result = ToRegister(instr->result()); |
| 2916 __ movq(result, FieldOperand(string, String::kLengthOffset)); | 2943 __ movq(result, FieldOperand(string, String::kLengthOffset)); |
| 2917 } | 2944 } |
| 2918 | 2945 |
| 2919 | 2946 |
| (...skipping 44 matching lines...) |
| 2964 } | 2991 } |
| 2965 | 2992 |
| 2966 | 2993 |
| 2967 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { | 2994 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { |
| 2968 // TODO(3095996): Get rid of this. For now, we need to make the | 2995 // TODO(3095996): Get rid of this. For now, we need to make the |
| 2969 // result register contain a valid pointer because it is already | 2996 // result register contain a valid pointer because it is already |
| 2970 // contained in the register pointer map. | 2997 // contained in the register pointer map. |
| 2971 Register reg = ToRegister(instr->result()); | 2998 Register reg = ToRegister(instr->result()); |
| 2972 __ Move(reg, Smi::FromInt(0)); | 2999 __ Move(reg, Smi::FromInt(0)); |
| 2973 | 3000 |
| 2974 __ PushSafepointRegisters(); | 3001 { |
| 2975 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 3002 PushSafepointRegistersScope scope(this); |
| 2976 RecordSafepointWithRegisters( | 3003 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr); |
| 2977 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex); | 3004 // Ensure that value in rax survives popping registers. |
| 2978 // Ensure that value in rax survives popping registers. | 3005 __ movq(kScratchRegister, rax); |
| 2979 __ movq(kScratchRegister, rax); | 3006 } |
| 2980 __ PopSafepointRegisters(); | |
| 2981 __ movq(reg, kScratchRegister); | 3007 __ movq(reg, kScratchRegister); |
| 2982 } | 3008 } |
| 2983 | 3009 |
| 2984 | 3010 |
| 2985 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 3011 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
| 2986 ASSERT(instr->InputAt(0)->Equals(instr->result())); | 3012 ASSERT(instr->InputAt(0)->Equals(instr->result())); |
| 2987 Register input = ToRegister(instr->InputAt(0)); | 3013 Register input = ToRegister(instr->InputAt(0)); |
| 2988 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); | 3014 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); |
| 2989 __ Integer32ToSmi(input, input); | 3015 __ Integer32ToSmi(input, input); |
| 2990 } | 3016 } |
| (...skipping 633 matching lines...) |
| 3624 RegisterEnvironmentForDeoptimization(environment); | 3650 RegisterEnvironmentForDeoptimization(environment); |
| 3625 ASSERT(osr_pc_offset_ == -1); | 3651 ASSERT(osr_pc_offset_ == -1); |
| 3626 osr_pc_offset_ = masm()->pc_offset(); | 3652 osr_pc_offset_ = masm()->pc_offset(); |
| 3627 } | 3653 } |
| 3628 | 3654 |
| 3629 #undef __ | 3655 #undef __ |
| 3630 | 3656 |
| 3631 } } // namespace v8::internal | 3657 } } // namespace v8::internal |
| 3632 | 3658 |
| 3633 #endif // V8_TARGET_ARCH_X64 | 3659 #endif // V8_TARGET_ARCH_X64 |