| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 412 matching lines...) |
| 423 } | 423 } |
| 424 } | 424 } |
| 425 | 425 |
| 426 | 426 |
| 427 void LCodeGen::DoCallFunction(LCallFunction* instr) { | 427 void LCodeGen::DoCallFunction(LCallFunction* instr) { |
| 428 ASSERT(ToRegister(instr->context()).is(cp)); | 428 ASSERT(ToRegister(instr->context()).is(cp)); |
| 429 ASSERT(ToRegister(instr->function()).Is(x1)); | 429 ASSERT(ToRegister(instr->function()).Is(x1)); |
| 430 ASSERT(ToRegister(instr->result()).Is(x0)); | 430 ASSERT(ToRegister(instr->result()).Is(x0)); |
| 431 | 431 |
| 432 int arity = instr->arity(); | 432 int arity = instr->arity(); |
| 433 CallFunctionStub stub(arity, instr->hydrogen()->function_flags()); | 433 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); |
| 434 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 434 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 435 } | 435 } |
| 436 | 436 |
| 437 | 437 |
| 438 void LCodeGen::DoCallNew(LCallNew* instr) { | 438 void LCodeGen::DoCallNew(LCallNew* instr) { |
| 439 ASSERT(ToRegister(instr->context()).is(cp)); | 439 ASSERT(ToRegister(instr->context()).is(cp)); |
| 440 ASSERT(instr->IsMarkedAsCall()); | 440 ASSERT(instr->IsMarkedAsCall()); |
| 441 ASSERT(ToRegister(instr->constructor()).is(x1)); | 441 ASSERT(ToRegister(instr->constructor()).is(x1)); |
| 442 | 442 |
| 443 __ Mov(x0, instr->arity()); | 443 __ Mov(x0, instr->arity()); |
| 444 // No cell in x2 for construct type feedback in optimized code. | 444 // No cell in x2 for construct type feedback in optimized code. |
| 445 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); | 445 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); |
| 446 | 446 |
| 447 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); | 447 CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS); |
| 448 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 448 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 449 | 449 |
| 450 ASSERT(ToRegister(instr->result()).is(x0)); | 450 ASSERT(ToRegister(instr->result()).is(x0)); |
| 451 } | 451 } |
| 452 | 452 |
| 453 | 453 |
| 454 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { | 454 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { |
| 455 ASSERT(instr->IsMarkedAsCall()); | 455 ASSERT(instr->IsMarkedAsCall()); |
| 456 ASSERT(ToRegister(instr->context()).is(cp)); | 456 ASSERT(ToRegister(instr->context()).is(cp)); |
| 457 ASSERT(ToRegister(instr->constructor()).is(x1)); | 457 ASSERT(ToRegister(instr->constructor()).is(x1)); |
| 458 | 458 |
| 459 __ Mov(x0, Operand(instr->arity())); | 459 __ Mov(x0, Operand(instr->arity())); |
| 460 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); | 460 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); |
| 461 | 461 |
| 462 ElementsKind kind = instr->hydrogen()->elements_kind(); | 462 ElementsKind kind = instr->hydrogen()->elements_kind(); |
| 463 AllocationSiteOverrideMode override_mode = | 463 AllocationSiteOverrideMode override_mode = |
| 464 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) | 464 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) |
| 465 ? DISABLE_ALLOCATION_SITES | 465 ? DISABLE_ALLOCATION_SITES |
| 466 : DONT_OVERRIDE; | 466 : DONT_OVERRIDE; |
| 467 | 467 |
| 468 if (instr->arity() == 0) { | 468 if (instr->arity() == 0) { |
| 469 ArrayNoArgumentConstructorStub stub(kind, override_mode); | 469 ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode); |
| 470 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 470 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 471 } else if (instr->arity() == 1) { | 471 } else if (instr->arity() == 1) { |
| 472 Label done; | 472 Label done; |
| 473 if (IsFastPackedElementsKind(kind)) { | 473 if (IsFastPackedElementsKind(kind)) { |
| 474 Label packed_case; | 474 Label packed_case; |
| 475 | 475 |
| 476 // We might need to create a holey array; look at the first argument. | 476 // We might need to create a holey array; look at the first argument. |
| 477 __ Peek(x10, 0); | 477 __ Peek(x10, 0); |
| 478 __ Cbz(x10, &packed_case); | 478 __ Cbz(x10, &packed_case); |
| 479 | 479 |
| 480 ElementsKind holey_kind = GetHoleyElementsKind(kind); | 480 ElementsKind holey_kind = GetHoleyElementsKind(kind); |
| 481 ArraySingleArgumentConstructorStub stub(holey_kind, override_mode); | 481 ArraySingleArgumentConstructorStub stub(isolate(), |
| | 482 holey_kind, |
| | 483 override_mode); |
| 482 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 484 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 483 __ B(&done); | 485 __ B(&done); |
| 484 __ Bind(&packed_case); | 486 __ Bind(&packed_case); |
| 485 } | 487 } |
| 486 | 488 |
| 487 ArraySingleArgumentConstructorStub stub(kind, override_mode); | 489 ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode); |
| 488 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 490 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 489 __ Bind(&done); | 491 __ Bind(&done); |
| 490 } else { | 492 } else { |
| 491 ArrayNArgumentsConstructorStub stub(kind, override_mode); | 493 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode); |
| 492 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 494 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 493 } | 495 } |
| 494 | 496 |
| 495 ASSERT(ToRegister(instr->result()).is(x0)); | 497 ASSERT(ToRegister(instr->result()).is(x0)); |
| 496 } | 498 } |
| 497 | 499 |
| 498 | 500 |
| 499 void LCodeGen::CallRuntime(const Runtime::Function* function, | 501 void LCodeGen::CallRuntime(const Runtime::Function* function, |
| 500 int num_arguments, | 502 int num_arguments, |
| 501 LInstruction* instr, | 503 LInstruction* instr, |
| (...skipping 201 matching lines...) |
| 703 if (info()->saves_caller_doubles()) { | 705 if (info()->saves_caller_doubles()) { |
| 704 SaveCallerDoubles(); | 706 SaveCallerDoubles(); |
| 705 } | 707 } |
| 706 | 708 |
| 707 // Allocate a local context if needed. | 709 // Allocate a local context if needed. |
| 708 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 710 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
| 709 if (heap_slots > 0) { | 711 if (heap_slots > 0) { |
| 710 Comment(";;; Allocate local context"); | 712 Comment(";;; Allocate local context"); |
| 711 // Argument to NewContext is the function, which is in x1. | 713 // Argument to NewContext is the function, which is in x1. |
| 712 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 714 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
| 713 FastNewContextStub stub(heap_slots); | 715 FastNewContextStub stub(isolate(), heap_slots); |
| 714 __ CallStub(&stub); | 716 __ CallStub(&stub); |
| 715 } else { | 717 } else { |
| 716 __ Push(x1); | 718 __ Push(x1); |
| 717 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1); | 719 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1); |
| 718 } | 720 } |
| 719 RecordSafepoint(Safepoint::kNoLazyDeopt); | 721 RecordSafepoint(Safepoint::kNoLazyDeopt); |
| 720 // Context is returned in x0. It replaces the context passed to us. It's | 722 // Context is returned in x0. It replaces the context passed to us. It's |
| 721 // saved in the stack and kept live in cp. | 723 // saved in the stack and kept live in cp. |
| 722 __ Mov(cp, x0); | 724 __ Mov(cp, x0); |
| 723 __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 725 __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| (...skipping 1007 matching lines...) |
| 1731 } | 1733 } |
| 1732 } | 1734 } |
| 1733 | 1735 |
| 1734 | 1736 |
| 1735 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1737 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 1736 ASSERT(ToRegister(instr->context()).is(cp)); | 1738 ASSERT(ToRegister(instr->context()).is(cp)); |
| 1737 ASSERT(ToRegister(instr->left()).is(x1)); | 1739 ASSERT(ToRegister(instr->left()).is(x1)); |
| 1738 ASSERT(ToRegister(instr->right()).is(x0)); | 1740 ASSERT(ToRegister(instr->right()).is(x0)); |
| 1739 ASSERT(ToRegister(instr->result()).is(x0)); | 1741 ASSERT(ToRegister(instr->result()).is(x0)); |
| 1740 | 1742 |
| 1741 BinaryOpICStub stub(instr->op(), NO_OVERWRITE); | 1743 BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE); |
| 1742 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 1744 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 1743 } | 1745 } |
| 1744 | 1746 |
| 1745 | 1747 |
| 1746 void LCodeGen::DoBitI(LBitI* instr) { | 1748 void LCodeGen::DoBitI(LBitI* instr) { |
| 1747 Register result = ToRegister32(instr->result()); | 1749 Register result = ToRegister32(instr->result()); |
| 1748 Register left = ToRegister32(instr->left()); | 1750 Register left = ToRegister32(instr->left()); |
| 1749 Operand right = ToOperand32U(instr->right()); | 1751 Operand right = ToOperand32U(instr->right()); |
| 1750 | 1752 |
| 1751 switch (instr->op()) { | 1753 switch (instr->op()) { |
| (...skipping 290 matching lines...) |
| 2042 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 2044 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
| 2043 CallRuntime(instr->function(), instr->arity(), instr); | 2045 CallRuntime(instr->function(), instr->arity(), instr); |
| 2044 } | 2046 } |
| 2045 | 2047 |
| 2046 | 2048 |
| 2047 void LCodeGen::DoCallStub(LCallStub* instr) { | 2049 void LCodeGen::DoCallStub(LCallStub* instr) { |
| 2048 ASSERT(ToRegister(instr->context()).is(cp)); | 2050 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2049 ASSERT(ToRegister(instr->result()).is(x0)); | 2051 ASSERT(ToRegister(instr->result()).is(x0)); |
| 2050 switch (instr->hydrogen()->major_key()) { | 2052 switch (instr->hydrogen()->major_key()) { |
| 2051 case CodeStub::RegExpExec: { | 2053 case CodeStub::RegExpExec: { |
| 2052 RegExpExecStub stub; | 2054 RegExpExecStub stub(isolate()); |
| 2053 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2055 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 2054 break; | 2056 break; |
| 2055 } | 2057 } |
| 2056 case CodeStub::SubString: { | 2058 case CodeStub::SubString: { |
| 2057 SubStringStub stub; | 2059 SubStringStub stub(isolate()); |
| 2058 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2060 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 2059 break; | 2061 break; |
| 2060 } | 2062 } |
| 2061 case CodeStub::StringCompare: { | 2063 case CodeStub::StringCompare: { |
| 2062 StringCompareStub stub; | 2064 StringCompareStub stub(isolate()); |
| 2063 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2065 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 2064 break; | 2066 break; |
| 2065 } | 2067 } |
| 2066 default: | 2068 default: |
| 2067 UNREACHABLE(); | 2069 UNREACHABLE(); |
| 2068 } | 2070 } |
| 2069 } | 2071 } |
| 2070 | 2072 |
| 2071 | 2073 |
| 2072 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { | 2074 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { |
| (...skipping 708 matching lines...) |
| 2781 | 2783 |
| 2782 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 2784 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
| 2783 ASSERT(ToRegister(instr->context()).is(cp)); | 2785 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2784 // FunctionLiteral instruction is marked as call, we can trash any register. | 2786 // FunctionLiteral instruction is marked as call, we can trash any register. |
| 2785 ASSERT(instr->IsMarkedAsCall()); | 2787 ASSERT(instr->IsMarkedAsCall()); |
| 2786 | 2788 |
| 2787 // Use the fast case closure allocation code that allocates in new | 2789 // Use the fast case closure allocation code that allocates in new |
| 2788 // space for nested functions that don't need literals cloning. | 2790 // space for nested functions that don't need literals cloning. |
| 2789 bool pretenure = instr->hydrogen()->pretenure(); | 2791 bool pretenure = instr->hydrogen()->pretenure(); |
| 2790 if (!pretenure && instr->hydrogen()->has_no_literals()) { | 2792 if (!pretenure && instr->hydrogen()->has_no_literals()) { |
| 2791 FastNewClosureStub stub(instr->hydrogen()->strict_mode(), | 2793 FastNewClosureStub stub(isolate(), |
| | 2794 instr->hydrogen()->strict_mode(), |
| 2792 instr->hydrogen()->is_generator()); | 2795 instr->hydrogen()->is_generator()); |
| 2793 __ Mov(x2, Operand(instr->hydrogen()->shared_info())); | 2796 __ Mov(x2, Operand(instr->hydrogen()->shared_info())); |
| 2794 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2797 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 2795 } else { | 2798 } else { |
| 2796 __ Mov(x2, Operand(instr->hydrogen()->shared_info())); | 2799 __ Mov(x2, Operand(instr->hydrogen()->shared_info())); |
| 2797 __ Mov(x1, Operand(pretenure ? factory()->true_value() | 2800 __ Mov(x1, Operand(pretenure ? factory()->true_value() |
| 2798 : factory()->false_value())); | 2801 : factory()->false_value())); |
| 2799 __ Push(cp, x2, x1); | 2802 __ Push(cp, x2, x1); |
| 2800 CallRuntime(Runtime::kHiddenNewClosure, 3, instr); | 2803 CallRuntime(Runtime::kHiddenNewClosure, 3, instr); |
| 2801 } | 2804 } |
| (...skipping 154 matching lines...) |
| 2956 } | 2959 } |
| 2957 } | 2960 } |
| 2958 | 2961 |
| 2959 | 2962 |
| 2960 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 2963 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
| 2961 ASSERT(ToRegister(instr->context()).is(cp)); | 2964 ASSERT(ToRegister(instr->context()).is(cp)); |
| 2962 // Assert that the arguments are in the registers expected by InstanceofStub. | 2965 // Assert that the arguments are in the registers expected by InstanceofStub. |
| 2963 ASSERT(ToRegister(instr->left()).Is(InstanceofStub::left())); | 2966 ASSERT(ToRegister(instr->left()).Is(InstanceofStub::left())); |
| 2964 ASSERT(ToRegister(instr->right()).Is(InstanceofStub::right())); | 2967 ASSERT(ToRegister(instr->right()).Is(InstanceofStub::right())); |
| 2965 | 2968 |
| 2966 InstanceofStub stub(InstanceofStub::kArgsInRegisters); | 2969 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); |
| 2967 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2970 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 2968 | 2971 |
| 2969 // InstanceofStub returns a result in x0: | 2972 // InstanceofStub returns a result in x0: |
| 2970 // 0 => not an instance | 2973 // 0 => not an instance |
| 2971 // smi 1 => instance. | 2974 // smi 1 => instance. |
| 2972 __ Cmp(x0, 0); | 2975 __ Cmp(x0, 0); |
| 2973 __ LoadTrueFalseRoots(x0, x1); | 2976 __ LoadTrueFalseRoots(x0, x1); |
| 2974 __ Csel(x0, x0, x1, eq); | 2977 __ Csel(x0, x0, x1, eq); |
| 2975 } | 2978 } |
| 2976 | 2979 |
| (...skipping 94 matching lines...) |
| 3071 flags = static_cast<InstanceofStub::Flags>( | 3074 flags = static_cast<InstanceofStub::Flags>( |
| 3072 flags | InstanceofStub::kCallSiteInlineCheck); | 3075 flags | InstanceofStub::kCallSiteInlineCheck); |
| 3073 | 3076 |
| 3074 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 3077 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 3075 LoadContextFromDeferred(instr->context()); | 3078 LoadContextFromDeferred(instr->context()); |
| 3076 | 3079 |
| 3077 // Prepare InstanceofStub arguments. | 3080 // Prepare InstanceofStub arguments. |
| 3078 ASSERT(ToRegister(instr->value()).Is(InstanceofStub::left())); | 3081 ASSERT(ToRegister(instr->value()).Is(InstanceofStub::left())); |
| 3079 __ LoadObject(InstanceofStub::right(), instr->function()); | 3082 __ LoadObject(InstanceofStub::right(), instr->function()); |
| 3080 | 3083 |
| 3081 InstanceofStub stub(flags); | 3084 InstanceofStub stub(isolate(), flags); |
| 3082 CallCodeGeneric(stub.GetCode(isolate()), | 3085 CallCodeGeneric(stub.GetCode(isolate()), |
| 3083 RelocInfo::CODE_TARGET, | 3086 RelocInfo::CODE_TARGET, |
| 3084 instr, | 3087 instr, |
| 3085 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 3088 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
| 3086 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); | 3089 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); |
| 3087 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 3090 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 3088 | 3091 |
| 3089 // Put the result value into the result register slot. | 3092 // Put the result value into the result register slot. |
| 3090 __ StoreToSafepointRegisterSlot(result, result); | 3093 __ StoreToSafepointRegisterSlot(result, result); |
| 3091 } | 3094 } |
| (...skipping 891 matching lines...) |
| 3983 // Just make sure that the input/output registers are the expected ones. | 3986 // Just make sure that the input/output registers are the expected ones. |
| 3984 ASSERT(!instr->right()->IsDoubleRegister() || | 3987 ASSERT(!instr->right()->IsDoubleRegister() || |
| 3985 ToDoubleRegister(instr->right()).is(d1)); | 3988 ToDoubleRegister(instr->right()).is(d1)); |
| 3986 ASSERT(exponent_type.IsInteger32() || !instr->right()->IsRegister() || | 3989 ASSERT(exponent_type.IsInteger32() || !instr->right()->IsRegister() || |
| 3987 ToRegister(instr->right()).is(x11)); | 3990 ToRegister(instr->right()).is(x11)); |
| 3988 ASSERT(!exponent_type.IsInteger32() || ToRegister(instr->right()).is(x12)); | 3991 ASSERT(!exponent_type.IsInteger32() || ToRegister(instr->right()).is(x12)); |
| 3989 ASSERT(ToDoubleRegister(instr->left()).is(d0)); | 3992 ASSERT(ToDoubleRegister(instr->left()).is(d0)); |
| 3990 ASSERT(ToDoubleRegister(instr->result()).is(d0)); | 3993 ASSERT(ToDoubleRegister(instr->result()).is(d0)); |
| 3991 | 3994 |
| 3992 if (exponent_type.IsSmi()) { | 3995 if (exponent_type.IsSmi()) { |
| 3993 MathPowStub stub(MathPowStub::TAGGED); | 3996 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
| 3994 __ CallStub(&stub); | 3997 __ CallStub(&stub); |
| 3995 } else if (exponent_type.IsTagged()) { | 3998 } else if (exponent_type.IsTagged()) { |
| 3996 Label no_deopt; | 3999 Label no_deopt; |
| 3997 __ JumpIfSmi(x11, &no_deopt); | 4000 __ JumpIfSmi(x11, &no_deopt); |
| 3998 __ Ldr(x0, FieldMemOperand(x11, HeapObject::kMapOffset)); | 4001 __ Ldr(x0, FieldMemOperand(x11, HeapObject::kMapOffset)); |
| 3999 DeoptimizeIfNotRoot(x0, Heap::kHeapNumberMapRootIndex, | 4002 DeoptimizeIfNotRoot(x0, Heap::kHeapNumberMapRootIndex, |
| 4000 instr->environment()); | 4003 instr->environment()); |
| 4001 __ Bind(&no_deopt); | 4004 __ Bind(&no_deopt); |
| 4002 MathPowStub stub(MathPowStub::TAGGED); | 4005 MathPowStub stub(isolate(), MathPowStub::TAGGED); |
| 4003 __ CallStub(&stub); | 4006 __ CallStub(&stub); |
| 4004 } else if (exponent_type.IsInteger32()) { | 4007 } else if (exponent_type.IsInteger32()) { |
| 4005 // Ensure integer exponent has no garbage in top 32-bits, as MathPowStub | 4008 // Ensure integer exponent has no garbage in top 32-bits, as MathPowStub |
| 4006 // supports large integer exponents. | 4009 // supports large integer exponents. |
| 4007 Register exponent = ToRegister(instr->right()); | 4010 Register exponent = ToRegister(instr->right()); |
| 4008 __ Sxtw(exponent, exponent); | 4011 __ Sxtw(exponent, exponent); |
| 4009 MathPowStub stub(MathPowStub::INTEGER); | 4012 MathPowStub stub(isolate(), MathPowStub::INTEGER); |
| 4010 __ CallStub(&stub); | 4013 __ CallStub(&stub); |
| 4011 } else { | 4014 } else { |
| 4012 ASSERT(exponent_type.IsDouble()); | 4015 ASSERT(exponent_type.IsDouble()); |
| 4013 MathPowStub stub(MathPowStub::DOUBLE); | 4016 MathPowStub stub(isolate(), MathPowStub::DOUBLE); |
| 4014 __ CallStub(&stub); | 4017 __ CallStub(&stub); |
| 4015 } | 4018 } |
| 4016 } | 4019 } |
| 4017 | 4020 |
| 4018 | 4021 |
| 4019 void LCodeGen::DoMathRound(LMathRound* instr) { | 4022 void LCodeGen::DoMathRound(LMathRound* instr) { |
| 4020 // TODO(jbramley): We could provide a double result here using frint. | 4023 // TODO(jbramley): We could provide a double result here using frint. |
| 4021 DoubleRegister input = ToDoubleRegister(instr->value()); | 4024 DoubleRegister input = ToDoubleRegister(instr->value()); |
| 4022 DoubleRegister temp1 = ToDoubleRegister(instr->temp1()); | 4025 DoubleRegister temp1 = ToDoubleRegister(instr->temp1()); |
| 4023 Register result = ToRegister(instr->result()); | 4026 Register result = ToRegister(instr->result()); |
| (...skipping 1281 matching lines...) |
| 5305 __ Mov(x2, Operand(instr->name())); | 5308 __ Mov(x2, Operand(instr->name())); |
| 5306 Handle<Code> ic = StoreIC::initialize_stub(isolate(), instr->strict_mode()); | 5309 Handle<Code> ic = StoreIC::initialize_stub(isolate(), instr->strict_mode()); |
| 5307 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 5310 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 5308 } | 5311 } |
| 5309 | 5312 |
| 5310 | 5313 |
| 5311 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 5314 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
| 5312 ASSERT(ToRegister(instr->context()).is(cp)); | 5315 ASSERT(ToRegister(instr->context()).is(cp)); |
| 5313 ASSERT(ToRegister(instr->left()).Is(x1)); | 5316 ASSERT(ToRegister(instr->left()).Is(x1)); |
| 5314 ASSERT(ToRegister(instr->right()).Is(x0)); | 5317 ASSERT(ToRegister(instr->right()).Is(x0)); |
| 5315 StringAddStub stub(instr->hydrogen()->flags(), | 5318 StringAddStub stub(isolate(), |
| | 5319 instr->hydrogen()->flags(), |
| 5316 instr->hydrogen()->pretenure_flag()); | 5320 instr->hydrogen()->pretenure_flag()); |
| 5317 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 5321 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 5318 } | 5322 } |
| 5319 | 5323 |
| 5320 | 5324 |
| 5321 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { | 5325 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { |
| 5322 class DeferredStringCharCodeAt: public LDeferredCode { | 5326 class DeferredStringCharCodeAt: public LDeferredCode { |
| 5323 public: | 5327 public: |
| 5324 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) | 5328 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) |
| 5325 : LDeferredCode(codegen), instr_(instr) { } | 5329 : LDeferredCode(codegen), instr_(instr) { } |
| (...skipping 304 matching lines...) |
| 5630 // that we do not use any register across a call. | 5634 // that we do not use any register across a call. |
| 5631 __ CheckMap(object, temps.AcquireX(), from_map, ¬_applicable, | 5635 __ CheckMap(object, temps.AcquireX(), from_map, ¬_applicable, |
| 5632 DONT_DO_SMI_CHECK); | 5636 DONT_DO_SMI_CHECK); |
| 5633 } | 5637 } |
| 5634 ASSERT(object.is(x0)); | 5638 ASSERT(object.is(x0)); |
| 5635 ASSERT(ToRegister(instr->context()).is(cp)); | 5639 ASSERT(ToRegister(instr->context()).is(cp)); |
| 5636 PushSafepointRegistersScope scope( | 5640 PushSafepointRegistersScope scope( |
| 5637 this, Safepoint::kWithRegistersAndDoubles); | 5641 this, Safepoint::kWithRegistersAndDoubles); |
| 5638 __ Mov(x1, Operand(to_map)); | 5642 __ Mov(x1, Operand(to_map)); |
| 5639 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; | 5643 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; |
| 5640 TransitionElementsKindStub stub(from_kind, to_kind, is_js_array); | 5644 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array); |
| 5641 __ CallStub(&stub); | 5645 __ CallStub(&stub); |
| 5642 RecordSafepointWithRegistersAndDoubles( | 5646 RecordSafepointWithRegistersAndDoubles( |
| 5643 instr->pointer_map(), 0, Safepoint::kLazyDeopt); | 5647 instr->pointer_map(), 0, Safepoint::kLazyDeopt); |
| 5644 } | 5648 } |
| 5645 __ Bind(¬_applicable); | 5649 __ Bind(¬_applicable); |
| 5646 } | 5650 } |
| 5647 | 5651 |
| 5648 | 5652 |
| 5649 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { | 5653 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { |
| 5650 Register object = ToRegister(instr->object()); | 5654 Register object = ToRegister(instr->object()); |
| (...skipping 244 matching lines...) |
| 5895 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 5899 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 5896 // Index is equal to negated out of object property index plus 1. | 5900 // Index is equal to negated out of object property index plus 1. |
| 5897 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 5901 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
| 5898 __ Ldr(result, FieldMemOperand(result, | 5902 __ Ldr(result, FieldMemOperand(result, |
| 5899 FixedArray::kHeaderSize - kPointerSize)); | 5903 FixedArray::kHeaderSize - kPointerSize)); |
| 5900 __ Bind(deferred->exit()); | 5904 __ Bind(deferred->exit()); |
| 5901 __ Bind(&done); | 5905 __ Bind(&done); |
| 5902 } | 5906 } |
| 5903 | 5907 |
| 5904 } } // namespace v8::internal | 5908 } } // namespace v8::internal |