| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 19 matching lines...) Expand all Loading... |
| 30 #if defined(V8_TARGET_ARCH_IA32) | 30 #if defined(V8_TARGET_ARCH_IA32) |
| 31 | 31 |
| 32 #include "ia32/lithium-codegen-ia32.h" | 32 #include "ia32/lithium-codegen-ia32.h" |
| 33 #include "code-stubs.h" | 33 #include "code-stubs.h" |
| 34 #include "stub-cache.h" | 34 #include "stub-cache.h" |
| 35 | 35 |
| 36 namespace v8 { | 36 namespace v8 { |
| 37 namespace internal { | 37 namespace internal { |
| 38 | 38 |
| 39 | 39 |
| 40 // When invoking builtins, we need to record the safepoint in the middle of |
| 41 // the invoke instruction sequence generated by the macro assembler. |
| 40 class SafepointGenerator : public PostCallGenerator { | 42 class SafepointGenerator : public PostCallGenerator { |
| 41 public: | 43 public: |
| 42 SafepointGenerator(LCodeGen* codegen, | 44 SafepointGenerator(LCodeGen* codegen, |
| 43 LPointerMap* pointers, | 45 LPointerMap* pointers, |
| 44 int deoptimization_index) | 46 int deoptimization_index) |
| 45 : codegen_(codegen), | 47 : codegen_(codegen), |
| 46 pointers_(pointers), | 48 pointers_(pointers), |
| 47 deoptimization_index_(deoptimization_index) { } | 49 deoptimization_index_(deoptimization_index) { } |
| 48 virtual ~SafepointGenerator() { } | 50 virtual ~SafepointGenerator() { } |
| 49 | 51 |
| (...skipping 18 matching lines...) Expand all Loading... |
| 68 return GeneratePrologue() && | 70 return GeneratePrologue() && |
| 69 GenerateBody() && | 71 GenerateBody() && |
| 70 GenerateDeferredCode() && | 72 GenerateDeferredCode() && |
| 71 GenerateSafepointTable(); | 73 GenerateSafepointTable(); |
| 72 } | 74 } |
| 73 | 75 |
| 74 | 76 |
| 75 void LCodeGen::FinishCode(Handle<Code> code) { | 77 void LCodeGen::FinishCode(Handle<Code> code) { |
| 76 ASSERT(is_done()); | 78 ASSERT(is_done()); |
| 77 code->set_stack_slots(StackSlotCount()); | 79 code->set_stack_slots(StackSlotCount()); |
| 78 code->set_safepoint_table_start(safepoints_.GetCodeOffset()); | 80 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
| 79 PopulateDeoptimizationData(code); | 81 PopulateDeoptimizationData(code); |
| 80 } | 82 } |
| 81 | 83 |
| 82 | 84 |
| 83 void LCodeGen::Abort(const char* format, ...) { | 85 void LCodeGen::Abort(const char* format, ...) { |
| 84 if (FLAG_trace_bailout) { | 86 if (FLAG_trace_bailout) { |
| 85 SmartPointer<char> debug_name = graph()->debug_name()->ToCString(); | 87 SmartPointer<char> debug_name = graph()->debug_name()->ToCString(); |
| 86 PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name); | 88 PrintF("Aborting LCodeGen in @\"%s\": ", *debug_name); |
| 87 va_list arguments; | 89 va_list arguments; |
| 88 va_start(arguments, format); | 90 va_start(arguments, format); |
| (...skipping 270 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 359 translation->StoreLiteral(src_index); | 361 translation->StoreLiteral(src_index); |
| 360 } else { | 362 } else { |
| 361 UNREACHABLE(); | 363 UNREACHABLE(); |
| 362 } | 364 } |
| 363 } | 365 } |
| 364 | 366 |
| 365 | 367 |
| 366 void LCodeGen::CallCode(Handle<Code> code, | 368 void LCodeGen::CallCode(Handle<Code> code, |
| 367 RelocInfo::Mode mode, | 369 RelocInfo::Mode mode, |
| 368 LInstruction* instr) { | 370 LInstruction* instr) { |
| 369 if (instr != NULL) { | 371 ASSERT(instr != NULL); |
| 370 LPointerMap* pointers = instr->pointer_map(); | 372 LPointerMap* pointers = instr->pointer_map(); |
| 371 RecordPosition(pointers->position()); | 373 RecordPosition(pointers->position()); |
| 372 __ call(code, mode); | 374 __ call(code, mode); |
| 373 RegisterLazyDeoptimization(instr); | 375 RegisterLazyDeoptimization(instr); |
| 374 } else { | |
| 375 LPointerMap no_pointers(0); | |
| 376 RecordPosition(no_pointers.position()); | |
| 377 __ call(code, mode); | |
| 378 RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex); | |
| 379 } | |
| 380 | 376 |
| 381 // Signal that we don't inline smi code before these stubs in the | 377 // Signal that we don't inline smi code before these stubs in the |
| 382 // optimizing code generator. | 378 // optimizing code generator. |
| 383 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || | 379 if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC || |
| 384 code->kind() == Code::COMPARE_IC) { | 380 code->kind() == Code::COMPARE_IC) { |
| 385 __ nop(); | 381 __ nop(); |
| 386 } | 382 } |
| 387 } | 383 } |
| 388 | 384 |
| 389 | 385 |
| 390 void LCodeGen::CallRuntime(const Runtime::Function* function, | 386 void LCodeGen::CallRuntime(const Runtime::Function* function, |
| 391 int num_arguments, | 387 int num_arguments, |
| 392 LInstruction* instr) { | 388 LInstruction* instr) { |
| 393 ASSERT(instr != NULL); | 389 ASSERT(instr != NULL); |
| 390 ASSERT(instr->HasPointerMap()); |
| 394 LPointerMap* pointers = instr->pointer_map(); | 391 LPointerMap* pointers = instr->pointer_map(); |
| 395 ASSERT(pointers != NULL); | |
| 396 RecordPosition(pointers->position()); | 392 RecordPosition(pointers->position()); |
| 397 | 393 |
| 398 __ CallRuntime(function, num_arguments); | 394 __ CallRuntime(function, num_arguments); |
| 399 // Runtime calls to Throw are not supposed to ever return at the | 395 RegisterLazyDeoptimization(instr); |
| 400 // call site, so don't register lazy deoptimization for these. We do | |
| 401 // however have to record a safepoint since throwing exceptions can | |
| 402 // cause garbage collections. | |
| 403 // BUG(3243555): register a lazy deoptimization point at throw. We need | |
| 404 // it to be able to inline functions containing a throw statement. | |
| 405 if (!instr->IsThrow()) { | |
| 406 RegisterLazyDeoptimization(instr); | |
| 407 } else { | |
| 408 RecordSafepoint(instr->pointer_map(), Safepoint::kNoDeoptimizationIndex); | |
| 409 } | |
| 410 } | 396 } |
| 411 | 397 |
| 412 | 398 |
| 413 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { | 399 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { |
| 414 // Create the environment to bailout to. If the call has side effects | 400 // Create the environment to bailout to. If the call has side effects |
| 415 // execution has to continue after the call otherwise execution can continue | 401 // execution has to continue after the call otherwise execution can continue |
| 416 // from a previous bailout point repeating the call. | 402 // from a previous bailout point repeating the call. |
| 417 LEnvironment* deoptimization_environment; | 403 LEnvironment* deoptimization_environment; |
| 418 if (instr->HasDeoptimizationEnvironment()) { | 404 if (instr->HasDeoptimizationEnvironment()) { |
| 419 deoptimization_environment = instr->deoptimization_environment(); | 405 deoptimization_environment = instr->deoptimization_environment(); |
| (...skipping 139 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 559 for (int i = 0, length = inlined_closures->length(); | 545 for (int i = 0, length = inlined_closures->length(); |
| 560 i < length; | 546 i < length; |
| 561 i++) { | 547 i++) { |
| 562 DefineDeoptimizationLiteral(inlined_closures->at(i)); | 548 DefineDeoptimizationLiteral(inlined_closures->at(i)); |
| 563 } | 549 } |
| 564 | 550 |
| 565 inlined_function_count_ = deoptimization_literals_.length(); | 551 inlined_function_count_ = deoptimization_literals_.length(); |
| 566 } | 552 } |
| 567 | 553 |
| 568 | 554 |
| 555 void LCodeGen::RecordSafepoint( |
| 556 LPointerMap* pointers, |
| 557 Safepoint::Kind kind, |
| 558 int arguments, |
| 559 int deoptimization_index) { |
| 560 const ZoneList<LOperand*>* operands = pointers->operands(); |
| 561 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), |
| 562 kind, arguments, deoptimization_index); |
| 563 for (int i = 0; i < operands->length(); i++) { |
| 564 LOperand* pointer = operands->at(i); |
| 565 if (pointer->IsStackSlot()) { |
| 566 safepoint.DefinePointerSlot(pointer->index()); |
| 567 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { |
| 568 safepoint.DefinePointerRegister(ToRegister(pointer)); |
| 569 } |
| 570 } |
| 571 if (kind & Safepoint::kWithRegisters) { |
| 572 // Register esi always contains a pointer to the context. |
| 573 safepoint.DefinePointerRegister(esi); |
| 574 } |
| 575 } |
| 576 |
| 577 |
| 569 void LCodeGen::RecordSafepoint(LPointerMap* pointers, | 578 void LCodeGen::RecordSafepoint(LPointerMap* pointers, |
| 570 int deoptimization_index) { | 579 int deoptimization_index) { |
| 571 const ZoneList<LOperand*>* operands = pointers->operands(); | 580 RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index); |
| 572 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), | |
| 573 deoptimization_index); | |
| 574 for (int i = 0; i < operands->length(); i++) { | |
| 575 LOperand* pointer = operands->at(i); | |
| 576 if (pointer->IsStackSlot()) { | |
| 577 safepoint.DefinePointerSlot(pointer->index()); | |
| 578 } | |
| 579 } | |
| 580 } | 581 } |
| 581 | 582 |
| 582 | 583 |
| 583 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, | 584 void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers, |
| 584 int arguments, | 585 int arguments, |
| 585 int deoptimization_index) { | 586 int deoptimization_index) { |
| 586 const ZoneList<LOperand*>* operands = pointers->operands(); | 587 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, |
| 587 Safepoint safepoint = | 588 deoptimization_index); |
| 588 safepoints_.DefineSafepointWithRegisters( | |
| 589 masm(), arguments, deoptimization_index); | |
| 590 for (int i = 0; i < operands->length(); i++) { | |
| 591 LOperand* pointer = operands->at(i); | |
| 592 if (pointer->IsStackSlot()) { | |
| 593 safepoint.DefinePointerSlot(pointer->index()); | |
| 594 } else if (pointer->IsRegister()) { | |
| 595 safepoint.DefinePointerRegister(ToRegister(pointer)); | |
| 596 } | |
| 597 } | |
| 598 // Register esi always contains a pointer to the context. | |
| 599 safepoint.DefinePointerRegister(esi); | |
| 600 } | 589 } |
| 601 | 590 |
| 602 | 591 |
| 603 void LCodeGen::RecordPosition(int position) { | 592 void LCodeGen::RecordPosition(int position) { |
| 604 if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return; | 593 if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return; |
| 605 masm()->positions_recorder()->RecordPosition(position); | 594 masm()->positions_recorder()->RecordPosition(position); |
| 606 } | 595 } |
| 607 | 596 |
| 608 | 597 |
| 609 void LCodeGen::DoLabel(LLabel* label) { | 598 void LCodeGen::DoLabel(LLabel* label) { |
| (...skipping 198 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 808 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { | 797 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { |
| 809 DeoptimizeIf(overflow, instr->environment()); | 798 DeoptimizeIf(overflow, instr->environment()); |
| 810 } | 799 } |
| 811 | 800 |
| 812 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 801 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 813 // Bail out if the result is supposed to be negative zero. | 802 // Bail out if the result is supposed to be negative zero. |
| 814 NearLabel done; | 803 NearLabel done; |
| 815 __ test(left, Operand(left)); | 804 __ test(left, Operand(left)); |
| 816 __ j(not_zero, &done); | 805 __ j(not_zero, &done); |
| 817 if (right->IsConstantOperand()) { | 806 if (right->IsConstantOperand()) { |
| 818 if (ToInteger32(LConstantOperand::cast(right)) < 0) { | 807 if (ToInteger32(LConstantOperand::cast(right)) <= 0) { |
| 819 DeoptimizeIf(no_condition, instr->environment()); | 808 DeoptimizeIf(no_condition, instr->environment()); |
| 820 } | 809 } |
| 821 } else { | 810 } else { |
| 822 // Test the non-zero operand for negative sign. | 811 // Test the non-zero operand for negative sign. |
| 823 __ or_(ToRegister(instr->TempAt(0)), ToOperand(right)); | 812 __ or_(ToRegister(instr->TempAt(0)), ToOperand(right)); |
| 824 DeoptimizeIf(sign, instr->environment()); | 813 DeoptimizeIf(sign, instr->environment()); |
| 825 } | 814 } |
| 826 __ bind(&done); | 815 __ bind(&done); |
| 827 } | 816 } |
| 828 } | 817 } |
| (...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 949 | 938 |
| 950 void LCodeGen::DoConstantD(LConstantD* instr) { | 939 void LCodeGen::DoConstantD(LConstantD* instr) { |
| 951 ASSERT(instr->result()->IsDoubleRegister()); | 940 ASSERT(instr->result()->IsDoubleRegister()); |
| 952 XMMRegister res = ToDoubleRegister(instr->result()); | 941 XMMRegister res = ToDoubleRegister(instr->result()); |
| 953 double v = instr->value(); | 942 double v = instr->value(); |
| 954 // Use xor to produce +0.0 in a fast and compact way, but avoid to | 943 // Use xor to produce +0.0 in a fast and compact way, but avoid to |
| 955 // do so if the constant is -0.0. | 944 // do so if the constant is -0.0. |
| 956 if (BitCast<uint64_t, double>(v) == 0) { | 945 if (BitCast<uint64_t, double>(v) == 0) { |
| 957 __ xorpd(res, res); | 946 __ xorpd(res, res); |
| 958 } else { | 947 } else { |
| 959 int32_t v_int32 = static_cast<int32_t>(v); | 948 Register temp = ToRegister(instr->TempAt(0)); |
| 960 if (static_cast<double>(v_int32) == v) { | 949 uint64_t int_val = BitCast<uint64_t, double>(v); |
| 961 __ push_imm32(v_int32); | 950 int32_t lower = static_cast<int32_t>(int_val); |
| 962 __ cvtsi2sd(res, Operand(esp, 0)); | 951 int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt)); |
| 963 __ add(Operand(esp), Immediate(kPointerSize)); | 952 if (Isolate::Current()->cpu_features()->IsSupported(SSE4_1)) { |
| 953 CpuFeatures::Scope scope(SSE4_1); |
| 954 if (lower != 0) { |
| 955 __ Set(temp, Immediate(lower)); |
| 956 __ movd(res, Operand(temp)); |
| 957 __ Set(temp, Immediate(upper)); |
| 958 __ pinsrd(res, Operand(temp), 1); |
| 959 } else { |
| 960 __ xorpd(res, res); |
| 961 __ Set(temp, Immediate(upper)); |
| 962 __ pinsrd(res, Operand(temp), 1); |
| 963 } |
| 964 } else { | 964 } else { |
| 965 uint64_t int_val = BitCast<uint64_t, double>(v); | 965 __ Set(temp, Immediate(upper)); |
| 966 int32_t lower = static_cast<int32_t>(int_val); | 966 __ movd(res, Operand(temp)); |
| 967 int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt)); | 967 __ psllq(res, 32); |
| 968 __ push_imm32(upper); | 968 if (lower != 0) { |
| 969 __ push_imm32(lower); | 969 __ Set(temp, Immediate(lower)); |
| 970 __ movdbl(res, Operand(esp, 0)); | 970 __ movd(xmm0, Operand(temp)); |
| 971 __ add(Operand(esp), Immediate(2 * kPointerSize)); | 971 __ por(res, xmm0); |
| 972 } |
| 972 } | 973 } |
| 973 } | 974 } |
| 974 } | 975 } |
| 975 | 976 |
| 976 | 977 |
| 977 void LCodeGen::DoConstantT(LConstantT* instr) { | 978 void LCodeGen::DoConstantT(LConstantT* instr) { |
| 978 ASSERT(instr->result()->IsRegister()); | 979 ASSERT(instr->result()->IsRegister()); |
| 979 __ Set(ToRegister(instr->result()), Immediate(instr->value())); | 980 __ Set(ToRegister(instr->result()), Immediate(instr->value())); |
| 980 } | 981 } |
| 981 | 982 |
| (...skipping 903 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1885 | 1886 |
| 1886 void LCodeGen::DoReturn(LReturn* instr) { | 1887 void LCodeGen::DoReturn(LReturn* instr) { |
| 1887 if (FLAG_trace) { | 1888 if (FLAG_trace) { |
| 1888 // Preserve the return value on the stack and rely on the runtime | 1889 // Preserve the return value on the stack and rely on the runtime |
| 1889 // call to return the value in the same register. | 1890 // call to return the value in the same register. |
| 1890 __ push(eax); | 1891 __ push(eax); |
| 1891 __ CallRuntime(Runtime::kTraceExit, 1); | 1892 __ CallRuntime(Runtime::kTraceExit, 1); |
| 1892 } | 1893 } |
| 1893 __ mov(esp, ebp); | 1894 __ mov(esp, ebp); |
| 1894 __ pop(ebp); | 1895 __ pop(ebp); |
| 1895 __ ret((ParameterCount() + 1) * kPointerSize); | 1896 __ Ret((ParameterCount() + 1) * kPointerSize, ecx); |
| 1896 } | 1897 } |
| 1897 | 1898 |
| 1898 | 1899 |
| 1899 void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) { | 1900 void LCodeGen::DoLoadGlobal(LLoadGlobal* instr) { |
| 1900 Register result = ToRegister(instr->result()); | 1901 Register result = ToRegister(instr->result()); |
| 1901 __ mov(result, Operand::Cell(instr->hydrogen()->cell())); | 1902 __ mov(result, Operand::Cell(instr->hydrogen()->cell())); |
| 1902 if (instr->hydrogen()->check_hole_value()) { | 1903 if (instr->hydrogen()->check_hole_value()) { |
| 1903 __ cmp(result, FACTORY->the_hole_value()); | 1904 __ cmp(result, FACTORY->the_hole_value()); |
| 1904 DeoptimizeIf(equal, instr->environment()); | 1905 DeoptimizeIf(equal, instr->environment()); |
| 1905 } | 1906 } |
| 1906 } | 1907 } |
| 1907 | 1908 |
| 1908 | 1909 |
| 1909 void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) { | 1910 void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) { |
| 1910 Register value = ToRegister(instr->InputAt(0)); | 1911 Register value = ToRegister(instr->InputAt(0)); |
| 1911 __ mov(Operand::Cell(instr->hydrogen()->cell()), value); | 1912 Operand cell_operand = Operand::Cell(instr->hydrogen()->cell()); |
| 1913 |
| 1914 // If the cell we are storing to contains the hole it could have |
| 1915 // been deleted from the property dictionary. In that case, we need |
| 1916 // to update the property details in the property dictionary to mark |
| 1917 // it as no longer deleted. We deoptimize in that case. |
| 1918 if (instr->hydrogen()->check_hole_value()) { |
| 1919 __ cmp(cell_operand, FACTORY->the_hole_value()); |
| 1920 DeoptimizeIf(equal, instr->environment()); |
| 1921 } |
| 1922 |
| 1923 // Store the value. |
| 1924 __ mov(cell_operand, value); |
| 1912 } | 1925 } |
| 1913 | 1926 |
| 1914 | 1927 |
| 1915 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { | 1928 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { |
| 1916 // TODO(antonm): load a context with a separate instruction. | 1929 Register context = ToRegister(instr->context()); |
| 1917 Register result = ToRegister(instr->result()); | 1930 Register result = ToRegister(instr->result()); |
| 1918 __ LoadContext(result, instr->context_chain_length()); | 1931 __ mov(result, ContextOperand(context, instr->slot_index())); |
| 1919 __ mov(result, ContextOperand(result, instr->slot_index())); | 1932 } |
| 1933 |
| 1934 |
| 1935 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { |
| 1936 Register context = ToRegister(instr->context()); |
| 1937 Register value = ToRegister(instr->value()); |
| 1938 __ mov(ContextOperand(context, instr->slot_index()), value); |
| 1939 if (instr->needs_write_barrier()) { |
| 1940 Register temp = ToRegister(instr->TempAt(0)); |
| 1941 int offset = Context::SlotOffset(instr->slot_index()); |
| 1942 __ RecordWrite(context, offset, value, temp); |
| 1943 } |
| 1920 } | 1944 } |
| 1921 | 1945 |
| 1922 | 1946 |
| 1923 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { | 1947 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { |
| 1924 Register object = ToRegister(instr->InputAt(0)); | 1948 Register object = ToRegister(instr->InputAt(0)); |
| 1925 Register result = ToRegister(instr->result()); | 1949 Register result = ToRegister(instr->result()); |
| 1926 if (instr->hydrogen()->is_in_object()) { | 1950 if (instr->hydrogen()->is_in_object()) { |
| 1927 __ mov(result, FieldOperand(object, instr->hydrogen()->offset())); | 1951 __ mov(result, FieldOperand(object, instr->hydrogen()->offset())); |
| 1928 } else { | 1952 } else { |
| 1929 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset)); | 1953 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset)); |
| (...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2017 } | 2041 } |
| 2018 | 2042 |
| 2019 | 2043 |
| 2020 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) { | 2044 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) { |
| 2021 Register elements = ToRegister(instr->elements()); | 2045 Register elements = ToRegister(instr->elements()); |
| 2022 Register key = ToRegister(instr->key()); | 2046 Register key = ToRegister(instr->key()); |
| 2023 Register result = ToRegister(instr->result()); | 2047 Register result = ToRegister(instr->result()); |
| 2024 ASSERT(result.is(elements)); | 2048 ASSERT(result.is(elements)); |
| 2025 | 2049 |
| 2026 // Load the result. | 2050 // Load the result. |
| 2027 __ mov(result, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize)); | 2051 __ mov(result, FieldOperand(elements, |
| 2052 key, |
| 2053 times_pointer_size, |
| 2054 FixedArray::kHeaderSize)); |
| 2028 | 2055 |
| 2029 // Check for the hole value. | 2056 // Check for the hole value. |
| 2030 __ cmp(result, FACTORY->the_hole_value()); | 2057 __ cmp(result, FACTORY->the_hole_value()); |
| 2031 DeoptimizeIf(equal, instr->environment()); | 2058 DeoptimizeIf(equal, instr->environment()); |
| 2032 } | 2059 } |
| 2033 | 2060 |
| 2034 | 2061 |
| 2035 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { | 2062 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { |
| 2036 ASSERT(ToRegister(instr->object()).is(edx)); | 2063 ASSERT(ToRegister(instr->object()).is(edx)); |
| 2037 ASSERT(ToRegister(instr->key()).is(eax)); | 2064 ASSERT(ToRegister(instr->key()).is(eax)); |
| (...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2125 // length is a small non-negative integer, due to the test above. | 2152 // length is a small non-negative integer, due to the test above. |
| 2126 __ test(length, Operand(length)); | 2153 __ test(length, Operand(length)); |
| 2127 __ j(zero, &invoke); | 2154 __ j(zero, &invoke); |
| 2128 __ bind(&loop); | 2155 __ bind(&loop); |
| 2129 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); | 2156 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); |
| 2130 __ dec(length); | 2157 __ dec(length); |
| 2131 __ j(not_zero, &loop); | 2158 __ j(not_zero, &loop); |
| 2132 | 2159 |
| 2133 // Invoke the function. | 2160 // Invoke the function. |
| 2134 __ bind(&invoke); | 2161 __ bind(&invoke); |
| 2162 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); |
| 2163 LPointerMap* pointers = instr->pointer_map(); |
| 2164 LEnvironment* env = instr->deoptimization_environment(); |
| 2165 RecordPosition(pointers->position()); |
| 2166 RegisterEnvironmentForDeoptimization(env); |
| 2167 SafepointGenerator safepoint_generator(this, |
| 2168 pointers, |
| 2169 env->deoptimization_index()); |
| 2135 ASSERT(receiver.is(eax)); | 2170 ASSERT(receiver.is(eax)); |
| 2136 v8::internal::ParameterCount actual(eax); | 2171 v8::internal::ParameterCount actual(eax); |
| 2137 SafepointGenerator safepoint_generator(this, | |
| 2138 instr->pointer_map(), | |
| 2139 Safepoint::kNoDeoptimizationIndex); | |
| 2140 __ InvokeFunction(edi, actual, CALL_FUNCTION, &safepoint_generator); | 2172 __ InvokeFunction(edi, actual, CALL_FUNCTION, &safepoint_generator); |
| 2173 |
| 2174 // Restore context. |
| 2175 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 2141 } | 2176 } |
| 2142 | 2177 |
| 2143 | 2178 |
| 2144 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 2179 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
| 2145 LOperand* argument = instr->InputAt(0); | 2180 LOperand* argument = instr->InputAt(0); |
| 2146 if (argument->IsConstantOperand()) { | 2181 if (argument->IsConstantOperand()) { |
| 2147 __ push(ToImmediate(argument)); | 2182 __ push(ToImmediate(argument)); |
| 2148 } else { | 2183 } else { |
| 2149 __ push(ToOperand(argument)); | 2184 __ push(ToOperand(argument)); |
| 2150 } | 2185 } |
| 2151 } | 2186 } |
| 2152 | 2187 |
| 2153 | 2188 |
| 2189 void LCodeGen::DoContext(LContext* instr) { |
| 2190 Register result = ToRegister(instr->result()); |
| 2191 __ mov(result, esi); |
| 2192 } |
| 2193 |
| 2194 |
| 2195 void LCodeGen::DoOuterContext(LOuterContext* instr) { |
| 2196 Register context = ToRegister(instr->context()); |
| 2197 Register result = ToRegister(instr->result()); |
| 2198 __ mov(result, Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX))); |
| 2199 __ mov(result, FieldOperand(result, JSFunction::kContextOffset)); |
| 2200 } |
| 2201 |
| 2202 |
| 2154 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { | 2203 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { |
| 2204 Register context = ToRegister(instr->context()); |
| 2155 Register result = ToRegister(instr->result()); | 2205 Register result = ToRegister(instr->result()); |
| 2156 __ mov(result, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 2206 __ mov(result, Operand(context, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 2157 } | 2207 } |
| 2158 | 2208 |
| 2159 | 2209 |
| 2160 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { | 2210 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { |
| 2211 Register global = ToRegister(instr->global()); |
| 2161 Register result = ToRegister(instr->result()); | 2212 Register result = ToRegister(instr->result()); |
| 2162 __ mov(result, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 2213 __ mov(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset)); |
| 2163 __ mov(result, FieldOperand(result, GlobalObject::kGlobalReceiverOffset)); | |
| 2164 } | 2214 } |
| 2165 | 2215 |
| 2166 | 2216 |
| 2167 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, | 2217 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, |
| 2168 int arity, | 2218 int arity, |
| 2169 LInstruction* instr) { | 2219 LInstruction* instr) { |
| 2170 // Change context if needed. | 2220 // Change context if needed. |
| 2171 bool change_context = | 2221 bool change_context = |
| 2172 (graph()->info()->closure()->context() != function->context()) || | 2222 (graph()->info()->closure()->context() != function->context()) || |
| 2173 scope()->contains_with() || | 2223 scope()->contains_with() || |
| (...skipping 203 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2377 | 2427 |
| 2378 void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) { | 2428 void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) { |
| 2379 XMMRegister xmm_scratch = xmm0; | 2429 XMMRegister xmm_scratch = xmm0; |
| 2380 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); | 2430 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); |
| 2381 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); | 2431 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); |
| 2382 ExternalReference negative_infinity = | 2432 ExternalReference negative_infinity = |
| 2383 ExternalReference::address_of_negative_infinity(); | 2433 ExternalReference::address_of_negative_infinity(); |
| 2384 __ movdbl(xmm_scratch, Operand::StaticVariable(negative_infinity)); | 2434 __ movdbl(xmm_scratch, Operand::StaticVariable(negative_infinity)); |
| 2385 __ ucomisd(xmm_scratch, input_reg); | 2435 __ ucomisd(xmm_scratch, input_reg); |
| 2386 DeoptimizeIf(equal, instr->environment()); | 2436 DeoptimizeIf(equal, instr->environment()); |
| 2437 __ xorpd(xmm_scratch, xmm_scratch); |
| 2438 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0. |
| 2387 __ sqrtsd(input_reg, input_reg); | 2439 __ sqrtsd(input_reg, input_reg); |
| 2388 } | 2440 } |
| 2389 | 2441 |
| 2390 | 2442 |
| 2391 void LCodeGen::DoPower(LPower* instr) { | 2443 void LCodeGen::DoPower(LPower* instr) { |
| 2392 LOperand* left = instr->InputAt(0); | 2444 LOperand* left = instr->InputAt(0); |
| 2393 LOperand* right = instr->InputAt(1); | 2445 LOperand* right = instr->InputAt(1); |
| 2394 DoubleRegister result_reg = ToDoubleRegister(instr->result()); | 2446 DoubleRegister result_reg = ToDoubleRegister(instr->result()); |
| 2395 Representation exponent_type = instr->hydrogen()->right()->representation(); | 2447 Representation exponent_type = instr->hydrogen()->right()->representation(); |
| 2396 if (exponent_type.IsDouble()) { | 2448 if (exponent_type.IsDouble()) { |
| (...skipping 227 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2624 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg; | 2676 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg; |
| 2625 | 2677 |
| 2626 // Do the store. | 2678 // Do the store. |
| 2627 if (instr->key()->IsConstantOperand()) { | 2679 if (instr->key()->IsConstantOperand()) { |
| 2628 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); | 2680 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); |
| 2629 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); | 2681 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); |
| 2630 int offset = | 2682 int offset = |
| 2631 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize; | 2683 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize; |
| 2632 __ mov(FieldOperand(elements, offset), value); | 2684 __ mov(FieldOperand(elements, offset), value); |
| 2633 } else { | 2685 } else { |
| 2634 __ mov(FieldOperand(elements, key, times_4, FixedArray::kHeaderSize), | 2686 __ mov(FieldOperand(elements, |
| 2687 key, |
| 2688 times_pointer_size, |
| 2689 FixedArray::kHeaderSize), |
| 2635 value); | 2690 value); |
| 2636 } | 2691 } |
| 2637 | 2692 |
| 2638 if (instr->hydrogen()->NeedsWriteBarrier()) { | 2693 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 2639 // Compute address of modified element and store it into key register. | 2694 // Compute address of modified element and store it into key register. |
| 2640 __ lea(key, FieldOperand(elements, key, times_4, FixedArray::kHeaderSize)); | 2695 __ lea(key, |
| 2696 FieldOperand(elements, |
| 2697 key, |
| 2698 times_pointer_size, |
| 2699 FixedArray::kHeaderSize)); |
| 2641 __ RecordWrite(elements, key, value); | 2700 __ RecordWrite(elements, key, value); |
| 2642 } | 2701 } |
| 2643 } | 2702 } |
| 2644 | 2703 |
| 2645 | 2704 |
| 2646 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { | 2705 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { |
| 2647 ASSERT(ToRegister(instr->object()).is(edx)); | 2706 ASSERT(ToRegister(instr->object()).is(edx)); |
| 2648 ASSERT(ToRegister(instr->key()).is(ecx)); | 2707 ASSERT(ToRegister(instr->key()).is(ecx)); |
| 2649 ASSERT(ToRegister(instr->value()).is(eax)); | 2708 ASSERT(ToRegister(instr->value()).is(eax)); |
| 2650 | 2709 |
| (...skipping 748 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3399 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); | 3458 __ mov(edx, FieldOperand(ebx, size - kPointerSize)); |
| 3400 __ mov(FieldOperand(eax, size - kPointerSize), edx); | 3459 __ mov(FieldOperand(eax, size - kPointerSize), edx); |
| 3401 } | 3460 } |
| 3402 } | 3461 } |
| 3403 | 3462 |
| 3404 | 3463 |
| 3405 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 3464 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
| 3406 // Use the fast case closure allocation code that allocates in new | 3465 // Use the fast case closure allocation code that allocates in new |
| 3407 // space for nested functions that don't need literals cloning. | 3466 // space for nested functions that don't need literals cloning. |
| 3408 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); | 3467 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); |
| 3409 bool pretenure = !instr->hydrogen()->pretenure(); | 3468 bool pretenure = instr->hydrogen()->pretenure(); |
| 3410 if (shared_info->num_literals() == 0 && !pretenure) { | 3469 if (shared_info->num_literals() == 0 && !pretenure) { |
| 3411 FastNewClosureStub stub; | 3470 FastNewClosureStub stub; |
| 3412 __ push(Immediate(shared_info)); | 3471 __ push(Immediate(shared_info)); |
| 3413 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 3472 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 3414 } else { | 3473 } else { |
| 3415 __ push(esi); | 3474 __ push(esi); |
| 3416 __ push(Immediate(shared_info)); | 3475 __ push(Immediate(shared_info)); |
| 3417 __ push(Immediate(pretenure | 3476 __ push(Immediate(pretenure |
| 3418 ? FACTORY->true_value() | 3477 ? FACTORY->true_value() |
| 3419 : FACTORY->false_value())); | 3478 : FACTORY->false_value())); |
| (...skipping 121 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3541 } else { | 3600 } else { |
| 3542 final_branch_condition = not_equal; | 3601 final_branch_condition = not_equal; |
| 3543 __ jmp(false_label); | 3602 __ jmp(false_label); |
| 3544 // A dead branch instruction will be generated after this point. | 3603 // A dead branch instruction will be generated after this point. |
| 3545 } | 3604 } |
| 3546 | 3605 |
| 3547 return final_branch_condition; | 3606 return final_branch_condition; |
| 3548 } | 3607 } |
| 3549 | 3608 |
| 3550 | 3609 |
| 3610 void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) { |
| 3611 Register result = ToRegister(instr->result()); |
| 3612 NearLabel true_label; |
| 3613 NearLabel false_label; |
| 3614 NearLabel done; |
| 3615 |
| 3616 EmitIsConstructCall(result); |
| 3617 __ j(equal, &true_label); |
| 3618 |
| 3619 __ mov(result, FACTORY->false_value()); |
| 3620 __ jmp(&done); |
| 3621 |
| 3622 __ bind(&true_label); |
| 3623 __ mov(result, FACTORY->true_value()); |
| 3624 |
| 3625 __ bind(&done); |
| 3626 } |
| 3627 |
| 3628 |
| 3629 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) { |
| 3630 Register temp = ToRegister(instr->TempAt(0)); |
| 3631 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 3632 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 3633 |
| 3634 EmitIsConstructCall(temp); |
| 3635 EmitBranch(true_block, false_block, equal); |
| 3636 } |
| 3637 |
| 3638 |
// Sets the processor flags so that 'equal' holds exactly when the calling
// frame is a construct frame (entered via 'new'). Clobbers |temp|; emits
// no branch itself — callers test the flags afterwards.
void LCodeGen::EmitIsConstructCall(Register temp) {
  // Get the frame pointer for the calling frame.
  __ mov(temp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  NearLabel check_frame_marker;
  __ cmp(Operand(temp, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker);
  // Adaptor frame present: look one frame further up the stack.
  __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame; leaves the comparison result
  // in the flags for the caller to branch on.
  __ bind(&check_frame_marker);
  __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
}
| 3655 |
| 3656 |
// Intentionally emits nothing: the instruction exists only so that the
// environment after the preceding call is captured for safepoint/deopt data.
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code for lazy bailout instruction. Used to capture environment after a
  // call for populating the safepoint data with deoptimization data.
}
| 3555 | 3661 |
| 3556 | 3662 |
// Unconditional deoptimization: 'no_condition' makes DeoptimizeIf always
// jump to the deoptimization entry for this instruction's environment.
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(no_condition, instr->environment());
}
| 3560 | 3666 |
| 3561 | 3667 |
// Emits a call to the DELETE builtin to delete a property from an object,
// recording a safepoint with a real deoptimization index at the call site.
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  LOperand* obj = instr->object();
  LOperand* key = instr->key();
  // Push the receiver and the key as arguments for the builtin.
  __ push(ToOperand(obj));
  if (key->IsConstantOperand()) {
    __ push(ToImmediate(key));
  } else {
    __ push(ToOperand(key));
  }
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  LEnvironment* env = instr->deoptimization_environment();
  RecordPosition(pointers->position());
  // Register the environment so the safepoint recorded in the middle of the
  // invoke sequence carries this call's deoptimization index instead of
  // Safepoint::kNoDeoptimizationIndex.
  RegisterEnvironmentForDeoptimization(env);
  SafepointGenerator safepoint_generator(this,
                                         pointers,
                                         env->deoptimization_index());
  // SafepointGenerator records the safepoint after the builtin call is
  // emitted by the macro assembler.
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
}
| 3577 | 3687 |
| 3578 | 3688 |
| 3579 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 3689 void LCodeGen::DoStackCheck(LStackCheck* instr) { |
| 3580 // Perform stack overflow check. | 3690 // Perform stack overflow check. |
| 3581 NearLabel done; | 3691 NearLabel done; |
| 3582 ExternalReference stack_limit = ExternalReference::address_of_stack_limit(); | 3692 ExternalReference stack_limit = ExternalReference::address_of_stack_limit(); |
| 3583 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 3693 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
| 3584 __ j(above_equal, &done); | 3694 __ j(above_equal, &done); |
| (...skipping 19 matching lines...) Expand all Loading... |
| 3604 ASSERT(osr_pc_offset_ == -1); | 3714 ASSERT(osr_pc_offset_ == -1); |
| 3605 osr_pc_offset_ = masm()->pc_offset(); | 3715 osr_pc_offset_ = masm()->pc_offset(); |
| 3606 } | 3716 } |
| 3607 | 3717 |
| 3608 | 3718 |
| 3609 #undef __ | 3719 #undef __ |
| 3610 | 3720 |
| 3611 } } // namespace v8::internal | 3721 } } // namespace v8::internal |
| 3612 | 3722 |
| 3613 #endif // V8_TARGET_ARCH_IA32 | 3723 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |