| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3593 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3604 | 3604 |
| 3605 __ bind(&done); | 3605 __ bind(&done); |
| 3606 context()->Plug(v0); | 3606 context()->Plug(v0); |
| 3607 } | 3607 } |
| 3608 | 3608 |
| 3609 | 3609 |
| 3610 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { | 3610 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { |
| 3611 ZoneList<Expression*>* args = expr->arguments(); | 3611 ZoneList<Expression*>* args = expr->arguments(); |
| 3612 ASSERT_EQ(args->length(), 1); | 3612 ASSERT_EQ(args->length(), 1); |
| 3613 | 3613 |
| 3614 // Load the argument on the stack and call the stub. | 3614 // Load the argument into a0 and call the stub. |
| 3615 VisitForStackValue(args->at(0)); | 3615 VisitForAccumulatorValue(args->at(0)); |
| 3616 __ mov(a0, result_register()); |
| 3616 | 3617 |
| 3617 NumberToStringStub stub; | 3618 NumberToStringStub stub; |
| 3618 __ CallStub(&stub); | 3619 __ CallStub(&stub); |
| 3619 context()->Plug(v0); | 3620 context()->Plug(v0); |
| 3620 } | 3621 } |
| 3621 | 3622 |
| 3622 | 3623 |
| 3623 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { | 3624 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { |
| 3624 ZoneList<Expression*>* args = expr->arguments(); | 3625 ZoneList<Expression*>* args = expr->arguments(); |
| 3625 ASSERT(args->length() == 1); | 3626 ASSERT(args->length() == 1); |
| (...skipping 1292 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4918 | 4919 |
| 4919 *stack_depth = 0; | 4920 *stack_depth = 0; |
| 4920 *context_length = 0; | 4921 *context_length = 0; |
| 4921 return previous_; | 4922 return previous_; |
| 4922 } | 4923 } |
| 4923 | 4924 |
| 4924 | 4925 |
| 4925 #undef __ | 4926 #undef __ |
| 4926 | 4927 |
| 4927 | 4928 |
| 4928 // This structure comes from FullCodeGenerator::EmitBackEdgeBookkeeping. | |
| 4929 // The back edge bookkeeping code matches the pattern: | |
| 4930 // | |
| 4931 // sltu at, sp, t0 / slt at, a3, zero_reg (in case of count based interrupts) | |
| 4932 // beq at, zero_reg, ok | |
| 4933 // lui t9, <interrupt stub address> upper | |
| 4934 // ori t9, <interrupt stub address> lower | |
| 4935 // jalr t9 | |
| 4936 // nop | |
| 4937 // ok-label ----- pc_after points here | |
| 4938 // | |
| 4939 // We patch the code to the following form: | |
| 4940 // | |
| 4941 // addiu at, zero_reg, 1 | |
| 4942 // beq at, zero_reg, ok ;; Not changed | |
| 4943 // lui t9, <on-stack replacement address> upper | |
| 4944 // ori t9, <on-stack replacement address> lower | |
| 4945 // jalr t9 ;; Not changed | |
| 4946 // nop ;; Not changed | |
| 4947 // ok-label ----- pc_after points here | |
| 4948 | |
| 4949 void BackEdgeTable::PatchAt(Code* unoptimized_code, | 4929 void BackEdgeTable::PatchAt(Code* unoptimized_code, |
| 4950 Address pc_after, | 4930 Address pc, |
| 4931 BackEdgeState target_state, |
| 4951 Code* replacement_code) { | 4932 Code* replacement_code) { |
| 4952 static const int kInstrSize = Assembler::kInstrSize; | 4933 static const int kInstrSize = Assembler::kInstrSize; |
| 4953 // Replace the sltu instruction with load-imm 1 to at, so beq is not taken. | 4934 Address branch_address = pc - 6 * kInstrSize; |
| 4954 CodePatcher patcher(pc_after - 6 * kInstrSize, 1); | 4935 CodePatcher patcher(branch_address, 1); |
| 4955 patcher.masm()->addiu(at, zero_reg, 1); | 4936 |
| 4937 switch (target_state) { |
| 4938 case INTERRUPT: |
| 4939 // slt at, a3, zero_reg (in case of count-based interrupts) |
| 4940 // beq at, zero_reg, ok |
| 4941 // lui t9, <interrupt stub address> upper |
| 4942 // ori t9, <interrupt stub address> lower |
| 4943 // jalr t9 |
| 4944 // nop |
| 4945 // ok-label ----- pc points here |
| 4946 patcher.masm()->slt(at, a3, zero_reg); |
| 4947 break; |
| 4948 case ON_STACK_REPLACEMENT: |
| 4949 case OSR_AFTER_STACK_CHECK: |
| 4950 // addiu at, zero_reg, 1 |
| 4951 // beq at, zero_reg, ok ;; Not changed |
| 4952 // lui t9, <on-stack replacement address> upper |
| 4953 // ori t9, <on-stack replacement address> lower |
| 4954 // jalr t9 ;; Not changed |
| 4955 // nop ;; Not changed |
| 4956 // ok-label ----- pc points here |
| 4957 patcher.masm()->addiu(at, zero_reg, 1); |
| 4958 break; |
| 4959 } |
| 4960 Address pc_immediate_load_address = pc - 4 * kInstrSize; |
| 4956 // Replace the stack check address in the load-immediate (lui/ori pair) | 4961 // Replace the stack check address in the load-immediate (lui/ori pair) |
| 4957 // with the entry address of the replacement code. | 4962 // with the entry address of the replacement code. |
| 4958 Assembler::set_target_address_at(pc_after - 4 * kInstrSize, | 4963 Assembler::set_target_address_at(pc_immediate_load_address, |
| 4959 replacement_code->entry()); | 4964 replacement_code->entry()); |
| 4960 | 4965 |
| 4961 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 4966 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 4962 unoptimized_code, pc_after - 4 * kInstrSize, replacement_code); | 4967 unoptimized_code, pc_immediate_load_address, replacement_code); |
| 4963 } | 4968 } |
| 4964 | 4969 |
| 4965 | 4970 |
| 4966 void BackEdgeTable::RevertAt(Code* unoptimized_code, | 4971 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( |
| 4967 Address pc_after, | 4972 Isolate* isolate, |
| 4968 Code* interrupt_code) { | 4973 Code* unoptimized_code, |
| 4974 Address pc) { |
| 4969 static const int kInstrSize = Assembler::kInstrSize; | 4975 static const int kInstrSize = Assembler::kInstrSize; |
| 4970 // Restore the sltu instruction so beq can be taken again. | 4976 Address branch_address = pc - 6 * kInstrSize; |
| 4971 CodePatcher patcher(pc_after - 6 * kInstrSize, 1); | 4977 Address pc_immediate_load_address = pc - 4 * kInstrSize; |
| 4972 patcher.masm()->slt(at, a3, zero_reg); | |
| 4973 // Restore the original call address. | |
| 4974 Assembler::set_target_address_at(pc_after - 4 * kInstrSize, | |
| 4975 interrupt_code->entry()); | |
| 4976 | 4978 |
| 4977 interrupt_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 4979 ASSERT(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize))); |
| 4978 unoptimized_code, pc_after - 4 * kInstrSize, interrupt_code); | 4980 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) { |
| 4981 ASSERT(reinterpret_cast<uint32_t>( |
| 4982 Assembler::target_address_at(pc_immediate_load_address)) == |
| 4983 reinterpret_cast<uint32_t>( |
| 4984 isolate->builtins()->InterruptCheck()->entry())); |
| 4985 return INTERRUPT; |
| 4986 } |
| 4987 |
| 4988 ASSERT(Assembler::IsAddImmediate(Assembler::instr_at(branch_address))); |
| 4989 |
| 4990 if (reinterpret_cast<uint32_t>( |
| 4991 Assembler::target_address_at(pc_immediate_load_address)) == |
| 4992 reinterpret_cast<uint32_t>( |
| 4993 isolate->builtins()->OnStackReplacement()->entry())) { |
| 4994 return ON_STACK_REPLACEMENT; |
| 4995 } |
| 4996 |
| 4997 ASSERT(reinterpret_cast<uint32_t>( |
| 4998 Assembler::target_address_at(pc_immediate_load_address)) == |
| 4999 reinterpret_cast<uint32_t>( |
| 5000 isolate->builtins()->OsrAfterStackCheck()->entry())); |
| 5001 return OSR_AFTER_STACK_CHECK; |
| 4979 } | 5002 } |
| 4980 | 5003 |
| 4981 | 5004 |
| 4982 #ifdef DEBUG | |
| 4983 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( | |
| 4984 Isolate* isolate, | |
| 4985 Code* unoptimized_code, | |
| 4986 Address pc_after) { | |
| 4987 static const int kInstrSize = Assembler::kInstrSize; | |
| 4988 ASSERT(Assembler::IsBeq(Assembler::instr_at(pc_after - 5 * kInstrSize))); | |
| 4989 if (Assembler::IsAddImmediate( | |
| 4990 Assembler::instr_at(pc_after - 6 * kInstrSize))) { | |
| 4991 Code* osr_builtin = | |
| 4992 isolate->builtins()->builtin(Builtins::kOnStackReplacement); | |
| 4993 ASSERT(reinterpret_cast<uint32_t>( | |
| 4994 Assembler::target_address_at(pc_after - 4 * kInstrSize)) == | |
| 4995 reinterpret_cast<uint32_t>(osr_builtin->entry())); | |
| 4996 return ON_STACK_REPLACEMENT; | |
| 4997 } else { | |
| 4998 // Get the interrupt stub code object to match against from cache. | |
| 4999 Code* interrupt_builtin = | |
| 5000 isolate->builtins()->builtin(Builtins::kInterruptCheck); | |
| 5001 ASSERT(reinterpret_cast<uint32_t>( | |
| 5002 Assembler::target_address_at(pc_after - 4 * kInstrSize)) == | |
| 5003 reinterpret_cast<uint32_t>(interrupt_builtin->entry())); | |
| 5004 return INTERRUPT; | |
| 5005 } | |
| 5006 } | |
| 5007 #endif // DEBUG | |
| 5008 | |
| 5009 | |
| 5010 } } // namespace v8::internal | 5005 } } // namespace v8::internal |
| 5011 | 5006 |
| 5012 #endif // V8_TARGET_ARCH_MIPS | 5007 #endif // V8_TARGET_ARCH_MIPS |
| OLD | NEW |