| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. | 11 // with the distribution. |
| 12 // * Neither the name of Google Inc. nor the names of its | 12 // * Neither the name of Google Inc. nor the names of its |
| 13 // contributors may be used to endorse or promote products derived | 13 // contributors may be used to endorse or promote products derived |
| 14 // from this software without specific prior written permission. | 14 // from this software without specific prior written permission. |
| 15 // | 15 // |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include "v8.h" | 28 #include "v8.h" |
| 29 | 29 |
| 30 #if V8_TARGET_ARCH_A64 | 30 #if V8_TARGET_ARCH_ARM64 |
| 31 | 31 |
| 32 #include "code-stubs.h" | 32 #include "code-stubs.h" |
| 33 #include "codegen.h" | 33 #include "codegen.h" |
| 34 #include "compiler.h" | 34 #include "compiler.h" |
| 35 #include "debug.h" | 35 #include "debug.h" |
| 36 #include "full-codegen.h" | 36 #include "full-codegen.h" |
| 37 #include "isolate-inl.h" | 37 #include "isolate-inl.h" |
| 38 #include "parser.h" | 38 #include "parser.h" |
| 39 #include "scopes.h" | 39 #include "scopes.h" |
| 40 #include "stub-cache.h" | 40 #include "stub-cache.h" |
| 41 | 41 |
| 42 #include "a64/code-stubs-a64.h" | 42 #include "arm64/code-stubs-arm64.h" |
| 43 #include "a64/macro-assembler-a64.h" | 43 #include "arm64/macro-assembler-arm64.h" |
| 44 | 44 |
| 45 namespace v8 { | 45 namespace v8 { |
| 46 namespace internal { | 46 namespace internal { |
| 47 | 47 |
| 48 #define __ ACCESS_MASM(masm_) | 48 #define __ ACCESS_MASM(masm_) |
| 49 | 49 |
| 50 class JumpPatchSite BASE_EMBEDDED { | 50 class JumpPatchSite BASE_EMBEDDED { |
| 51 public: | 51 public: |
| 52 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) { | 52 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) { |
| 53 #ifdef DEBUG | 53 #ifdef DEBUG |
| 54 info_emitted_ = false; | 54 info_emitted_ = false; |
| 55 #endif | 55 #endif |
| 56 } | 56 } |
| 57 | 57 |
| 58 ~JumpPatchSite() { | 58 ~JumpPatchSite() { |
| 59 if (patch_site_.is_bound()) { | 59 if (patch_site_.is_bound()) { |
| 60 ASSERT(info_emitted_); | 60 ASSERT(info_emitted_); |
| 61 } else { | 61 } else { |
| 62 ASSERT(reg_.IsNone()); | 62 ASSERT(reg_.IsNone()); |
| 63 } | 63 } |
| 64 } | 64 } |
| 65 | 65 |
| 66 void EmitJumpIfNotSmi(Register reg, Label* target) { | 66 void EmitJumpIfNotSmi(Register reg, Label* target) { |
| 67 // This code will be patched by PatchInlinedSmiCode, in ic-a64.cc. | 67 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc. |
| 68 InstructionAccurateScope scope(masm_, 1); | 68 InstructionAccurateScope scope(masm_, 1); |
| 69 ASSERT(!info_emitted_); | 69 ASSERT(!info_emitted_); |
| 70 ASSERT(reg.Is64Bits()); | 70 ASSERT(reg.Is64Bits()); |
| 71 ASSERT(!reg.Is(csp)); | 71 ASSERT(!reg.Is(csp)); |
| 72 reg_ = reg; | 72 reg_ = reg; |
| 73 __ bind(&patch_site_); | 73 __ bind(&patch_site_); |
| 74 __ tbz(xzr, 0, target); // Always taken before patched. | 74 __ tbz(xzr, 0, target); // Always taken before patched. |
| 75 } | 75 } |
| 76 | 76 |
| 77 void EmitJumpIfSmi(Register reg, Label* target) { | 77 void EmitJumpIfSmi(Register reg, Label* target) { |
| 78 // This code will be patched by PatchInlinedSmiCode, in ic-a64.cc. | 78 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc. |
| 79 InstructionAccurateScope scope(masm_, 1); | 79 InstructionAccurateScope scope(masm_, 1); |
| 80 ASSERT(!info_emitted_); | 80 ASSERT(!info_emitted_); |
| 81 ASSERT(reg.Is64Bits()); | 81 ASSERT(reg.Is64Bits()); |
| 82 ASSERT(!reg.Is(csp)); | 82 ASSERT(!reg.Is(csp)); |
| 83 reg_ = reg; | 83 reg_ = reg; |
| 84 __ bind(&patch_site_); | 84 __ bind(&patch_site_); |
| 85 __ tbnz(xzr, 0, target); // Never taken before patched. | 85 __ tbnz(xzr, 0, target); // Never taken before patched. |
| 86 } | 86 } |
| 87 | 87 |
| 88 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) { | 88 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) { |
| (...skipping 318 matching lines...) |
| 407 __ B(pl, &ok); | 407 __ B(pl, &ok); |
| 408 __ Push(x0); | 408 __ Push(x0); |
| 409 __ Call(isolate()->builtins()->InterruptCheck(), | 409 __ Call(isolate()->builtins()->InterruptCheck(), |
| 410 RelocInfo::CODE_TARGET); | 410 RelocInfo::CODE_TARGET); |
| 411 __ Pop(x0); | 411 __ Pop(x0); |
| 412 EmitProfilingCounterReset(); | 412 EmitProfilingCounterReset(); |
| 413 __ Bind(&ok); | 413 __ Bind(&ok); |
| 414 | 414 |
| 415 // Make sure that the constant pool is not emitted inside of the return | 415 // Make sure that the constant pool is not emitted inside of the return |
| 416 // sequence. This sequence can get patched when the debugger is used. See | 416 // sequence. This sequence can get patched when the debugger is used. See |
| 417 // debug-a64.cc:BreakLocationIterator::SetDebugBreakAtReturn(). | 417 // debug-arm64.cc:BreakLocationIterator::SetDebugBreakAtReturn(). |
| 418 { | 418 { |
| 419 InstructionAccurateScope scope(masm_, | 419 InstructionAccurateScope scope(masm_, |
| 420 Assembler::kJSRetSequenceInstructions); | 420 Assembler::kJSRetSequenceInstructions); |
| 421 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); | 421 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); |
| 422 __ RecordJSReturn(); | 422 __ RecordJSReturn(); |
| 423 // This code is generated using Assembler methods rather than Macro | 423 // This code is generated using Assembler methods rather than Macro |
| 424 // Assembler methods because it will be patched later on, and so the size | 424 // Assembler methods because it will be patched later on, and so the size |
| 425 // of the generated code must be consistent. | 425 // of the generated code must be consistent. |
| 426 const Register& current_sp = __ StackPointer(); | 426 const Register& current_sp = __ StackPointer(); |
| 427 // Nothing ensures 16 bytes alignment here. | 427 // Nothing ensures 16 bytes alignment here. |
| (...skipping 3865 matching lines...) |
| 4293 context()->Plug(if_true, if_false); | 4293 context()->Plug(if_true, if_false); |
| 4294 } | 4294 } |
| 4295 | 4295 |
| 4296 | 4296 |
| 4297 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { | 4297 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { |
| 4298 Comment cmnt(masm_, "[ CompareOperation"); | 4298 Comment cmnt(masm_, "[ CompareOperation"); |
| 4299 SetSourcePosition(expr->position()); | 4299 SetSourcePosition(expr->position()); |
| 4300 | 4300 |
| 4301 // Try to generate an optimized comparison with a literal value. | 4301 // Try to generate an optimized comparison with a literal value. |
| 4302 // TODO(jbramley): This only checks common values like NaN or undefined. | 4302 // TODO(jbramley): This only checks common values like NaN or undefined. |
| 4303 // Should it also handle A64 immediate operands? | 4303 // Should it also handle ARM64 immediate operands? |
| 4304 if (TryLiteralCompare(expr)) { | 4304 if (TryLiteralCompare(expr)) { |
| 4305 return; | 4305 return; |
| 4306 } | 4306 } |
| 4307 | 4307 |
| 4308 // Assign labels according to context()->PrepareTest. | 4308 // Assign labels according to context()->PrepareTest. |
| 4309 Label materialize_true; | 4309 Label materialize_true; |
| 4310 Label materialize_false; | 4310 Label materialize_false; |
| 4311 Label* if_true = NULL; | 4311 Label* if_true = NULL; |
| 4312 Label* if_false = NULL; | 4312 Label* if_false = NULL; |
| 4313 Label* fall_through = NULL; | 4313 Label* fall_through = NULL; |
| (...skipping 658 matching lines...) |
| 4972 *context_length = 0; | 4972 *context_length = 0; |
| 4973 return previous_; | 4973 return previous_; |
| 4974 } | 4974 } |
| 4975 | 4975 |
| 4976 | 4976 |
| 4977 #undef __ | 4977 #undef __ |
| 4978 | 4978 |
| 4979 | 4979 |
| 4980 } } // namespace v8::internal | 4980 } } // namespace v8::internal |
| 4981 | 4981 |
| 4982 #endif // V8_TARGET_ARCH_A64 | 4982 #endif // V8_TARGET_ARCH_ARM64 |
| OLD | NEW |
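
Note on the patch-site trick in the JumpPatchSite hunk above: EmitJumpIfSmi deliberately emits "tbnz xzr, #0, target" (never taken, since bit 0 of xzr always reads as 0) and EmitJumpIfNotSmi emits "tbz xzr, #0, target" (always taken); PatchInlinedSmiCode in ic-arm64.cc later rewrites the instruction to test the tag bit of the recorded register instead. Below is a minimal, self-contained C++ model of that flip. It is not V8 code: the Op, PatchSite, Read, and Taken names are invented for illustration, and it assumes the standard ARM64 smi tagging where bit 0 is clear for a smi.

#include <cassert>
#include <cstdint>

enum class Op { TBZ, TBNZ };  // test-bit-and-branch if zero / if not zero

struct PatchSite {
  Op op;
  int reg;  // -1 models xzr, which is hard-wired to read as zero
};

// Value the branch observes in 'reg' for a given operand value.
uint64_t Read(int reg, uint64_t value) { return reg < 0 ? 0 : value; }

// Does the test-bit branch fire on bit 0 of the observed value?
bool Taken(const PatchSite& site, uint64_t value) {
  bool bit0 = (Read(site.reg, value) & 1) != 0;
  return site.op == Op::TBZ ? !bit0 : bit0;
}

int main() {
  // As emitted by EmitJumpIfSmi: "tbnz xzr, #0" -- never taken.
  PatchSite site{Op::TBNZ, /*reg=*/-1};
  assert(!Taken(site, 0x10) && !Taken(site, 0x11));

  // After patching: "tbz x0, #0" -- taken exactly when the value is a smi.
  site = {Op::TBZ, /*reg=*/0};
  assert(Taken(site, 0x10));   // tag bit clear -> smi -> branch
  assert(!Taken(site, 0x11));  // tag bit set -> heap object -> fall through
  return 0;
}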
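Similarly, the return sequence in the second hunk is emitted inside an InstructionAccurateScope using raw Assembler methods so that its size stays fixed and debug-arm64.cc can patch it in place. A rough sketch of that idea, with an invented FixedSizeScope guard and toy Assembler standing in for V8's InstructionAccurateScope and MacroAssembler:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Toy assembler: every Emit() appends one fixed-width 4-byte instruction.
struct Assembler {
  std::vector<uint32_t> buffer;
  void Emit(uint32_t insn) { buffer.push_back(insn); }
};

// RAII guard: on destruction, checks that exactly 'count' instructions were
// emitted, so a patcher rewriting this region can rely on its size and layout.
class FixedSizeScope {
 public:
  FixedSizeScope(Assembler* masm, size_t count)
      : masm_(masm), expected_(masm->buffer.size() + count) {}
  ~FixedSizeScope() { assert(masm_->buffer.size() == expected_); }

 private:
  Assembler* masm_;
  size_t expected_;
};

int main() {
  Assembler masm;
  {
    FixedSizeScope scope(&masm, 3);  // return sequence: exactly 3 instructions
    masm.Emit(0);  // e.g. mov sp, fp
    masm.Emit(1);  // e.g. ldp fp, lr, [sp], #16
    masm.Emit(2);  // e.g. ret
  }
  return 0;
}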