Index: src/arm64/full-codegen-arm64.cc
diff --git a/src/a64/full-codegen-a64.cc b/src/arm64/full-codegen-arm64.cc
similarity index 99%
rename from src/a64/full-codegen-a64.cc
rename to src/arm64/full-codegen-arm64.cc
index b6489b21c8c26a61451def82cb52aa3e7a26c13f..8aa7e3708bd86f263dbdb3eb39663a236834ad7e 100644
--- a/src/a64/full-codegen-a64.cc
+++ b/src/arm64/full-codegen-arm64.cc
@@ -27,7 +27,7 @@
 #include "v8.h"
-#if V8_TARGET_ARCH_A64
+#if V8_TARGET_ARCH_ARM64
 #include "code-stubs.h"
 #include "codegen.h"
@@ -39,8 +39,8 @@
 #include "scopes.h"
 #include "stub-cache.h"
-#include "a64/code-stubs-a64.h"
-#include "a64/macro-assembler-a64.h"
+#include "arm64/code-stubs-arm64.h"
+#include "arm64/macro-assembler-arm64.h"
 namespace v8 {
 namespace internal {
@@ -64,7 +64,7 @@ class JumpPatchSite BASE_EMBEDDED {
 }
 void EmitJumpIfNotSmi(Register reg, Label* target) {
-  // This code will be patched by PatchInlinedSmiCode, in ic-a64.cc.
+  // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
 InstructionAccurateScope scope(masm_, 1);
 ASSERT(!info_emitted_);
 ASSERT(reg.Is64Bits());
@@ -75,7 +75,7 @@ class JumpPatchSite BASE_EMBEDDED {
 }
 void EmitJumpIfSmi(Register reg, Label* target) {
-  // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
+  // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
 InstructionAccurateScope scope(masm_, 1);
 ASSERT(!info_emitted_);
 ASSERT(reg.Is64Bits());
@@ -414,7 +414,7 @@ void FullCodeGenerator::EmitReturnSequence() {
 // Make sure that the constant pool is not emitted inside of the return
 // sequence. This sequence can get patched when the debugger is used. See
-  // debug-a64.cc:BreakLocationIterator::SetDebugBreakAtReturn().
+  // debug-arm64.cc:BreakLocationIterator::SetDebugBreakAtReturn().
 {
 InstructionAccurateScope scope(masm_,
 Assembler::kJSRetSequenceInstructions);
@@ -4300,7 +4300,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
 // Try to generate an optimized comparison with a literal value.
 // TODO(jbramley): This only checks common values like NaN or undefined.
-  // Should it also handle A64 immediate operands?
+  // Should it also handle ARM64 immediate operands?
 if (TryLiteralCompare(expr)) {
 return;
 }
@@ -4979,4 +4979,4 @@ FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
 } }  // namespace v8::internal
-#endif  // V8_TARGET_ARCH_A64
+#endif  // V8_TARGET_ARCH_ARM64