Index: src/arm64/macro-assembler-arm64.cc
diff --git a/src/a64/macro-assembler-a64.cc b/src/arm64/macro-assembler-arm64.cc
similarity index 99%
rename from src/a64/macro-assembler-a64.cc
rename to src/arm64/macro-assembler-arm64.cc
index 851f10aba83fcec7687e21c62ca6a86646246bbd..d7d0ab7502308f2687beb126076c70e4e3ca1905 100644
--- a/src/a64/macro-assembler-a64.cc
+++ b/src/arm64/macro-assembler-arm64.cc
@@ -27,7 +27,7 @@

 #include "v8.h"

-#if V8_TARGET_ARCH_A64
+#if V8_TARGET_ARCH_ARM64

 #include "bootstrapper.h"
 #include "codegen.h"
@@ -1854,19 +1854,19 @@ void MacroAssembler::InitializeNewString(Register string,


 int MacroAssembler::ActivationFrameAlignment() {
-#if V8_HOST_ARCH_A64
+#if V8_HOST_ARCH_ARM64
   // Running on the real platform. Use the alignment as mandated by the local
   // environment.
   // Note: This will break if we ever start generating snapshots on one ARM
   // platform for another ARM platform with a different alignment.
   return OS::ActivationFrameAlignment();
-#else  // V8_HOST_ARCH_A64
+#else  // V8_HOST_ARCH_ARM64
   // If we are using the simulator then we should always align to the expected
   // alignment. As the simulator is used to generate snapshots we do not know
   // if the target platform will need alignment, so this is controlled from a
   // flag.
   return FLAG_sim_stack_alignment;
-#endif  // V8_HOST_ARCH_A64
+#endif  // V8_HOST_ARCH_ARM64
 }


@@ -3252,7 +3252,7 @@ void MacroAssembler::Allocate(int object_size,
   }

   // We can ignore DOUBLE_ALIGNMENT flags here because doubles and pointers have
-  // the same alignment on A64.
+  // the same alignment on ARM64.
   STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);

   // Calculate new top and bail out if new space is exhausted.
@@ -3324,7 +3324,7 @@ void MacroAssembler::Allocate(Register object_size,
   }

   // We can ignore DOUBLE_ALIGNMENT flags here because doubles and pointers have
-  // the same alignment on A64.
+  // the same alignment on ARM64.
   STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);

   // Calculate new top and bail out if new space is exhausted
@@ -3839,7 +3839,7 @@ void MacroAssembler::CheckFastObjectElements(Register map,


 // Note: The ARM version of this clobbers elements_reg, but this version does
-// not. Some uses of this in A64 assume that elements_reg will be preserved.
+// not. Some uses of this in ARM64 assume that elements_reg will be preserved.
 void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
                                                  Register key_reg,
                                                  Register elements_reg,
@@ -5025,7 +5025,7 @@ void MacroAssembler::EmitCodeAgeSequence(Assembler * assm,
   __ LoadLiteral(ip0, kCodeAgeStubEntryOffset);
   __ adr(x0, &start);
   __ br(ip0);
-  // IsCodeAgeSequence in codegen-a64.cc assumes that the code generated up
+  // IsCodeAgeSequence in codegen-arm64.cc assumes that the code generated up
   // until now (kCodeAgeStubEntryOffset) is the same for all code age sequences.
   __ AssertSizeOfCodeGeneratedSince(&start, kCodeAgeStubEntryOffset);
   if (stub) {
@@ -5168,4 +5168,4 @@ InlineSmiCheckInfo::InlineSmiCheckInfo(Address info)

 } }  // namespace v8::internal

-#endif  // V8_TARGET_ARCH_A64
+#endif  // V8_TARGET_ARCH_ARM64