Chromium Code Reviews

Diff: src/arm64/assembler-arm64.cc

Issue 260003006: Added a Isolate* parameter to Serializer::enabled(). (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebased. Feedback. Created 6 years, 7 months ago
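For context when reading the hunks below: the patch threads the current Isolate through Serializer::enabled() and Operand::NeedsRelocation(), turning the serializer check into a per-isolate query instead of a parameterless call, and every call site in the ARM64 assembler now passes its own isolate(). The stand-alone sketch below only models that shape; Isolate, Serializer and Operand here are simplified placeholders rather than the real V8 classes, and the serializer_enabled flag is assumed purely for illustration.

    // Illustrative stand-ins only -- not the real V8 classes. This models the
    // change: the serializer check is answered per isolate rather than through
    // a parameterless, effectively global query.
    #include <cassert>

    struct Isolate {
      bool serializer_enabled = false;  // assumed per-isolate flag, sketch only
    };

    struct Serializer {
      // New form used throughout the patch: enabled() takes the isolate.
      static bool enabled(Isolate* isolate) { return isolate->serializer_enabled; }
    };

    enum RelocMode { NONE64, EXTERNAL_REFERENCE, EMBEDDED_OBJECT };

    struct Operand {
      RelocMode rmode_ = NONE64;
      // Mirrors the new signature in the diff: callers supply their isolate.
      bool NeedsRelocation(Isolate* isolate) const {
        if (rmode_ == EXTERNAL_REFERENCE) return Serializer::enabled(isolate);
        return rmode_ != NONE64;
      }
    };

    int main() {
      Operand op;
      op.rmode_ = EXTERNAL_REFERENCE;

      Isolate snapshot_isolate;
      snapshot_isolate.serializer_enabled = true;
      assert(op.NeedsRelocation(&snapshot_isolate));  // serializing: keep the reloc entry

      Isolate plain_isolate;
      assert(!op.NeedsRelocation(&plain_isolate));    // not serializing: no reloc needed
      return 0;
    }

Inside the assembler itself the isolate comes from AssemblerBase, which is why the call sites in this file read NeedsRelocation(isolate()) and Serializer::enabled(isolate()).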
 1   // Copyright 2013 the V8 project authors. All rights reserved.
 2   //
 3   // Redistribution and use in source and binary forms, with or without
 4   // modification, are permitted provided that the following conditions are
 5   // met:
 6   //
 7   //     * Redistributions of source code must retain the above copyright
 8   //       notice, this list of conditions and the following disclaimer.
 9   //     * Redistributions in binary form must reproduce the above
10   //       copyright notice, this list of conditions and the following
(...skipping 253 matching lines...)
264       immediate_ = reinterpret_cast<intptr_t>(handle.location());
265       rmode_ = RelocInfo::EMBEDDED_OBJECT;
266     } else {
267       STATIC_ASSERT(sizeof(intptr_t) == sizeof(int64_t));
268       immediate_ = reinterpret_cast<intptr_t>(obj);
269       rmode_ = RelocInfo::NONE64;
270     }
271   }
272
273
274 - bool Operand::NeedsRelocation() const {
274 + bool Operand::NeedsRelocation(Isolate* isolate) const {
275     if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) {
276 -     return Serializer::enabled();
276 +     return Serializer::enabled(isolate);
277     }
278
279     return !RelocInfo::IsNone(rmode_);
280   }
281
282
283   // Assembler
284
285   Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
286       : AssemblerBase(isolate, buffer, buffer_size),
(...skipping 1609 matching lines...)
1896         Rd(rd) | ImmMoveWide(imm) | ShiftMoveWide(shift));
1897   }
1898
1899
1900   void Assembler::AddSub(const Register& rd,
1901                          const Register& rn,
1902                          const Operand& operand,
1903                          FlagsUpdate S,
1904                          AddSubOp op) {
1905     ASSERT(rd.SizeInBits() == rn.SizeInBits());
1906 -   ASSERT(!operand.NeedsRelocation());
1906 +   ASSERT(!operand.NeedsRelocation(isolate()));
1907     if (operand.IsImmediate()) {
1908       int64_t immediate = operand.immediate();
1909       ASSERT(IsImmAddSub(immediate));
1910       Instr dest_reg = (S == SetFlags) ? Rd(rd) : RdSP(rd);
1911       Emit(SF(rd) | AddSubImmediateFixed | op | Flags(S) |
1912            ImmAddSub(immediate) | dest_reg | RnSP(rn));
1913     } else if (operand.IsShiftedRegister()) {
1914       ASSERT(operand.reg().SizeInBits() == rd.SizeInBits());
1915       ASSERT(operand.shift() != ROR);
1916
(...skipping 19 matching lines...)
1936
1937
1938   void Assembler::AddSubWithCarry(const Register& rd,
1939                                   const Register& rn,
1940                                   const Operand& operand,
1941                                   FlagsUpdate S,
1942                                   AddSubWithCarryOp op) {
1943     ASSERT(rd.SizeInBits() == rn.SizeInBits());
1944     ASSERT(rd.SizeInBits() == operand.reg().SizeInBits());
1945     ASSERT(operand.IsShiftedRegister() && (operand.shift_amount() == 0));
1946 -   ASSERT(!operand.NeedsRelocation());
1946 +   ASSERT(!operand.NeedsRelocation(isolate()));
1947     Emit(SF(rd) | op | Flags(S) | Rm(operand.reg()) | Rn(rn) | Rd(rd));
1948   }
1949
1950
1951   void Assembler::hlt(int code) {
1952     ASSERT(is_uint16(code));
1953     Emit(HLT | ImmException(code));
1954   }
1955
1956
1957   void Assembler::brk(int code) {
1958     ASSERT(is_uint16(code));
1959     Emit(BRK | ImmException(code));
1960   }
1961
1962
1963   void Assembler::debug(const char* message, uint32_t code, Instr params) {
1964   #ifdef USE_SIMULATOR
1965     // Don't generate simulator specific code if we are building a snapshot, which
1966     // might be run on real hardware.
1967 -   if (!Serializer::enabled()) {
1967 +   if (!Serializer::enabled(isolate())) {
1968       // The arguments to the debug marker need to be contiguous in memory, so
1969       // make sure we don't try to emit pools.
1970       BlockPoolsScope scope(this);
1971
1972       Label start;
1973       bind(&start);
1974
1975       // Refer to instructions-arm64.h for a description of the marker and its
1976       // arguments.
1977       hlt(kImmExceptionIsDebug);
(...skipping 14 matching lines...)
1992       hlt(kImmExceptionIsDebug);
1993     }
1994   }
1995
1996
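A note on the guard in Assembler::debug() just above: simulator-only debug markers are suppressed whenever the isolate's heap is being serialized into a snapshot, since that snapshot may later run on real hardware, and with this patch the check goes through the isolate. The sketch below restates only that guard; the class, instruction encodings and emit buffer are invented for illustration, and the message/parameter words the real function emits after the marker are omitted.

    #include <cstdint>
    #include <vector>

    struct Isolate { bool serializer_enabled = false; };  // placeholder, as in the earlier sketch
    struct Serializer {
      static bool enabled(Isolate* isolate) { return isolate->serializer_enabled; }
    };

    // Invented encodings, purely for the sketch.
    static const uint32_t kSimulatorDebugMarker = 0xdeb00000;
    static const uint32_t kPlainHalt = 0xd4400000;

    class DebugSketchAssembler {
     public:
      explicit DebugSketchAssembler(Isolate* isolate) : isolate_(isolate) {}

      // Modeled on Assembler::debug(): the rich, simulator-only marker is emitted
      // only when no snapshot is being built for this isolate.
      void debug(uint32_t code) {
        if (!Serializer::enabled(isolate_)) {
          emit(kSimulatorDebugMarker | (code & 0xffff));  // only the simulator decodes this
          return;
        }
        emit(kPlainHalt | (code & 0xffff));  // snapshot-safe fallback: a plain halt
      }

      const std::vector<uint32_t>& buffer() const { return buffer_; }

     private:
      void emit(uint32_t instr) { buffer_.push_back(instr); }

      Isolate* isolate_;
      std::vector<uint32_t> buffer_;
    };

    int main() {
      Isolate snapshot_isolate;
      snapshot_isolate.serializer_enabled = true;
      DebugSketchAssembler masm(&snapshot_isolate);
      masm.debug(42);  // snapshot build: takes the halt path, not the simulator marker
      return masm.buffer().size() == 1 ? 0 : 1;
    }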
1997   void Assembler::Logical(const Register& rd,
1998                           const Register& rn,
1999                           const Operand& operand,
2000                           LogicalOp op) {
2001     ASSERT(rd.SizeInBits() == rn.SizeInBits());
2002 -   ASSERT(!operand.NeedsRelocation());
2002 +   ASSERT(!operand.NeedsRelocation(isolate()));
2003     if (operand.IsImmediate()) {
2004       int64_t immediate = operand.immediate();
2005       unsigned reg_size = rd.SizeInBits();
2006
2007       ASSERT(immediate != 0);
2008       ASSERT(immediate != -1);
2009       ASSERT(rd.Is64Bits() || is_uint32(immediate));
2010
2011       // If the operation is NOT, invert the operation and immediate.
2012       if ((op & NOT) == NOT) {
(...skipping 31 matching lines...)
2044          Rn(rn));
2045   }
2046
2047
2048   void Assembler::ConditionalCompare(const Register& rn,
2049                                      const Operand& operand,
2050                                      StatusFlags nzcv,
2051                                      Condition cond,
2052                                      ConditionalCompareOp op) {
2053     Instr ccmpop;
2054 -   ASSERT(!operand.NeedsRelocation());
2054 +   ASSERT(!operand.NeedsRelocation(isolate()));
2055     if (operand.IsImmediate()) {
2056       int64_t immediate = operand.immediate();
2057       ASSERT(IsImmConditionalCompare(immediate));
2058       ccmpop = ConditionalCompareImmediateFixed | op | ImmCondCmp(immediate);
2059     } else {
2060       ASSERT(operand.IsShiftedRegister() && (operand.shift_amount() == 0));
2061       ccmpop = ConditionalCompareRegisterFixed | op | Rm(operand.reg());
2062     }
2063     Emit(SF(rn) | ccmpop | Cond(cond) | Rn(rn) | Nzcv(nzcv));
2064   }
(...skipping 94 matching lines...)
2159   }
2160
2161
2162   void Assembler::DataProcShiftedRegister(const Register& rd,
2163                                           const Register& rn,
2164                                           const Operand& operand,
2165                                           FlagsUpdate S,
2166                                           Instr op) {
2167     ASSERT(operand.IsShiftedRegister());
2168     ASSERT(rn.Is64Bits() || (rn.Is32Bits() && is_uint5(operand.shift_amount())));
2169 -   ASSERT(!operand.NeedsRelocation());
2169 +   ASSERT(!operand.NeedsRelocation(isolate()));
2170     Emit(SF(rd) | op | Flags(S) |
2171          ShiftDP(operand.shift()) | ImmDPShift(operand.shift_amount()) |
2172          Rm(operand.reg()) | Rn(rn) | Rd(rd));
2173   }
2174
2175
2176   void Assembler::DataProcExtendedRegister(const Register& rd,
2177                                            const Register& rn,
2178                                            const Operand& operand,
2179                                            FlagsUpdate S,
2180                                            Instr op) {
2181 -   ASSERT(!operand.NeedsRelocation());
2181 +   ASSERT(!operand.NeedsRelocation(isolate()));
2182     Instr dest_reg = (S == SetFlags) ? Rd(rd) : RdSP(rd);
2183     Emit(SF(rd) | op | Flags(S) | Rm(operand.reg()) |
2184          ExtendMode(operand.extend()) | ImmExtendShift(operand.shift_amount()) |
2185          dest_reg | RnSP(rn));
2186   }
2187
2188
2189   bool Assembler::IsImmAddSub(int64_t immediate) {
2190     return is_uint12(immediate) ||
2191            (is_uint12(immediate >> 12) && ((immediate & 0xfff) == 0));
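As an aside on IsImmAddSub just above: an ARM64 add/sub immediate must either fit in 12 bits or be a 12-bit value shifted left by 12 (low 12 bits zero); anything else has to go through a scratch register. A quick stand-alone restatement of the predicate with a few worked values (an illustrative copy, not the V8 function itself):

    #include <cassert>
    #include <cstdint>

    // Same logic as the predicate shown above, for trying out values.
    static bool IsImmAddSubSketch(int64_t imm) {
      auto is_uint12 = [](int64_t v) { return v >= 0 && v < (int64_t{1} << 12); };
      return is_uint12(imm) || (is_uint12(imm >> 12) && ((imm & 0xfff) == 0));
    }

    int main() {
      assert(IsImmAddSubSketch(0xfff));      // plain 12-bit immediate
      assert(IsImmAddSubSketch(0xabc000));   // 12-bit immediate shifted left by 12
      assert(!IsImmAddSubSketch(0xabc123));  // needs both halves: not directly encodable
      return 0;
    }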
(...skipping 318 matching lines...)
2510       }
2511       pending_reloc_info_[num_pending_reloc_info_++] = rinfo;
2512       // Make sure the constant pool is not emitted in place of the next
2513       // instruction for which we just recorded relocation info.
2514       BlockConstPoolFor(1);
2515     }
2516
2517     if (!RelocInfo::IsNone(rmode)) {
2518       // Don't record external references unless the heap will be serialized.
2519       if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
2520 -       if (!Serializer::enabled() && !emit_debug_code()) {
2520 +       if (!Serializer::enabled(isolate()) && !emit_debug_code()) {
2521           return;
2522         }
2523       }
2524       ASSERT(buffer_space() >= kMaxRelocSize);  // too late to grow buffer here
2525       if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
2526         RelocInfo reloc_info_with_ast_id(
2527             reinterpret_cast<byte*>(pc_), rmode, RecordedAstId().ToInt(), NULL);
2528         ClearRecordedAstId();
2529         reloc_info_writer.Write(&reloc_info_with_ast_id);
2530       } else {
(...skipping 363 matching lines...)
2894       adr(rd, 0);
2895       MovInt64(scratch, target_offset);
2896       add(rd, rd, scratch);
2897     }
2898   }
2899
2900
2901   } }  // namespace v8::internal
2902
2903   #endif  // V8_TARGET_ARCH_ARM64
