Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(3)

Side by Side Diff: src/arm64/assembler-arm64-inl.h

Issue 318773009: ARM64: Clean up support for explicit literal load. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_ 5 #ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_
6 #define V8_ARM64_ASSEMBLER_ARM64_INL_H_ 6 #define V8_ARM64_ASSEMBLER_ARM64_INL_H_
7 7
8 #include "src/arm64/assembler-arm64.h" 8 #include "src/arm64/assembler-arm64.h"
9 #include "src/cpu.h" 9 #include "src/cpu.h"
10 #include "src/debug.h" 10 #include "src/debug.h"
(...skipping 243 matching lines...) Expand 10 before | Expand all | Expand 10 after
254 return FPRegister::SRegFromCode(reg_code); 254 return FPRegister::SRegFromCode(reg_code);
255 } 255 }
256 256
257 257
258 inline FPRegister CPURegister::D() const { 258 inline FPRegister CPURegister::D() const {
259 ASSERT(IsValidFPRegister()); 259 ASSERT(IsValidFPRegister());
260 return FPRegister::DRegFromCode(reg_code); 260 return FPRegister::DRegFromCode(reg_code);
261 } 261 }
262 262
263 263
264 // Operand. 264 // Immediate.
265 // Default initializer is for int types
265 template<typename T> 266 template<typename T>
266 Operand::Operand(Handle<T> value) : reg_(NoReg) { 267 struct ImmediateInitializer {
267 initialize_handle(value);
268 }
269
270
271 // Default initializer is for int types
272 template<typename int_t>
273 struct OperandInitializer {
274 static const bool kIsIntType = true; 268 static const bool kIsIntType = true;
275 static inline RelocInfo::Mode rmode_for(int_t) { 269 static inline RelocInfo::Mode rmode_for(T) {
276 return sizeof(int_t) == 8 ? RelocInfo::NONE64 : RelocInfo::NONE32; 270 return sizeof(T) == 8 ? RelocInfo::NONE64 : RelocInfo::NONE32;
277 } 271 }
278 static inline int64_t immediate_for(int_t t) { 272 static inline int64_t immediate_for(T t) {
279 STATIC_ASSERT(sizeof(int_t) <= 8); 273 STATIC_ASSERT(sizeof(T) <= 8);
280 return t; 274 return t;
281 } 275 }
282 }; 276 };
283 277
284 278
285 template<> 279 template<>
286 struct OperandInitializer<Smi*> { 280 struct ImmediateInitializer<Smi*> {
287 static const bool kIsIntType = false; 281 static const bool kIsIntType = false;
288 static inline RelocInfo::Mode rmode_for(Smi* t) { 282 static inline RelocInfo::Mode rmode_for(Smi* t) {
289 return RelocInfo::NONE64; 283 return RelocInfo::NONE64;
290 } 284 }
291 static inline int64_t immediate_for(Smi* t) {; 285 static inline int64_t immediate_for(Smi* t) {;
292 return reinterpret_cast<int64_t>(t); 286 return reinterpret_cast<int64_t>(t);
293 } 287 }
294 }; 288 };
295 289
296 290
297 template<> 291 template<>
298 struct OperandInitializer<ExternalReference> { 292 struct ImmediateInitializer<ExternalReference> {
299 static const bool kIsIntType = false; 293 static const bool kIsIntType = false;
300 static inline RelocInfo::Mode rmode_for(ExternalReference t) { 294 static inline RelocInfo::Mode rmode_for(ExternalReference t) {
301 return RelocInfo::EXTERNAL_REFERENCE; 295 return RelocInfo::EXTERNAL_REFERENCE;
302 } 296 }
303 static inline int64_t immediate_for(ExternalReference t) {; 297 static inline int64_t immediate_for(ExternalReference t) {;
304 return reinterpret_cast<int64_t>(t.address()); 298 return reinterpret_cast<int64_t>(t.address());
305 } 299 }
306 }; 300 };
307 301
308 302
// Handle-valued immediate: the reloc mode depends on the heap object the
// handle references, so defer to the out-of-line helper.
template<typename T>
Immediate::Immediate(Handle<T> value) {
  InitializeHandle(value);
}
308
// Generic immediate: both the 64-bit payload and the relocation mode are
// derived from the value's type via ImmediateInitializer.
template<typename T>
Immediate::Immediate(T t)
    : value_(ImmediateInitializer<T>::immediate_for(t)),
      rmode_(ImmediateInitializer<T>::rmode_for(t)) {}
313
314
// Immediate with an explicitly supplied relocation mode. Only plain
// integer types may override the mode — Smi and ExternalReference carry
// a fixed mode — hence the static assert on kIsIntType.
template<typename T>
Immediate::Immediate(T t, RelocInfo::Mode rmode)
    : value_(ImmediateInitializer<T>::immediate_for(t)),
      rmode_(rmode) {
  STATIC_ASSERT(ImmediateInitializer<T>::kIsIntType);
}
321
322
// Operand.
// Immediate operand wrapping a handle; reg_ == NoReg marks the
// immediate form (see Operand::IsImmediate()).
template<typename T>
Operand::Operand(Handle<T> value) : immediate_(value), reg_(NoReg) {}
326
327
// Immediate operand; relocation handling is delegated to Immediate.
template<typename T>
Operand::Operand(T t) : immediate_(t), reg_(NoReg) {}
314 330
315 331
// Immediate operand with an explicit relocation mode; the Immediate
// constructor statically rejects non-integer T for this form.
template<typename T>
Operand::Operand(T t, RelocInfo::Mode rmode)
    : immediate_(t, rmode),
      reg_(NoReg) {}
323 336
324 337
// Shifted-register operand (e.g. "x0, LSL #2"). The immediate slot is
// unused and zero-initialized.
Operand::Operand(Register reg, Shift shift, unsigned shift_amount)
    : immediate_(0),
      reg_(reg),
      shift_(shift),
      extend_(NO_EXTEND),
      shift_amount_(shift_amount) {
  // The legal shift amount is bounded by the register width.
  ASSERT(reg.Is64Bits() || (shift_amount < kWRegSizeInBits));
  ASSERT(reg.Is32Bits() || (shift_amount < kXRegSizeInBits));
  ASSERT(!reg.IsSP());
}
335 348
336 349
// Extended-register operand (e.g. "w1, UXTW #2"). The immediate slot is
// unused and zero-initialized.
Operand::Operand(Register reg, Extend extend, unsigned shift_amount)
    : immediate_(0),
      reg_(reg),
      shift_(NO_SHIFT),
      extend_(extend),
      shift_amount_(shift_amount) {
  ASSERT(reg.IsValid());
  // Extended operands may be shifted left by at most 4 bits.
  ASSERT(shift_amount <= 4);
  ASSERT(!reg.IsSP());

  // Extend modes SXTX and UXTX require a 64-bit register.
  ASSERT(reg.Is64Bits() || ((extend != SXTX) && (extend != UXTX)));
}
350 363
351 364
// An Operand is an immediate exactly when it holds no register.
bool Operand::IsImmediate() const {
  return reg_.Is(NoReg);
}
355 368
356 369
// True when this operand is a register with a (possibly zero-amount)
// shift applied.
bool Operand::IsShiftedRegister() const {
  return reg_.IsValid() && (shift_ != NO_SHIFT);
}
360 373
361 374
// True when this operand is a register with an extend mode applied.
bool Operand::IsExtendedRegister() const {
  return reg_.IsValid() && (extend_ != NO_EXTEND);
}
365 378
366 379
367 bool Operand::IsZero() const { 380 bool Operand::IsZero() const {
368 if (IsImmediate()) { 381 if (IsImmediate()) {
369 return immediate() == 0; 382 return ImmediateValue() == 0;
370 } else { 383 } else {
371 return reg().IsZero(); 384 return reg().IsZero();
372 } 385 }
373 } 386 }
374 387
375 388
// Converts a small LSL-shifted register operand into the equivalent
// extended-register form (UXTX/UXTW with the same shift amount).
Operand Operand::ToExtendedRegister() const {
  ASSERT(IsShiftedRegister());
  // Only LSL by 0..4 has an extended-register equivalent.
  ASSERT((shift_ == LSL) && (shift_amount_ <= 4));
  return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_);
}
381 394
382 395
// Returns the full Immediate (payload plus reloc mode). Immediate-form
// operands only.
Immediate Operand::immediate() const {
  ASSERT(IsImmediate());
  return immediate_;
}
387 400
388 401
// Returns just the 64-bit payload of the immediate, without reloc info.
int64_t Operand::ImmediateValue() const {
  ASSERT(IsImmediate());
  return immediate_.value();
}
407
// Returns the register; only valid for register-form operands.
Register Operand::reg() const {
  ASSERT(IsShiftedRegister() || IsExtendedRegister());
  return reg_;
}
393 412
394 413
// Returns the shift mode; only valid for shifted-register operands.
Shift Operand::shift() const {
  ASSERT(IsShiftedRegister());
  return shift_;
}
(...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after
466 ASSERT(regoffset.Is64Bits() && !regoffset.IsSP()); 485 ASSERT(regoffset.Is64Bits() && !regoffset.IsSP());
467 ASSERT(shift == LSL); 486 ASSERT(shift == LSL);
468 } 487 }
469 488
470 489
471 MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode) 490 MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode)
472 : base_(base), addrmode_(addrmode) { 491 : base_(base), addrmode_(addrmode) {
473 ASSERT(base.Is64Bits() && !base.IsZero()); 492 ASSERT(base.Is64Bits() && !base.IsZero());
474 493
475 if (offset.IsImmediate()) { 494 if (offset.IsImmediate()) {
476 offset_ = offset.immediate(); 495 offset_ = offset.ImmediateValue();
477 496
478 regoffset_ = NoReg; 497 regoffset_ = NoReg;
479 } else if (offset.IsShiftedRegister()) { 498 } else if (offset.IsShiftedRegister()) {
480 ASSERT(addrmode == Offset); 499 ASSERT(addrmode == Offset);
481 500
482 regoffset_ = offset.reg(); 501 regoffset_ = offset.reg();
483 shift_= offset.shift(); 502 shift_= offset.shift();
484 shift_amount_ = offset.shift_amount(); 503 shift_amount_ = offset.shift_amount();
485 504
486 extend_ = NO_EXTEND; 505 extend_ = NO_EXTEND;
(...skipping 450 matching lines...) Expand 10 before | Expand all | Expand 10 after
937 USE(rt2); 956 USE(rt2);
938 if (rt.IsRegister()) { 957 if (rt.IsRegister()) {
939 return rt.Is64Bits() ? STNP_x : STNP_w; 958 return rt.Is64Bits() ? STNP_x : STNP_w;
940 } else { 959 } else {
941 ASSERT(rt.IsFPRegister()); 960 ASSERT(rt.IsFPRegister());
942 return rt.Is64Bits() ? STNP_d : STNP_s; 961 return rt.Is64Bits() ? STNP_d : STNP_s;
943 } 962 }
944 } 963 }
945 964
946 965
966 LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) {
967 if (rt.IsRegister()) {
968 return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit;
969 } else {
970 ASSERT(rt.IsFPRegister());
971 return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit;
972 }
973 }
974
975
// Links |label| into its chain and returns the offset to it measured in
// instructions rather than bytes.
int Assembler::LinkAndGetInstructionOffsetTo(Label* label) {
  ASSERT(kStartOfLabelLinkChain == 0);
  int offset = LinkAndGetByteOffsetTo(label);
  // Byte offsets between instructions are always instruction-aligned.
  ASSERT(IsAligned(offset, kInstructionSize));
  return offset >> kInstructionSizeLog2;
}
953 982
954 983
955 Instr Assembler::Flags(FlagsUpdate S) { 984 Instr Assembler::Flags(FlagsUpdate S) {
956 if (S == SetFlags) { 985 if (S == SetFlags) {
(...skipping 236 matching lines...) Expand 10 before | Expand all | Expand 10 after
1193 ASSERT(is_uint6(scale)); 1222 ASSERT(is_uint6(scale));
1194 return scale << FPScale_offset; 1223 return scale << FPScale_offset;
1195 } 1224 }
1196 1225
1197 1226
1198 const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const { 1227 const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const {
1199 return reg.Is64Bits() ? xzr : wzr; 1228 return reg.Is64Bits() ? xzr : wzr;
1200 } 1229 }
1201 1230
1202 1231
// NOTE(review): this function exists only in the OLD revision of the
// diff — the patch removes it, with literal-load opcode selection moving
// to LoadLiteralOpFor() instead of the fixed LDR_x_lit used here.
void Assembler::LoadRelocated(const CPURegister& rt, const Operand& operand) {
  LoadRelocatedValue(rt, operand, LDR_x_lit);
}
1207
// Grows the code buffer when fewer than kGap bytes remain; called before
// emitting each instruction.
inline void Assembler::CheckBufferSpace() {
  ASSERT(pc_ < (buffer_ + buffer_size_));
  if (buffer_space() < kGap) {
    GrowBuffer();
  }
}
1214 1238
1215 1239
1216 inline void Assembler::CheckBuffer() { 1240 inline void Assembler::CheckBuffer() {
1217 CheckBufferSpace(); 1241 CheckBufferSpace();
(...skipping 13 matching lines...) Expand all
1231 1255
1232 1256
// Resets the pending type-feedback id to the "none" sentinel.
void Assembler::ClearRecordedAstId() {
  recorded_ast_id_ = TypeFeedbackId::None();
}
1236 1260
1237 1261
1238 } } // namespace v8::internal 1262 } } // namespace v8::internal
1239 1263
1240 #endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_ 1264 #endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698