Chromium Code Reviews

Side by Side Diff: src/arm64/assembler-arm64-inl.h

Issue 430503007: Rename ASSERT* to DCHECK*. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE and fixes (created 6 years, 4 months ago)
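
For context on the rename itself: a minimal sketch of the debug-check idiom this CL standardizes on, assuming the conventional Chromium/V8 semantics (the check fires in debug builds and compiles away in release builds). ReportCheckFailure is a hypothetical stand-in for V8's real fatal-error handler, not its actual name.

// ReportCheckFailure is a hypothetical reporting hook for illustration only.
#ifdef DEBUG
#define DCHECK(condition)                                   \
  do {                                                      \
    if (!(condition)) {                                     \
      ReportCheckFailure(__FILE__, __LINE__, #condition);   \
    }                                                       \
  } while (false)
#else
#define DCHECK(condition) ((void)0)
#endif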
OLD | NEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_ 5 #ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_
6 #define V8_ARM64_ASSEMBLER_ARM64_INL_H_ 6 #define V8_ARM64_ASSEMBLER_ARM64_INL_H_
7 7
8 #include "src/arm64/assembler-arm64.h" 8 #include "src/arm64/assembler-arm64.h"
9 #include "src/assembler.h" 9 #include "src/assembler.h"
10 #include "src/debug.h" 10 #include "src/debug.h"
11 11
12 12
13 namespace v8 { 13 namespace v8 {
14 namespace internal { 14 namespace internal {
15 15
16 16
17 bool CpuFeatures::SupportsCrankshaft() { return true; } 17 bool CpuFeatures::SupportsCrankshaft() { return true; }
18 18
19 19
20 void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) { 20 void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
21 UNIMPLEMENTED(); 21 UNIMPLEMENTED();
22 } 22 }
23 23
24 24
25 void RelocInfo::set_target_address(Address target, 25 void RelocInfo::set_target_address(Address target,
26 WriteBarrierMode write_barrier_mode, 26 WriteBarrierMode write_barrier_mode,
27 ICacheFlushMode icache_flush_mode) { 27 ICacheFlushMode icache_flush_mode) {
28 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)); 28 DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
29 Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode); 29 Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
30 if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL && 30 if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
31 IsCodeTarget(rmode_)) { 31 IsCodeTarget(rmode_)) {
32 Object* target_code = Code::GetCodeFromTargetAddress(target); 32 Object* target_code = Code::GetCodeFromTargetAddress(target);
33 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode( 33 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
34 host(), this, HeapObject::cast(target_code)); 34 host(), this, HeapObject::cast(target_code));
35 } 35 }
36 } 36 }
37 37
38 38
39 inline unsigned CPURegister::code() const { 39 inline unsigned CPURegister::code() const {
40 ASSERT(IsValid()); 40 DCHECK(IsValid());
41 return reg_code; 41 return reg_code;
42 } 42 }
43 43
44 44
45 inline CPURegister::RegisterType CPURegister::type() const { 45 inline CPURegister::RegisterType CPURegister::type() const {
46 ASSERT(IsValidOrNone()); 46 DCHECK(IsValidOrNone());
47 return reg_type; 47 return reg_type;
48 } 48 }
49 49
50 50
51 inline RegList CPURegister::Bit() const { 51 inline RegList CPURegister::Bit() const {
52 ASSERT(reg_code < (sizeof(RegList) * kBitsPerByte)); 52 DCHECK(reg_code < (sizeof(RegList) * kBitsPerByte));
53 return IsValid() ? 1UL << reg_code : 0; 53 return IsValid() ? 1UL << reg_code : 0;
54 } 54 }
55 55
56 56
57 inline unsigned CPURegister::SizeInBits() const { 57 inline unsigned CPURegister::SizeInBits() const {
58 ASSERT(IsValid()); 58 DCHECK(IsValid());
59 return reg_size; 59 return reg_size;
60 } 60 }
61 61
62 62
63 inline int CPURegister::SizeInBytes() const { 63 inline int CPURegister::SizeInBytes() const {
64 ASSERT(IsValid()); 64 DCHECK(IsValid());
65 ASSERT(SizeInBits() % 8 == 0); 65 DCHECK(SizeInBits() % 8 == 0);
66 return reg_size / 8; 66 return reg_size / 8;
67 } 67 }
68 68
69 69
70 inline bool CPURegister::Is32Bits() const { 70 inline bool CPURegister::Is32Bits() const {
71 ASSERT(IsValid()); 71 DCHECK(IsValid());
72 return reg_size == 32; 72 return reg_size == 32;
73 } 73 }
74 74
75 75
76 inline bool CPURegister::Is64Bits() const { 76 inline bool CPURegister::Is64Bits() const {
77 ASSERT(IsValid()); 77 DCHECK(IsValid());
78 return reg_size == 64; 78 return reg_size == 64;
79 } 79 }
80 80
81 81
82 inline bool CPURegister::IsValid() const { 82 inline bool CPURegister::IsValid() const {
83 if (IsValidRegister() || IsValidFPRegister()) { 83 if (IsValidRegister() || IsValidFPRegister()) {
84 ASSERT(!IsNone()); 84 DCHECK(!IsNone());
85 return true; 85 return true;
86 } else { 86 } else {
87 ASSERT(IsNone()); 87 DCHECK(IsNone());
88 return false; 88 return false;
89 } 89 }
90 } 90 }
91 91
92 92
93 inline bool CPURegister::IsValidRegister() const { 93 inline bool CPURegister::IsValidRegister() const {
94 return IsRegister() && 94 return IsRegister() &&
95 ((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)) && 95 ((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)) &&
96 ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode)); 96 ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode));
97 } 97 }
98 98
99 99
100 inline bool CPURegister::IsValidFPRegister() const { 100 inline bool CPURegister::IsValidFPRegister() const {
101 return IsFPRegister() && 101 return IsFPRegister() &&
102 ((reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits)) && 102 ((reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits)) &&
103 (reg_code < kNumberOfFPRegisters); 103 (reg_code < kNumberOfFPRegisters);
104 } 104 }
105 105
106 106
107 inline bool CPURegister::IsNone() const { 107 inline bool CPURegister::IsNone() const {
108 // kNoRegister types should always have size 0 and code 0. 108 // kNoRegister types should always have size 0 and code 0.
109 ASSERT((reg_type != kNoRegister) || (reg_code == 0)); 109 DCHECK((reg_type != kNoRegister) || (reg_code == 0));
110 ASSERT((reg_type != kNoRegister) || (reg_size == 0)); 110 DCHECK((reg_type != kNoRegister) || (reg_size == 0));
111 111
112 return reg_type == kNoRegister; 112 return reg_type == kNoRegister;
113 } 113 }
114 114
115 115
116 inline bool CPURegister::Is(const CPURegister& other) const { 116 inline bool CPURegister::Is(const CPURegister& other) const {
117 ASSERT(IsValidOrNone() && other.IsValidOrNone()); 117 DCHECK(IsValidOrNone() && other.IsValidOrNone());
118 return Aliases(other) && (reg_size == other.reg_size); 118 return Aliases(other) && (reg_size == other.reg_size);
119 } 119 }
120 120
121 121
122 inline bool CPURegister::Aliases(const CPURegister& other) const { 122 inline bool CPURegister::Aliases(const CPURegister& other) const {
123 ASSERT(IsValidOrNone() && other.IsValidOrNone()); 123 DCHECK(IsValidOrNone() && other.IsValidOrNone());
124 return (reg_code == other.reg_code) && (reg_type == other.reg_type); 124 return (reg_code == other.reg_code) && (reg_type == other.reg_type);
125 } 125 }
126 126
127 127
128 inline bool CPURegister::IsRegister() const { 128 inline bool CPURegister::IsRegister() const {
129 return reg_type == kRegister; 129 return reg_type == kRegister;
130 } 130 }
131 131
132 132
133 inline bool CPURegister::IsFPRegister() const { 133 inline bool CPURegister::IsFPRegister() const {
134 return reg_type == kFPRegister; 134 return reg_type == kFPRegister;
135 } 135 }
136 136
137 137
138 inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const { 138 inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const {
139 return (reg_size == other.reg_size) && (reg_type == other.reg_type); 139 return (reg_size == other.reg_size) && (reg_type == other.reg_type);
140 } 140 }
141 141
142 142
143 inline bool CPURegister::IsValidOrNone() const { 143 inline bool CPURegister::IsValidOrNone() const {
144 return IsValid() || IsNone(); 144 return IsValid() || IsNone();
145 } 145 }
146 146
147 147
148 inline bool CPURegister::IsZero() const { 148 inline bool CPURegister::IsZero() const {
149 ASSERT(IsValid()); 149 DCHECK(IsValid());
150 return IsRegister() && (reg_code == kZeroRegCode); 150 return IsRegister() && (reg_code == kZeroRegCode);
151 } 151 }
152 152
153 153
154 inline bool CPURegister::IsSP() const { 154 inline bool CPURegister::IsSP() const {
155 ASSERT(IsValid()); 155 DCHECK(IsValid());
156 return IsRegister() && (reg_code == kSPRegInternalCode); 156 return IsRegister() && (reg_code == kSPRegInternalCode);
157 } 157 }
158 158
159 159
160 inline void CPURegList::Combine(const CPURegList& other) { 160 inline void CPURegList::Combine(const CPURegList& other) {
161 ASSERT(IsValid()); 161 DCHECK(IsValid());
162 ASSERT(other.type() == type_); 162 DCHECK(other.type() == type_);
163 ASSERT(other.RegisterSizeInBits() == size_); 163 DCHECK(other.RegisterSizeInBits() == size_);
164 list_ |= other.list(); 164 list_ |= other.list();
165 } 165 }
166 166
167 167
168 inline void CPURegList::Remove(const CPURegList& other) { 168 inline void CPURegList::Remove(const CPURegList& other) {
169 ASSERT(IsValid()); 169 DCHECK(IsValid());
170 if (other.type() == type_) { 170 if (other.type() == type_) {
171 list_ &= ~other.list(); 171 list_ &= ~other.list();
172 } 172 }
173 } 173 }
174 174
175 175
176 inline void CPURegList::Combine(const CPURegister& other) { 176 inline void CPURegList::Combine(const CPURegister& other) {
177 ASSERT(other.type() == type_); 177 DCHECK(other.type() == type_);
178 ASSERT(other.SizeInBits() == size_); 178 DCHECK(other.SizeInBits() == size_);
179 Combine(other.code()); 179 Combine(other.code());
180 } 180 }
181 181
182 182
183 inline void CPURegList::Remove(const CPURegister& other1, 183 inline void CPURegList::Remove(const CPURegister& other1,
184 const CPURegister& other2, 184 const CPURegister& other2,
185 const CPURegister& other3, 185 const CPURegister& other3,
186 const CPURegister& other4) { 186 const CPURegister& other4) {
187 if (!other1.IsNone() && (other1.type() == type_)) Remove(other1.code()); 187 if (!other1.IsNone() && (other1.type() == type_)) Remove(other1.code());
188 if (!other2.IsNone() && (other2.type() == type_)) Remove(other2.code()); 188 if (!other2.IsNone() && (other2.type() == type_)) Remove(other2.code());
189 if (!other3.IsNone() && (other3.type() == type_)) Remove(other3.code()); 189 if (!other3.IsNone() && (other3.type() == type_)) Remove(other3.code());
190 if (!other4.IsNone() && (other4.type() == type_)) Remove(other4.code()); 190 if (!other4.IsNone() && (other4.type() == type_)) Remove(other4.code());
191 } 191 }
192 192
193 193
194 inline void CPURegList::Combine(int code) { 194 inline void CPURegList::Combine(int code) {
195 ASSERT(IsValid()); 195 DCHECK(IsValid());
196 ASSERT(CPURegister::Create(code, size_, type_).IsValid()); 196 DCHECK(CPURegister::Create(code, size_, type_).IsValid());
197 list_ |= (1UL << code); 197 list_ |= (1UL << code);
198 } 198 }
199 199
200 200
201 inline void CPURegList::Remove(int code) { 201 inline void CPURegList::Remove(int code) {
202 ASSERT(IsValid()); 202 DCHECK(IsValid());
203 ASSERT(CPURegister::Create(code, size_, type_).IsValid()); 203 DCHECK(CPURegister::Create(code, size_, type_).IsValid());
204 list_ &= ~(1UL << code); 204 list_ &= ~(1UL << code);
205 } 205 }
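
A hedged usage sketch of the CPURegList set operations above; the constructor shape (type, register size, first and last register index) is an assumption about this codebase's API, so treat it as illustrative.

// Build the list {x0, x1, x2}, then drop and re-add x1. Remove/Combine
// are the bit-set updates defined above (list_ &= ~bit / list_ |= bit).
CPURegList list(CPURegister::kRegister, kXRegSizeInBits, 0, 2);
list.Remove(1);     // list now holds x0 and x2
list.Combine(x1);   // back to {x0, x1, x2}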
206 206
207 207
208 inline Register Register::XRegFromCode(unsigned code) { 208 inline Register Register::XRegFromCode(unsigned code) {
209 if (code == kSPRegInternalCode) { 209 if (code == kSPRegInternalCode) {
210 return csp; 210 return csp;
211 } else { 211 } else {
212 ASSERT(code < kNumberOfRegisters); 212 DCHECK(code < kNumberOfRegisters);
213 return Register::Create(code, kXRegSizeInBits); 213 return Register::Create(code, kXRegSizeInBits);
214 } 214 }
215 } 215 }
216 216
217 217
218 inline Register Register::WRegFromCode(unsigned code) { 218 inline Register Register::WRegFromCode(unsigned code) {
219 if (code == kSPRegInternalCode) { 219 if (code == kSPRegInternalCode) {
220 return wcsp; 220 return wcsp;
221 } else { 221 } else {
222 ASSERT(code < kNumberOfRegisters); 222 DCHECK(code < kNumberOfRegisters);
223 return Register::Create(code, kWRegSizeInBits); 223 return Register::Create(code, kWRegSizeInBits);
224 } 224 }
225 } 225 }
226 226
227 227
228 inline FPRegister FPRegister::SRegFromCode(unsigned code) { 228 inline FPRegister FPRegister::SRegFromCode(unsigned code) {
229 ASSERT(code < kNumberOfFPRegisters); 229 DCHECK(code < kNumberOfFPRegisters);
230 return FPRegister::Create(code, kSRegSizeInBits); 230 return FPRegister::Create(code, kSRegSizeInBits);
231 } 231 }
232 232
233 233
234 inline FPRegister FPRegister::DRegFromCode(unsigned code) { 234 inline FPRegister FPRegister::DRegFromCode(unsigned code) {
235 ASSERT(code < kNumberOfFPRegisters); 235 DCHECK(code < kNumberOfFPRegisters);
236 return FPRegister::Create(code, kDRegSizeInBits); 236 return FPRegister::Create(code, kDRegSizeInBits);
237 } 237 }
238 238
239 239
240 inline Register CPURegister::W() const { 240 inline Register CPURegister::W() const {
241 ASSERT(IsValidRegister()); 241 DCHECK(IsValidRegister());
242 return Register::WRegFromCode(reg_code); 242 return Register::WRegFromCode(reg_code);
243 } 243 }
244 244
245 245
246 inline Register CPURegister::X() const { 246 inline Register CPURegister::X() const {
247 ASSERT(IsValidRegister()); 247 DCHECK(IsValidRegister());
248 return Register::XRegFromCode(reg_code); 248 return Register::XRegFromCode(reg_code);
249 } 249 }
250 250
251 251
252 inline FPRegister CPURegister::S() const { 252 inline FPRegister CPURegister::S() const {
253 ASSERT(IsValidFPRegister()); 253 DCHECK(IsValidFPRegister());
254 return FPRegister::SRegFromCode(reg_code); 254 return FPRegister::SRegFromCode(reg_code);
255 } 255 }
256 256
257 257
258 inline FPRegister CPURegister::D() const { 258 inline FPRegister CPURegister::D() const {
259 ASSERT(IsValidFPRegister()); 259 DCHECK(IsValidFPRegister());
260 return FPRegister::DRegFromCode(reg_code); 260 return FPRegister::DRegFromCode(reg_code);
261 } 261 }
262 262
263 263
264 // Immediate. 264 // Immediate.
265 // Default initializer is for int types 265 // Default initializer is for int types
266 template<typename T> 266 template<typename T>
267 struct ImmediateInitializer { 267 struct ImmediateInitializer {
268 static const bool kIsIntType = true; 268 static const bool kIsIntType = true;
269 static inline RelocInfo::Mode rmode_for(T) { 269 static inline RelocInfo::Mode rmode_for(T) {
(...skipping 64 matching lines...)
334 : immediate_(t, rmode), 334 : immediate_(t, rmode),
335 reg_(NoReg) {} 335 reg_(NoReg) {}
336 336
337 337
338 Operand::Operand(Register reg, Shift shift, unsigned shift_amount) 338 Operand::Operand(Register reg, Shift shift, unsigned shift_amount)
339 : immediate_(0), 339 : immediate_(0),
340 reg_(reg), 340 reg_(reg),
341 shift_(shift), 341 shift_(shift),
342 extend_(NO_EXTEND), 342 extend_(NO_EXTEND),
343 shift_amount_(shift_amount) { 343 shift_amount_(shift_amount) {
344 ASSERT(reg.Is64Bits() || (shift_amount < kWRegSizeInBits)); 344 DCHECK(reg.Is64Bits() || (shift_amount < kWRegSizeInBits));
345 ASSERT(reg.Is32Bits() || (shift_amount < kXRegSizeInBits)); 345 DCHECK(reg.Is32Bits() || (shift_amount < kXRegSizeInBits));
346 ASSERT(!reg.IsSP()); 346 DCHECK(!reg.IsSP());
347 } 347 }
348 348
349 349
350 Operand::Operand(Register reg, Extend extend, unsigned shift_amount) 350 Operand::Operand(Register reg, Extend extend, unsigned shift_amount)
351 : immediate_(0), 351 : immediate_(0),
352 reg_(reg), 352 reg_(reg),
353 shift_(NO_SHIFT), 353 shift_(NO_SHIFT),
354 extend_(extend), 354 extend_(extend),
355 shift_amount_(shift_amount) { 355 shift_amount_(shift_amount) {
356 ASSERT(reg.IsValid()); 356 DCHECK(reg.IsValid());
357 ASSERT(shift_amount <= 4); 357 DCHECK(shift_amount <= 4);
358 ASSERT(!reg.IsSP()); 358 DCHECK(!reg.IsSP());
359 359
360 // Extend modes SXTX and UXTX require a 64-bit register. 360 // Extend modes SXTX and UXTX require a 64-bit register.
361 ASSERT(reg.Is64Bits() || ((extend != SXTX) && (extend != UXTX))); 361 DCHECK(reg.Is64Bits() || ((extend != SXTX) && (extend != UXTX)));
362 } 362 }
363 363
364 364
365 bool Operand::IsImmediate() const { 365 bool Operand::IsImmediate() const {
366 return reg_.Is(NoReg); 366 return reg_.Is(NoReg);
367 } 367 }
368 368
369 369
370 bool Operand::IsShiftedRegister() const { 370 bool Operand::IsShiftedRegister() const {
371 return reg_.IsValid() && (shift_ != NO_SHIFT); 371 return reg_.IsValid() && (shift_ != NO_SHIFT);
372 } 372 }
373 373
374 374
375 bool Operand::IsExtendedRegister() const { 375 bool Operand::IsExtendedRegister() const {
376 return reg_.IsValid() && (extend_ != NO_EXTEND); 376 return reg_.IsValid() && (extend_ != NO_EXTEND);
377 } 377 }
378 378
379 379
380 bool Operand::IsZero() const { 380 bool Operand::IsZero() const {
381 if (IsImmediate()) { 381 if (IsImmediate()) {
382 return ImmediateValue() == 0; 382 return ImmediateValue() == 0;
383 } else { 383 } else {
384 return reg().IsZero(); 384 return reg().IsZero();
385 } 385 }
386 } 386 }
387 387
388 388
389 Operand Operand::ToExtendedRegister() const { 389 Operand Operand::ToExtendedRegister() const {
390 ASSERT(IsShiftedRegister()); 390 DCHECK(IsShiftedRegister());
391 ASSERT((shift_ == LSL) && (shift_amount_ <= 4)); 391 DCHECK((shift_ == LSL) && (shift_amount_ <= 4));
392 return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_); 392 return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_);
393 } 393 }
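
A short example of the conversion above, hedged to the constraints the DCHECKs enforce (only LSL with a shift amount of at most 4 qualifies):

// Operand(x0, LSL, 2) -> Operand(x0, UXTX, 2)   (64-bit register)
// Operand(w0, LSL, 2) -> Operand(w0, UXTW, 2)   (32-bit register)
// Any other shift, or a shift amount above 4, fails the DCHECKs.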
394 394
395 395
396 Immediate Operand::immediate() const { 396 Immediate Operand::immediate() const {
397 ASSERT(IsImmediate()); 397 DCHECK(IsImmediate());
398 return immediate_; 398 return immediate_;
399 } 399 }
400 400
401 401
402 int64_t Operand::ImmediateValue() const { 402 int64_t Operand::ImmediateValue() const {
403 ASSERT(IsImmediate()); 403 DCHECK(IsImmediate());
404 return immediate_.value(); 404 return immediate_.value();
405 } 405 }
406 406
407 407
408 Register Operand::reg() const { 408 Register Operand::reg() const {
409 ASSERT(IsShiftedRegister() || IsExtendedRegister()); 409 DCHECK(IsShiftedRegister() || IsExtendedRegister());
410 return reg_; 410 return reg_;
411 } 411 }
412 412
413 413
414 Shift Operand::shift() const { 414 Shift Operand::shift() const {
415 ASSERT(IsShiftedRegister()); 415 DCHECK(IsShiftedRegister());
416 return shift_; 416 return shift_;
417 } 417 }
418 418
419 419
420 Extend Operand::extend() const { 420 Extend Operand::extend() const {
421 ASSERT(IsExtendedRegister()); 421 DCHECK(IsExtendedRegister());
422 return extend_; 422 return extend_;
423 } 423 }
424 424
425 425
426 unsigned Operand::shift_amount() const { 426 unsigned Operand::shift_amount() const {
427 ASSERT(IsShiftedRegister() || IsExtendedRegister()); 427 DCHECK(IsShiftedRegister() || IsExtendedRegister());
428 return shift_amount_; 428 return shift_amount_;
429 } 429 }
430 430
431 431
432 Operand Operand::UntagSmi(Register smi) { 432 Operand Operand::UntagSmi(Register smi) {
433 STATIC_ASSERT(kXRegSizeInBits == static_cast<unsigned>(kSmiShift + 433 STATIC_ASSERT(kXRegSizeInBits == static_cast<unsigned>(kSmiShift +
434 kSmiValueSize)); 434 kSmiValueSize));
435 ASSERT(smi.Is64Bits()); 435 DCHECK(smi.Is64Bits());
436 return Operand(smi, ASR, kSmiShift); 436 return Operand(smi, ASR, kSmiShift);
437 } 437 }
438 438
439 439
440 Operand Operand::UntagSmiAndScale(Register smi, int scale) { 440 Operand Operand::UntagSmiAndScale(Register smi, int scale) {
441 STATIC_ASSERT(kXRegSizeInBits == static_cast<unsigned>(kSmiShift + 441 STATIC_ASSERT(kXRegSizeInBits == static_cast<unsigned>(kSmiShift +
442 kSmiValueSize)); 442 kSmiValueSize));
443 ASSERT(smi.Is64Bits()); 443 DCHECK(smi.Is64Bits());
444 ASSERT((scale >= 0) && (scale <= (64 - kSmiValueSize))); 444 DCHECK((scale >= 0) && (scale <= (64 - kSmiValueSize)));
445 if (scale > kSmiShift) { 445 if (scale > kSmiShift) {
446 return Operand(smi, LSL, scale - kSmiShift); 446 return Operand(smi, LSL, scale - kSmiShift);
447 } else if (scale < kSmiShift) { 447 } else if (scale < kSmiShift) {
448 return Operand(smi, ASR, kSmiShift - scale); 448 return Operand(smi, ASR, kSmiShift - scale);
449 } 449 }
450 return Operand(smi); 450 return Operand(smi);
451 } 451 }
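
A worked instance of the scaling arithmetic above, assuming the usual 64-bit smi layout where kSmiShift == 32 and kSmiValueSize == 32 (consistent with the STATIC_ASSERT, since 32 + 32 == kXRegSizeInBits):

// scale == 3  (< kSmiShift):  returns Operand(smi, ASR, 29); shifting the
//                             tagged value right by 32 - 3 untags it and
//                             scales it by 2^3 = 8 in a single operand.
// scale == 32 (== kSmiShift): returns Operand(smi); the value already sits
//                             exactly where the scaled result belongs.
// scale == 33 fails the DCHECK, since scale may not exceed 64 - kSmiValueSize.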
452 452
453 453
454 MemOperand::MemOperand() 454 MemOperand::MemOperand()
455 : base_(NoReg), regoffset_(NoReg), offset_(0), addrmode_(Offset), 455 : base_(NoReg), regoffset_(NoReg), offset_(0), addrmode_(Offset),
456 shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) { 456 shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) {
457 } 457 }
458 458
459 459
460 MemOperand::MemOperand(Register base, ptrdiff_t offset, AddrMode addrmode) 460 MemOperand::MemOperand(Register base, ptrdiff_t offset, AddrMode addrmode)
461 : base_(base), regoffset_(NoReg), offset_(offset), addrmode_(addrmode), 461 : base_(base), regoffset_(NoReg), offset_(offset), addrmode_(addrmode),
462 shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) { 462 shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) {
463 ASSERT(base.Is64Bits() && !base.IsZero()); 463 DCHECK(base.Is64Bits() && !base.IsZero());
464 } 464 }
465 465
466 466
467 MemOperand::MemOperand(Register base, 467 MemOperand::MemOperand(Register base,
468 Register regoffset, 468 Register regoffset,
469 Extend extend, 469 Extend extend,
470 unsigned shift_amount) 470 unsigned shift_amount)
471 : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset), 471 : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset),
472 shift_(NO_SHIFT), extend_(extend), shift_amount_(shift_amount) { 472 shift_(NO_SHIFT), extend_(extend), shift_amount_(shift_amount) {
473 ASSERT(base.Is64Bits() && !base.IsZero()); 473 DCHECK(base.Is64Bits() && !base.IsZero());
474 ASSERT(!regoffset.IsSP()); 474 DCHECK(!regoffset.IsSP());
475 ASSERT((extend == UXTW) || (extend == SXTW) || (extend == SXTX)); 475 DCHECK((extend == UXTW) || (extend == SXTW) || (extend == SXTX));
476 476
477 // SXTX extend mode requires a 64-bit offset register. 477 // SXTX extend mode requires a 64-bit offset register.
478 ASSERT(regoffset.Is64Bits() || (extend != SXTX)); 478 DCHECK(regoffset.Is64Bits() || (extend != SXTX));
479 } 479 }
480 480
481 481
482 MemOperand::MemOperand(Register base, 482 MemOperand::MemOperand(Register base,
483 Register regoffset, 483 Register regoffset,
484 Shift shift, 484 Shift shift,
485 unsigned shift_amount) 485 unsigned shift_amount)
486 : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset), 486 : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset),
487 shift_(shift), extend_(NO_EXTEND), shift_amount_(shift_amount) { 487 shift_(shift), extend_(NO_EXTEND), shift_amount_(shift_amount) {
488 ASSERT(base.Is64Bits() && !base.IsZero()); 488 DCHECK(base.Is64Bits() && !base.IsZero());
489 ASSERT(regoffset.Is64Bits() && !regoffset.IsSP()); 489 DCHECK(regoffset.Is64Bits() && !regoffset.IsSP());
490 ASSERT(shift == LSL); 490 DCHECK(shift == LSL);
491 } 491 }
492 492
493 493
494 MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode) 494 MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode)
495 : base_(base), addrmode_(addrmode) { 495 : base_(base), addrmode_(addrmode) {
496 ASSERT(base.Is64Bits() && !base.IsZero()); 496 DCHECK(base.Is64Bits() && !base.IsZero());
497 497
498 if (offset.IsImmediate()) { 498 if (offset.IsImmediate()) {
499 offset_ = offset.ImmediateValue(); 499 offset_ = offset.ImmediateValue();
500 500
501 regoffset_ = NoReg; 501 regoffset_ = NoReg;
502 } else if (offset.IsShiftedRegister()) { 502 } else if (offset.IsShiftedRegister()) {
503 ASSERT(addrmode == Offset); 503 DCHECK(addrmode == Offset);
504 504
505 regoffset_ = offset.reg(); 505 regoffset_ = offset.reg();
506 shift_ = offset.shift(); 506 shift_ = offset.shift();
507 shift_amount_ = offset.shift_amount(); 507 shift_amount_ = offset.shift_amount();
508 508
509 extend_ = NO_EXTEND; 509 extend_ = NO_EXTEND;
510 offset_ = 0; 510 offset_ = 0;
511 511
512 // These assertions match those in the shifted-register constructor. 512 // These assertions match those in the shifted-register constructor.
513 ASSERT(regoffset_.Is64Bits() && !regoffset_.IsSP()); 513 DCHECK(regoffset_.Is64Bits() && !regoffset_.IsSP());
514 ASSERT(shift_ == LSL); 514 DCHECK(shift_ == LSL);
515 } else { 515 } else {
516 ASSERT(offset.IsExtendedRegister()); 516 DCHECK(offset.IsExtendedRegister());
517 ASSERT(addrmode == Offset); 517 DCHECK(addrmode == Offset);
518 518
519 regoffset_ = offset.reg(); 519 regoffset_ = offset.reg();
520 extend_ = offset.extend(); 520 extend_ = offset.extend();
521 shift_amount_ = offset.shift_amount(); 521 shift_amount_ = offset.shift_amount();
522 522
523 shift_ = NO_SHIFT; 523 shift_ = NO_SHIFT;
524 offset_ = 0; 524 offset_ = 0;
525 525
526 // These assertions match those in the extended-register constructor. 526 // These assertions match those in the extended-register constructor.
527 ASSERT(!regoffset_.IsSP()); 527 DCHECK(!regoffset_.IsSP());
528 ASSERT((extend_ == UXTW) || (extend_ == SXTW) || (extend_ == SXTX)); 528 DCHECK((extend_ == UXTW) || (extend_ == SXTW) || (extend_ == SXTX));
529 ASSERT((regoffset_.Is64Bits() || (extend_ != SXTX))); 529 DCHECK((regoffset_.Is64Bits() || (extend_ != SXTX)));
530 } 530 }
531 } 531 }
532 532
533 bool MemOperand::IsImmediateOffset() const { 533 bool MemOperand::IsImmediateOffset() const {
534 return (addrmode_ == Offset) && regoffset_.Is(NoReg); 534 return (addrmode_ == Offset) && regoffset_.Is(NoReg);
535 } 535 }
536 536
537 537
538 bool MemOperand::IsRegisterOffset() const { 538 bool MemOperand::IsRegisterOffset() const {
539 return (addrmode_ == Offset) && !regoffset_.Is(NoReg); 539 return (addrmode_ == Offset) && !regoffset_.Is(NoReg);
540 } 540 }
541 541
542 542
543 bool MemOperand::IsPreIndex() const { 543 bool MemOperand::IsPreIndex() const {
544 return addrmode_ == PreIndex; 544 return addrmode_ == PreIndex;
545 } 545 }
546 546
547 547
548 bool MemOperand::IsPostIndex() const { 548 bool MemOperand::IsPostIndex() const {
549 return addrmode_ == PostIndex; 549 return addrmode_ == PostIndex;
550 } 550 }
551 551
552 Operand MemOperand::OffsetAsOperand() const { 552 Operand MemOperand::OffsetAsOperand() const {
553 if (IsImmediateOffset()) { 553 if (IsImmediateOffset()) {
554 return offset(); 554 return offset();
555 } else { 555 } else {
556 ASSERT(IsRegisterOffset()); 556 DCHECK(IsRegisterOffset());
557 if (extend() == NO_EXTEND) { 557 if (extend() == NO_EXTEND) {
558 return Operand(regoffset(), shift(), shift_amount()); 558 return Operand(regoffset(), shift(), shift_amount());
559 } else { 559 } else {
560 return Operand(regoffset(), extend(), shift_amount()); 560 return Operand(regoffset(), extend(), shift_amount());
561 } 561 }
562 } 562 }
563 } 563 }
564 564
565 565
566 void Assembler::Unreachable() { 566 void Assembler::Unreachable() {
567 #ifdef USE_SIMULATOR 567 #ifdef USE_SIMULATOR
568 debug("UNREACHABLE", __LINE__, BREAK); 568 debug("UNREACHABLE", __LINE__, BREAK);
569 #else 569 #else
570 // Crash by branching to 0. lr now points near the fault. 570 // Crash by branching to 0. lr now points near the fault.
571 Emit(BLR | Rn(xzr)); 571 Emit(BLR | Rn(xzr));
572 #endif 572 #endif
573 } 573 }
574 574
575 575
576 Address Assembler::target_pointer_address_at(Address pc) { 576 Address Assembler::target_pointer_address_at(Address pc) {
577 Instruction* instr = reinterpret_cast<Instruction*>(pc); 577 Instruction* instr = reinterpret_cast<Instruction*>(pc);
578 ASSERT(instr->IsLdrLiteralX()); 578 DCHECK(instr->IsLdrLiteralX());
579 return reinterpret_cast<Address>(instr->ImmPCOffsetTarget()); 579 return reinterpret_cast<Address>(instr->ImmPCOffsetTarget());
580 } 580 }
581 581
582 582
583 // Read/Modify the code target address in the branch/call instruction at pc. 583 // Read/Modify the code target address in the branch/call instruction at pc.
584 Address Assembler::target_address_at(Address pc, 584 Address Assembler::target_address_at(Address pc,
585 ConstantPoolArray* constant_pool) { 585 ConstantPoolArray* constant_pool) {
586 return Memory::Address_at(target_pointer_address_at(pc)); 586 return Memory::Address_at(target_pointer_address_at(pc));
587 } 587 }
588 588
589 589
590 Address Assembler::target_address_at(Address pc, Code* code) { 590 Address Assembler::target_address_at(Address pc, Code* code) {
591 ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; 591 ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
592 return target_address_at(pc, constant_pool); 592 return target_address_at(pc, constant_pool);
593 } 593 }
594 594
595 595
596 Address Assembler::target_address_from_return_address(Address pc) { 596 Address Assembler::target_address_from_return_address(Address pc) {
597 // Returns the address of the call target from the return address that will 597 // Returns the address of the call target from the return address that will
598 // be returned to after a call. 598 // be returned to after a call.
599 // Call sequence on ARM64 is: 599 // Call sequence on ARM64 is:
600 // ldr ip0, #... @ load from literal pool 600 // ldr ip0, #... @ load from literal pool
601 // blr ip0 601 // blr ip0
602 Address candidate = pc - 2 * kInstructionSize; 602 Address candidate = pc - 2 * kInstructionSize;
603 Instruction* instr = reinterpret_cast<Instruction*>(candidate); 603 Instruction* instr = reinterpret_cast<Instruction*>(candidate);
604 USE(instr); 604 USE(instr);
605 ASSERT(instr->IsLdrLiteralX()); 605 DCHECK(instr->IsLdrLiteralX());
606 return candidate; 606 return candidate;
607 } 607 }
608 608
609 609
610 Address Assembler::return_address_from_call_start(Address pc) { 610 Address Assembler::return_address_from_call_start(Address pc) {
611 // The call, generated by MacroAssembler::Call, is one of two possible 611 // The call, generated by MacroAssembler::Call, is one of two possible
612 // sequences: 612 // sequences:
613 // 613 //
614 // Without relocation: 614 // Without relocation:
615 // movz temp, #(target & 0x000000000000ffff) 615 // movz temp, #(target & 0x000000000000ffff)
616 // movk temp, #(target & 0x00000000ffff0000) 616 // movk temp, #(target & 0x00000000ffff0000)
617 // movk temp, #(target & 0x0000ffff00000000) 617 // movk temp, #(target & 0x0000ffff00000000)
618 // blr temp 618 // blr temp
619 // 619 //
620 // With relocation: 620 // With relocation:
621 // ldr temp, =target 621 // ldr temp, =target
622 // blr temp 622 // blr temp
623 // 623 //
624 // The return address is immediately after the blr instruction in both cases, 624 // The return address is immediately after the blr instruction in both cases,
625 // so it can be found by adding the call size to the address at the start of 625 // so it can be found by adding the call size to the address at the start of
626 // the call sequence. 626 // the call sequence.
627 STATIC_ASSERT(Assembler::kCallSizeWithoutRelocation == 4 * kInstructionSize); 627 STATIC_ASSERT(Assembler::kCallSizeWithoutRelocation == 4 * kInstructionSize);
628 STATIC_ASSERT(Assembler::kCallSizeWithRelocation == 2 * kInstructionSize); 628 STATIC_ASSERT(Assembler::kCallSizeWithRelocation == 2 * kInstructionSize);
629 629
630 Instruction* instr = reinterpret_cast<Instruction*>(pc); 630 Instruction* instr = reinterpret_cast<Instruction*>(pc);
631 if (instr->IsMovz()) { 631 if (instr->IsMovz()) {
632 // Verify the instruction sequence. 632 // Verify the instruction sequence.
633 ASSERT(instr->following(1)->IsMovk()); 633 DCHECK(instr->following(1)->IsMovk());
634 ASSERT(instr->following(2)->IsMovk()); 634 DCHECK(instr->following(2)->IsMovk());
635 ASSERT(instr->following(3)->IsBranchAndLinkToRegister()); 635 DCHECK(instr->following(3)->IsBranchAndLinkToRegister());
636 return pc + Assembler::kCallSizeWithoutRelocation; 636 return pc + Assembler::kCallSizeWithoutRelocation;
637 } else { 637 } else {
638 // Verify the instruction sequence. 638 // Verify the instruction sequence.
639 ASSERT(instr->IsLdrLiteralX()); 639 DCHECK(instr->IsLdrLiteralX());
640 ASSERT(instr->following(1)->IsBranchAndLinkToRegister()); 640 DCHECK(instr->following(1)->IsBranchAndLinkToRegister());
641 return pc + Assembler::kCallSizeWithRelocation; 641 return pc + Assembler::kCallSizeWithRelocation;
642 } 642 }
643 } 643 }
644 644
645 645
646 void Assembler::deserialization_set_special_target_at( 646 void Assembler::deserialization_set_special_target_at(
647 Address constant_pool_entry, Code* code, Address target) { 647 Address constant_pool_entry, Code* code, Address target) {
648 Memory::Address_at(constant_pool_entry) = target; 648 Memory::Address_at(constant_pool_entry) = target;
649 } 649 }
650 650
(...skipping 22 matching lines...)
673 set_target_address_at(pc, constant_pool, target, icache_flush_mode); 673 set_target_address_at(pc, constant_pool, target, icache_flush_mode);
674 } 674 }
675 675
676 676
677 int RelocInfo::target_address_size() { 677 int RelocInfo::target_address_size() {
678 return kPointerSize; 678 return kPointerSize;
679 } 679 }
680 680
681 681
682 Address RelocInfo::target_address() { 682 Address RelocInfo::target_address() {
683 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)); 683 DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
684 return Assembler::target_address_at(pc_, host_); 684 return Assembler::target_address_at(pc_, host_);
685 } 685 }
686 686
687 687
688 Address RelocInfo::target_address_address() { 688 Address RelocInfo::target_address_address() {
689 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) 689 DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
690 || rmode_ == EMBEDDED_OBJECT 690 || rmode_ == EMBEDDED_OBJECT
691 || rmode_ == EXTERNAL_REFERENCE); 691 || rmode_ == EXTERNAL_REFERENCE);
692 return Assembler::target_pointer_address_at(pc_); 692 return Assembler::target_pointer_address_at(pc_);
693 } 693 }
694 694
695 695
696 Address RelocInfo::constant_pool_entry_address() { 696 Address RelocInfo::constant_pool_entry_address() {
697 ASSERT(IsInConstantPool()); 697 DCHECK(IsInConstantPool());
698 return Assembler::target_pointer_address_at(pc_); 698 return Assembler::target_pointer_address_at(pc_);
699 } 699 }
700 700
701 701
702 Object* RelocInfo::target_object() { 702 Object* RelocInfo::target_object() {
703 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); 703 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
704 return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_)); 704 return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
705 } 705 }
706 706
707 707
708 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) { 708 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
709 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); 709 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
710 return Handle<Object>(reinterpret_cast<Object**>( 710 return Handle<Object>(reinterpret_cast<Object**>(
711 Assembler::target_address_at(pc_, host_))); 711 Assembler::target_address_at(pc_, host_)));
712 } 712 }
713 713
714 714
715 void RelocInfo::set_target_object(Object* target, 715 void RelocInfo::set_target_object(Object* target,
716 WriteBarrierMode write_barrier_mode, 716 WriteBarrierMode write_barrier_mode,
717 ICacheFlushMode icache_flush_mode) { 717 ICacheFlushMode icache_flush_mode) {
718 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); 718 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
719 Assembler::set_target_address_at(pc_, host_, 719 Assembler::set_target_address_at(pc_, host_,
720 reinterpret_cast<Address>(target), 720 reinterpret_cast<Address>(target),
721 icache_flush_mode); 721 icache_flush_mode);
722 if (write_barrier_mode == UPDATE_WRITE_BARRIER && 722 if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
723 host() != NULL && 723 host() != NULL &&
724 target->IsHeapObject()) { 724 target->IsHeapObject()) {
725 host()->GetHeap()->incremental_marking()->RecordWrite( 725 host()->GetHeap()->incremental_marking()->RecordWrite(
726 host(), &Memory::Object_at(pc_), HeapObject::cast(target)); 726 host(), &Memory::Object_at(pc_), HeapObject::cast(target));
727 } 727 }
728 } 728 }
729 729
730 730
731 Address RelocInfo::target_reference() { 731 Address RelocInfo::target_reference() {
732 ASSERT(rmode_ == EXTERNAL_REFERENCE); 732 DCHECK(rmode_ == EXTERNAL_REFERENCE);
733 return Assembler::target_address_at(pc_, host_); 733 return Assembler::target_address_at(pc_, host_);
734 } 734 }
735 735
736 736
737 Address RelocInfo::target_runtime_entry(Assembler* origin) { 737 Address RelocInfo::target_runtime_entry(Assembler* origin) {
738 ASSERT(IsRuntimeEntry(rmode_)); 738 DCHECK(IsRuntimeEntry(rmode_));
739 return target_address(); 739 return target_address();
740 } 740 }
741 741
742 742
743 void RelocInfo::set_target_runtime_entry(Address target, 743 void RelocInfo::set_target_runtime_entry(Address target,
744 WriteBarrierMode write_barrier_mode, 744 WriteBarrierMode write_barrier_mode,
745 ICacheFlushMode icache_flush_mode) { 745 ICacheFlushMode icache_flush_mode) {
746 ASSERT(IsRuntimeEntry(rmode_)); 746 DCHECK(IsRuntimeEntry(rmode_));
747 if (target_address() != target) { 747 if (target_address() != target) {
748 set_target_address(target, write_barrier_mode, icache_flush_mode); 748 set_target_address(target, write_barrier_mode, icache_flush_mode);
749 } 749 }
750 } 750 }
751 751
752 752
753 Handle<Cell> RelocInfo::target_cell_handle() { 753 Handle<Cell> RelocInfo::target_cell_handle() {
754 UNIMPLEMENTED(); 754 UNIMPLEMENTED();
755 Cell *null_cell = NULL; 755 Cell *null_cell = NULL;
756 return Handle<Cell>(null_cell); 756 return Handle<Cell>(null_cell);
757 } 757 }
758 758
759 759
760 Cell* RelocInfo::target_cell() { 760 Cell* RelocInfo::target_cell() {
761 ASSERT(rmode_ == RelocInfo::CELL); 761 DCHECK(rmode_ == RelocInfo::CELL);
762 return Cell::FromValueAddress(Memory::Address_at(pc_)); 762 return Cell::FromValueAddress(Memory::Address_at(pc_));
763 } 763 }
764 764
765 765
766 void RelocInfo::set_target_cell(Cell* cell, 766 void RelocInfo::set_target_cell(Cell* cell,
767 WriteBarrierMode write_barrier_mode, 767 WriteBarrierMode write_barrier_mode,
768 ICacheFlushMode icache_flush_mode) { 768 ICacheFlushMode icache_flush_mode) {
769 UNIMPLEMENTED(); 769 UNIMPLEMENTED();
770 } 770 }
771 771
772 772
773 static const int kNoCodeAgeSequenceLength = 5 * kInstructionSize; 773 static const int kNoCodeAgeSequenceLength = 5 * kInstructionSize;
774 static const int kCodeAgeStubEntryOffset = 3 * kInstructionSize; 774 static const int kCodeAgeStubEntryOffset = 3 * kInstructionSize;
775 775
776 776
777 Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) { 777 Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
778 UNREACHABLE(); // This should never be reached on ARM64. 778 UNREACHABLE(); // This should never be reached on ARM64.
779 return Handle<Object>(); 779 return Handle<Object>();
780 } 780 }
781 781
782 782
783 Code* RelocInfo::code_age_stub() { 783 Code* RelocInfo::code_age_stub() {
784 ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE); 784 DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
785 // Read the stub entry point from the code age sequence. 785 // Read the stub entry point from the code age sequence.
786 Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset; 786 Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset;
787 return Code::GetCodeFromTargetAddress(Memory::Address_at(stub_entry_address)); 787 return Code::GetCodeFromTargetAddress(Memory::Address_at(stub_entry_address));
788 } 788 }
789 789
790 790
791 void RelocInfo::set_code_age_stub(Code* stub, 791 void RelocInfo::set_code_age_stub(Code* stub,
792 ICacheFlushMode icache_flush_mode) { 792 ICacheFlushMode icache_flush_mode) {
793 ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE); 793 DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
794 ASSERT(!Code::IsYoungSequence(stub->GetIsolate(), pc_)); 794 DCHECK(!Code::IsYoungSequence(stub->GetIsolate(), pc_));
795 // Overwrite the stub entry point in the code age sequence. This is loaded as 795 // Overwrite the stub entry point in the code age sequence. This is loaded as
796 // a literal so there is no need to call FlushICache here. 796 // a literal so there is no need to call FlushICache here.
797 Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset; 797 Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset;
798 Memory::Address_at(stub_entry_address) = stub->instruction_start(); 798 Memory::Address_at(stub_entry_address) = stub->instruction_start();
799 } 799 }
800 800
801 801
802 Address RelocInfo::call_address() { 802 Address RelocInfo::call_address() {
803 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) || 803 DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
804 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence())); 804 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
805 // For the above sequences the RelocInfo points to the literal load that 805 // For the above sequences the RelocInfo points to the literal load that
806 // loads the call address. 806 // loads the call address.
807 return Assembler::target_address_at(pc_, host_); 807 return Assembler::target_address_at(pc_, host_);
808 } 808 }
809 809
810 810
811 void RelocInfo::set_call_address(Address target) { 811 void RelocInfo::set_call_address(Address target) {
812 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) || 812 DCHECK((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
813 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence())); 813 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
814 Assembler::set_target_address_at(pc_, host_, target); 814 Assembler::set_target_address_at(pc_, host_, target);
815 if (host() != NULL) { 815 if (host() != NULL) {
816 Object* target_code = Code::GetCodeFromTargetAddress(target); 816 Object* target_code = Code::GetCodeFromTargetAddress(target);
817 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode( 817 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
818 host(), this, HeapObject::cast(target_code)); 818 host(), this, HeapObject::cast(target_code));
819 } 819 }
820 } 820 }
821 821
822 822
823 void RelocInfo::WipeOut() { 823 void RelocInfo::WipeOut() {
824 ASSERT(IsEmbeddedObject(rmode_) || 824 DCHECK(IsEmbeddedObject(rmode_) ||
825 IsCodeTarget(rmode_) || 825 IsCodeTarget(rmode_) ||
826 IsRuntimeEntry(rmode_) || 826 IsRuntimeEntry(rmode_) ||
827 IsExternalReference(rmode_)); 827 IsExternalReference(rmode_));
828 Assembler::set_target_address_at(pc_, host_, NULL); 828 Assembler::set_target_address_at(pc_, host_, NULL);
829 } 829 }
830 830
831 831
832 bool RelocInfo::IsPatchedReturnSequence() { 832 bool RelocInfo::IsPatchedReturnSequence() {
833 // The sequence must be: 833 // The sequence must be:
834 // ldr ip0, [pc, #offset] 834 // ldr ip0, [pc, #offset]
(...skipping 51 matching lines...)
886 (RelocInfo::IsDebugBreakSlot(mode) && 886 (RelocInfo::IsDebugBreakSlot(mode) &&
887 IsPatchedDebugBreakSlotSequence()))) { 887 IsPatchedDebugBreakSlotSequence()))) {
888 StaticVisitor::VisitDebugTarget(heap, this); 888 StaticVisitor::VisitDebugTarget(heap, this);
889 } else if (RelocInfo::IsRuntimeEntry(mode)) { 889 } else if (RelocInfo::IsRuntimeEntry(mode)) {
890 StaticVisitor::VisitRuntimeEntry(this); 890 StaticVisitor::VisitRuntimeEntry(this);
891 } 891 }
892 } 892 }
893 893
894 894
895 LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) { 895 LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) {
896 ASSERT(rt.IsValid()); 896 DCHECK(rt.IsValid());
897 if (rt.IsRegister()) { 897 if (rt.IsRegister()) {
898 return rt.Is64Bits() ? LDR_x : LDR_w; 898 return rt.Is64Bits() ? LDR_x : LDR_w;
899 } else { 899 } else {
900 ASSERT(rt.IsFPRegister()); 900 DCHECK(rt.IsFPRegister());
901 return rt.Is64Bits() ? LDR_d : LDR_s; 901 return rt.Is64Bits() ? LDR_d : LDR_s;
902 } 902 }
903 } 903 }
904 904
905 905
906 LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt, 906 LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt,
907 const CPURegister& rt2) { 907 const CPURegister& rt2) {
908 ASSERT(AreSameSizeAndType(rt, rt2)); 908 DCHECK(AreSameSizeAndType(rt, rt2));
909 USE(rt2); 909 USE(rt2);
910 if (rt.IsRegister()) { 910 if (rt.IsRegister()) {
911 return rt.Is64Bits() ? LDP_x : LDP_w; 911 return rt.Is64Bits() ? LDP_x : LDP_w;
912 } else { 912 } else {
913 ASSERT(rt.IsFPRegister()); 913 DCHECK(rt.IsFPRegister());
914 return rt.Is64Bits() ? LDP_d : LDP_s; 914 return rt.Is64Bits() ? LDP_d : LDP_s;
915 } 915 }
916 } 916 }
917 917
918 918
919 LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) { 919 LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) {
920 ASSERT(rt.IsValid()); 920 DCHECK(rt.IsValid());
921 if (rt.IsRegister()) { 921 if (rt.IsRegister()) {
922 return rt.Is64Bits() ? STR_x : STR_w; 922 return rt.Is64Bits() ? STR_x : STR_w;
923 } else { 923 } else {
924 ASSERT(rt.IsFPRegister()); 924 DCHECK(rt.IsFPRegister());
925 return rt.Is64Bits() ? STR_d : STR_s; 925 return rt.Is64Bits() ? STR_d : STR_s;
926 } 926 }
927 } 927 }
928 928
929 929
930 LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt, 930 LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt,
931 const CPURegister& rt2) { 931 const CPURegister& rt2) {
932 ASSERT(AreSameSizeAndType(rt, rt2)); 932 DCHECK(AreSameSizeAndType(rt, rt2));
933 USE(rt2); 933 USE(rt2);
934 if (rt.IsRegister()) { 934 if (rt.IsRegister()) {
935 return rt.Is64Bits() ? STP_x : STP_w; 935 return rt.Is64Bits() ? STP_x : STP_w;
936 } else { 936 } else {
937 ASSERT(rt.IsFPRegister()); 937 DCHECK(rt.IsFPRegister());
938 return rt.Is64Bits() ? STP_d : STP_s; 938 return rt.Is64Bits() ? STP_d : STP_s;
939 } 939 }
940 } 940 }
941 941
942 942
943 LoadStorePairNonTemporalOp Assembler::LoadPairNonTemporalOpFor( 943 LoadStorePairNonTemporalOp Assembler::LoadPairNonTemporalOpFor(
944 const CPURegister& rt, const CPURegister& rt2) { 944 const CPURegister& rt, const CPURegister& rt2) {
945 ASSERT(AreSameSizeAndType(rt, rt2)); 945 DCHECK(AreSameSizeAndType(rt, rt2));
946 USE(rt2); 946 USE(rt2);
947 if (rt.IsRegister()) { 947 if (rt.IsRegister()) {
948 return rt.Is64Bits() ? LDNP_x : LDNP_w; 948 return rt.Is64Bits() ? LDNP_x : LDNP_w;
949 } else { 949 } else {
950 ASSERT(rt.IsFPRegister()); 950 DCHECK(rt.IsFPRegister());
951 return rt.Is64Bits() ? LDNP_d : LDNP_s; 951 return rt.Is64Bits() ? LDNP_d : LDNP_s;
952 } 952 }
953 } 953 }
954 954
955 955
956 LoadStorePairNonTemporalOp Assembler::StorePairNonTemporalOpFor( 956 LoadStorePairNonTemporalOp Assembler::StorePairNonTemporalOpFor(
957 const CPURegister& rt, const CPURegister& rt2) { 957 const CPURegister& rt, const CPURegister& rt2) {
958 ASSERT(AreSameSizeAndType(rt, rt2)); 958 DCHECK(AreSameSizeAndType(rt, rt2));
959 USE(rt2); 959 USE(rt2);
960 if (rt.IsRegister()) { 960 if (rt.IsRegister()) {
961 return rt.Is64Bits() ? STNP_x : STNP_w; 961 return rt.Is64Bits() ? STNP_x : STNP_w;
962 } else { 962 } else {
963 ASSERT(rt.IsFPRegister()); 963 DCHECK(rt.IsFPRegister());
964 return rt.Is64Bits() ? STNP_d : STNP_s; 964 return rt.Is64Bits() ? STNP_d : STNP_s;
965 } 965 }
966 } 966 }
967 967
968 968
969 LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) { 969 LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) {
970 if (rt.IsRegister()) { 970 if (rt.IsRegister()) {
971 return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit; 971 return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit;
972 } else { 972 } else {
973 ASSERT(rt.IsFPRegister()); 973 DCHECK(rt.IsFPRegister());
974 return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit; 974 return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit;
975 } 975 }
976 } 976 }
977 977
978 978
979 int Assembler::LinkAndGetInstructionOffsetTo(Label* label) { 979 int Assembler::LinkAndGetInstructionOffsetTo(Label* label) {
980 ASSERT(kStartOfLabelLinkChain == 0); 980 DCHECK(kStartOfLabelLinkChain == 0);
981 int offset = LinkAndGetByteOffsetTo(label); 981 int offset = LinkAndGetByteOffsetTo(label);
982 ASSERT(IsAligned(offset, kInstructionSize)); 982 DCHECK(IsAligned(offset, kInstructionSize));
983 return offset >> kInstructionSizeLog2; 983 return offset >> kInstructionSizeLog2;
984 } 984 }
985 985
986 986
987 Instr Assembler::Flags(FlagsUpdate S) { 987 Instr Assembler::Flags(FlagsUpdate S) {
988 if (S == SetFlags) { 988 if (S == SetFlags) {
989 return 1 << FlagsUpdate_offset; 989 return 1 << FlagsUpdate_offset;
990 } else if (S == LeaveFlags) { 990 } else if (S == LeaveFlags) {
991 return 0 << FlagsUpdate_offset; 991 return 0 << FlagsUpdate_offset;
992 } 992 }
(...skipping 34 matching lines...)
1027 } 1027 }
1028 1028
1029 1029
1030 Instr Assembler::ImmTestBranch(int imm14) { 1030 Instr Assembler::ImmTestBranch(int imm14) {
1031 CHECK(is_int14(imm14)); 1031 CHECK(is_int14(imm14));
1032 return truncate_to_int14(imm14) << ImmTestBranch_offset; 1032 return truncate_to_int14(imm14) << ImmTestBranch_offset;
1033 } 1033 }
1034 1034
1035 1035
1036 Instr Assembler::ImmTestBranchBit(unsigned bit_pos) { 1036 Instr Assembler::ImmTestBranchBit(unsigned bit_pos) {
1037 ASSERT(is_uint6(bit_pos)); 1037 DCHECK(is_uint6(bit_pos));
1038 // Subtract five from the shift offset, as we need bit 5 from bit_pos. 1038 // Subtract five from the shift offset, as we need bit 5 from bit_pos.
1039 unsigned b5 = bit_pos << (ImmTestBranchBit5_offset - 5); 1039 unsigned b5 = bit_pos << (ImmTestBranchBit5_offset - 5);
1040 unsigned b40 = bit_pos << ImmTestBranchBit40_offset; 1040 unsigned b40 = bit_pos << ImmTestBranchBit40_offset;
1041 b5 &= ImmTestBranchBit5_mask; 1041 b5 &= ImmTestBranchBit5_mask;
1042 b40 &= ImmTestBranchBit40_mask; 1042 b40 &= ImmTestBranchBit40_mask;
1043 return b5 | b40; 1043 return b5 | b40;
1044 } 1044 }
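
A worked bit split for the packing above; the field placement follows the ARM64 TBZ/TBNZ encoding, in which the tested bit index is stored as b5:b40 (exact positions are the *_offset constants, taken here as given):

// bit_pos = 37 = 0b100101:
//   b40 <- the low five bits (0b00101), shifted to ImmTestBranchBit40_offset
//   b5  <- bit 5 of bit_pos (1), shifted into the ImmTestBranchBit5 field
// A set b5 selects a bit in the upper half (positions 32-63), which only
// an X register can supply.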
1045 1045
1046 1046
1047 Instr Assembler::SF(Register rd) { 1047 Instr Assembler::SF(Register rd) {
1048 return rd.Is64Bits() ? SixtyFourBits : ThirtyTwoBits; 1048 return rd.Is64Bits() ? SixtyFourBits : ThirtyTwoBits;
1049 } 1049 }
1050 1050
1051 1051
1052 Instr Assembler::ImmAddSub(int64_t imm) { 1052 Instr Assembler::ImmAddSub(int64_t imm) {
1053 ASSERT(IsImmAddSub(imm)); 1053 DCHECK(IsImmAddSub(imm));
1054 if (is_uint12(imm)) { // No shift required. 1054 if (is_uint12(imm)) { // No shift required.
1055 return imm << ImmAddSub_offset; 1055 return imm << ImmAddSub_offset;
1056 } else { 1056 } else {
1057 return ((imm >> 12) << ImmAddSub_offset) | (1 << ShiftAddSub_offset); 1057 return ((imm >> 12) << ImmAddSub_offset) | (1 << ShiftAddSub_offset);
1058 } 1058 }
1059 } 1059 }
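
The two encodable shapes for the add/sub immediate above, as a worked note (IsImmAddSub is assumed to accept exactly these two forms):

// imm = 0x123    -> fits in 12 bits, encoded directly at ImmAddSub_offset.
// imm = 0x123000 -> not a uint12, so it is encoded as (imm >> 12) with the
//                   ShiftAddSub bit set, marking a 12-bit left shift.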
1060 1060
1061 1061
1062 Instr Assembler::ImmS(unsigned imms, unsigned reg_size) { 1062 Instr Assembler::ImmS(unsigned imms, unsigned reg_size) {
1063 ASSERT(((reg_size == kXRegSizeInBits) && is_uint6(imms)) || 1063 DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(imms)) ||
1064 ((reg_size == kWRegSizeInBits) && is_uint5(imms))); 1064 ((reg_size == kWRegSizeInBits) && is_uint5(imms)));
1065 USE(reg_size); 1065 USE(reg_size);
1066 return imms << ImmS_offset; 1066 return imms << ImmS_offset;
1067 } 1067 }
1068 1068
1069 1069
1070 Instr Assembler::ImmR(unsigned immr, unsigned reg_size) { 1070 Instr Assembler::ImmR(unsigned immr, unsigned reg_size) {
1071 ASSERT(((reg_size == kXRegSizeInBits) && is_uint6(immr)) || 1071 DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
1072 ((reg_size == kWRegSizeInBits) && is_uint5(immr))); 1072 ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
1073 USE(reg_size); 1073 USE(reg_size);
1074 ASSERT(is_uint6(immr)); 1074 DCHECK(is_uint6(immr));
1075 return immr << ImmR_offset; 1075 return immr << ImmR_offset;
1076 } 1076 }
1077 1077
1078 1078
1079 Instr Assembler::ImmSetBits(unsigned imms, unsigned reg_size) { 1079 Instr Assembler::ImmSetBits(unsigned imms, unsigned reg_size) {
1080 ASSERT((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)); 1080 DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
1081 ASSERT(is_uint6(imms)); 1081 DCHECK(is_uint6(imms));
1082 ASSERT((reg_size == kXRegSizeInBits) || is_uint6(imms + 3)); 1082 DCHECK((reg_size == kXRegSizeInBits) || is_uint6(imms + 3));
1083 USE(reg_size); 1083 USE(reg_size);
1084 return imms << ImmSetBits_offset; 1084 return imms << ImmSetBits_offset;
1085 } 1085 }
1086 1086
1087 1087
1088 Instr Assembler::ImmRotate(unsigned immr, unsigned reg_size) { 1088 Instr Assembler::ImmRotate(unsigned immr, unsigned reg_size) {
1089 ASSERT((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)); 1089 DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
1090 ASSERT(((reg_size == kXRegSizeInBits) && is_uint6(immr)) || 1090 DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
1091 ((reg_size == kWRegSizeInBits) && is_uint5(immr))); 1091 ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
1092 USE(reg_size); 1092 USE(reg_size);
1093 return immr << ImmRotate_offset; 1093 return immr << ImmRotate_offset;
1094 } 1094 }
1095 1095
1096 1096
1097 Instr Assembler::ImmLLiteral(int imm19) { 1097 Instr Assembler::ImmLLiteral(int imm19) {
1098 CHECK(is_int19(imm19)); 1098 CHECK(is_int19(imm19));
1099 return truncate_to_int19(imm19) << ImmLLiteral_offset; 1099 return truncate_to_int19(imm19) << ImmLLiteral_offset;
1100 } 1100 }
1101 1101
1102 1102
1103 Instr Assembler::BitN(unsigned bitn, unsigned reg_size) { 1103 Instr Assembler::BitN(unsigned bitn, unsigned reg_size) {
1104 ASSERT((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)); 1104 DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
1105 ASSERT((reg_size == kXRegSizeInBits) || (bitn == 0)); 1105 DCHECK((reg_size == kXRegSizeInBits) || (bitn == 0));
1106 USE(reg_size); 1106 USE(reg_size);
1107 return bitn << BitN_offset; 1107 return bitn << BitN_offset;
1108 } 1108 }
1109 1109
1110 1110
1111 Instr Assembler::ShiftDP(Shift shift) { 1111 Instr Assembler::ShiftDP(Shift shift) {
1112 ASSERT(shift == LSL || shift == LSR || shift == ASR || shift == ROR); 1112 DCHECK(shift == LSL || shift == LSR || shift == ASR || shift == ROR);
1113 return shift << ShiftDP_offset; 1113 return shift << ShiftDP_offset;
1114 } 1114 }
1115 1115
1116 1116
1117 Instr Assembler::ImmDPShift(unsigned amount) { 1117 Instr Assembler::ImmDPShift(unsigned amount) {
1118 ASSERT(is_uint6(amount)); 1118 DCHECK(is_uint6(amount));
1119 return amount << ImmDPShift_offset; 1119 return amount << ImmDPShift_offset;
1120 } 1120 }
1121 1121
1122 1122
1123 Instr Assembler::ExtendMode(Extend extend) { 1123 Instr Assembler::ExtendMode(Extend extend) {
1124 return extend << ExtendMode_offset; 1124 return extend << ExtendMode_offset;
1125 } 1125 }
1126 1126
1127 1127
1128 Instr Assembler::ImmExtendShift(unsigned left_shift) { 1128 Instr Assembler::ImmExtendShift(unsigned left_shift) {
1129 ASSERT(left_shift <= 4); 1129 DCHECK(left_shift <= 4);
1130 return left_shift << ImmExtendShift_offset; 1130 return left_shift << ImmExtendShift_offset;
1131 } 1131 }
1132 1132
1133 1133
1134 Instr Assembler::ImmCondCmp(unsigned imm) { 1134 Instr Assembler::ImmCondCmp(unsigned imm) {
1135 ASSERT(is_uint5(imm)); 1135 DCHECK(is_uint5(imm));
1136 return imm << ImmCondCmp_offset; 1136 return imm << ImmCondCmp_offset;
1137 } 1137 }
1138 1138
1139 1139
1140 Instr Assembler::Nzcv(StatusFlags nzcv) { 1140 Instr Assembler::Nzcv(StatusFlags nzcv) {
1141 return ((nzcv >> Flags_offset) & 0xf) << Nzcv_offset; 1141 return ((nzcv >> Flags_offset) & 0xf) << Nzcv_offset;
1142 } 1142 }
1143 1143
1144 1144
1145 Instr Assembler::ImmLSUnsigned(int imm12) { 1145 Instr Assembler::ImmLSUnsigned(int imm12) {
1146 ASSERT(is_uint12(imm12)); 1146 DCHECK(is_uint12(imm12));
1147 return imm12 << ImmLSUnsigned_offset; 1147 return imm12 << ImmLSUnsigned_offset;
1148 } 1148 }
1149 1149
1150 1150
1151 Instr Assembler::ImmLS(int imm9) { 1151 Instr Assembler::ImmLS(int imm9) {
1152 ASSERT(is_int9(imm9)); 1152 DCHECK(is_int9(imm9));
1153 return truncate_to_int9(imm9) << ImmLS_offset; 1153 return truncate_to_int9(imm9) << ImmLS_offset;
1154 } 1154 }
1155 1155
1156 1156
1157 Instr Assembler::ImmLSPair(int imm7, LSDataSize size) { 1157 Instr Assembler::ImmLSPair(int imm7, LSDataSize size) {
1158 ASSERT(((imm7 >> size) << size) == imm7); 1158 DCHECK(((imm7 >> size) << size) == imm7);
1159 int scaled_imm7 = imm7 >> size; 1159 int scaled_imm7 = imm7 >> size;
1160 ASSERT(is_int7(scaled_imm7)); 1160 DCHECK(is_int7(scaled_imm7));
1161 return truncate_to_int7(scaled_imm7) << ImmLSPair_offset; 1161 return truncate_to_int7(scaled_imm7) << ImmLSPair_offset;
1162 } 1162 }
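
A worked example of the pair-offset scaling above, assuming size == 3 (8-byte units, as for a pair of X registers):

// imm7 = 16 -> scaled_imm7 = 16 >> 3 = 2, which fits in a signed 7-bit
//              field; the first DCHECK requires imm7 to be a multiple of 8.
// imm7 = 12 -> rejected: (12 >> 3) << 3 == 8 != 12.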
1163 1163
1164 1164
1165 Instr Assembler::ImmShiftLS(unsigned shift_amount) { 1165 Instr Assembler::ImmShiftLS(unsigned shift_amount) {
1166 ASSERT(is_uint1(shift_amount)); 1166 DCHECK(is_uint1(shift_amount));
1167 return shift_amount << ImmShiftLS_offset; 1167 return shift_amount << ImmShiftLS_offset;
1168 } 1168 }
1169 1169
1170 1170
1171 Instr Assembler::ImmException(int imm16) { 1171 Instr Assembler::ImmException(int imm16) {
1172 ASSERT(is_uint16(imm16)); 1172 DCHECK(is_uint16(imm16));
1173 return imm16 << ImmException_offset; 1173 return imm16 << ImmException_offset;
1174 } 1174 }
1175 1175
1176 1176
1177 Instr Assembler::ImmSystemRegister(int imm15) { 1177 Instr Assembler::ImmSystemRegister(int imm15) {
1178 ASSERT(is_uint15(imm15)); 1178 DCHECK(is_uint15(imm15));
1179 return imm15 << ImmSystemRegister_offset; 1179 return imm15 << ImmSystemRegister_offset;
1180 } 1180 }
1181 1181
1182 1182
1183 Instr Assembler::ImmHint(int imm7) { 1183 Instr Assembler::ImmHint(int imm7) {
1184 ASSERT(is_uint7(imm7)); 1184 DCHECK(is_uint7(imm7));
1185 return imm7 << ImmHint_offset; 1185 return imm7 << ImmHint_offset;
1186 } 1186 }
1187 1187
1188 1188
1189 Instr Assembler::ImmBarrierDomain(int imm2) { 1189 Instr Assembler::ImmBarrierDomain(int imm2) {
1190 ASSERT(is_uint2(imm2)); 1190 DCHECK(is_uint2(imm2));
1191 return imm2 << ImmBarrierDomain_offset; 1191 return imm2 << ImmBarrierDomain_offset;
1192 } 1192 }
1193 1193
1194 1194
1195 Instr Assembler::ImmBarrierType(int imm2) { 1195 Instr Assembler::ImmBarrierType(int imm2) {
1196 ASSERT(is_uint2(imm2)); 1196 DCHECK(is_uint2(imm2));
1197 return imm2 << ImmBarrierType_offset; 1197 return imm2 << ImmBarrierType_offset;
1198 } 1198 }
1199 1199
1200 1200
1201 LSDataSize Assembler::CalcLSDataSize(LoadStoreOp op) { 1201 LSDataSize Assembler::CalcLSDataSize(LoadStoreOp op) {
1202 ASSERT((SizeLS_offset + SizeLS_width) == (kInstructionSize * 8)); 1202 DCHECK((SizeLS_offset + SizeLS_width) == (kInstructionSize * 8));
1203 return static_cast<LSDataSize>(op >> SizeLS_offset); 1203 return static_cast<LSDataSize>(op >> SizeLS_offset);
1204 } 1204 }
1205 1205
1206 1206
1207 Instr Assembler::ImmMoveWide(uint64_t imm) { 1207 Instr Assembler::ImmMoveWide(uint64_t imm) {
1208 ASSERT(is_uint16(imm)); 1208 DCHECK(is_uint16(imm));
1209 return imm << ImmMoveWide_offset; 1209 return imm << ImmMoveWide_offset;
1210 } 1210 }
1211 1211
1212 1212
1213 Instr Assembler::ShiftMoveWide(int64_t shift) { 1213 Instr Assembler::ShiftMoveWide(int64_t shift) {
1214 ASSERT(is_uint2(shift)); 1214 DCHECK(is_uint2(shift));
1215 return shift << ShiftMoveWide_offset; 1215 return shift << ShiftMoveWide_offset;
1216 } 1216 }
1217 1217
1218 1218
1219 Instr Assembler::FPType(FPRegister fd) { 1219 Instr Assembler::FPType(FPRegister fd) {
1220 return fd.Is64Bits() ? FP64 : FP32; 1220 return fd.Is64Bits() ? FP64 : FP32;
1221 } 1221 }
1222 1222
1223 1223
1224 Instr Assembler::FPScale(unsigned scale) { 1224 Instr Assembler::FPScale(unsigned scale) {
1225 ASSERT(is_uint6(scale)); 1225 DCHECK(is_uint6(scale));
1226 return scale << FPScale_offset; 1226 return scale << FPScale_offset;
1227 } 1227 }
1228 1228
1229 1229
1230 const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const { 1230 const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const {
1231 return reg.Is64Bits() ? xzr : wzr; 1231 return reg.Is64Bits() ? xzr : wzr;
1232 } 1232 }
1233 1233
1234 1234
1235 inline void Assembler::CheckBufferSpace() { 1235 inline void Assembler::CheckBufferSpace() {
1236 ASSERT(pc_ < (buffer_ + buffer_size_)); 1236 DCHECK(pc_ < (buffer_ + buffer_size_));
1237 if (buffer_space() < kGap) { 1237 if (buffer_space() < kGap) {
1238 GrowBuffer(); 1238 GrowBuffer();
1239 } 1239 }
1240 } 1240 }
1241 1241
1242 1242
1243 inline void Assembler::CheckBuffer() { 1243 inline void Assembler::CheckBuffer() {
1244 CheckBufferSpace(); 1244 CheckBufferSpace();
1245 if (pc_offset() >= next_veneer_pool_check_) { 1245 if (pc_offset() >= next_veneer_pool_check_) {
1246 CheckVeneerPool(false, true); 1246 CheckVeneerPool(false, true);
1247 } 1247 }
1248 if (pc_offset() >= next_constant_pool_check_) { 1248 if (pc_offset() >= next_constant_pool_check_) {
1249 CheckConstPool(false, true); 1249 CheckConstPool(false, true);
1250 } 1250 }
1251 } 1251 }
1252 1252
1253 1253
1254 TypeFeedbackId Assembler::RecordedAstId() { 1254 TypeFeedbackId Assembler::RecordedAstId() {
1255 ASSERT(!recorded_ast_id_.IsNone()); 1255 DCHECK(!recorded_ast_id_.IsNone());
1256 return recorded_ast_id_; 1256 return recorded_ast_id_;
1257 } 1257 }
1258 1258
1259 1259
1260 void Assembler::ClearRecordedAstId() { 1260 void Assembler::ClearRecordedAstId() {
1261 recorded_ast_id_ = TypeFeedbackId::None(); 1261 recorded_ast_id_ = TypeFeedbackId::None();
1262 } 1262 }
1263 1263
1264 1264
1265 } } // namespace v8::internal 1265 } } // namespace v8::internal
1266 1266
1267 #endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_ 1267 #endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_