Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(3)

Side by Side Diff: src/x87/macro-assembler-x87.cc

Issue 293743005: Introduce x87 port (Closed) Base URL: git://github.com/v8/v8.git@master
Patch Set: rebase Created 6 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/x87/macro-assembler-x87.h ('k') | src/x87/regexp-macro-assembler-x87.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "v8.h" 5 #include "v8.h"
6 6
7 #if V8_TARGET_ARCH_IA32 7 #if V8_TARGET_ARCH_X87
8 8
9 #include "bootstrapper.h" 9 #include "bootstrapper.h"
10 #include "codegen.h" 10 #include "codegen.h"
11 #include "cpu-profiler.h" 11 #include "cpu-profiler.h"
12 #include "debug.h" 12 #include "debug.h"
13 #include "isolate-inl.h" 13 #include "isolate-inl.h"
14 #include "runtime.h" 14 #include "runtime.h"
15 #include "serialize.h" 15 #include "serialize.h"
16 16
17 namespace v8 { 17 namespace v8 {
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after
136 test_b(Operand(scratch, MemoryChunk::kFlagsOffset), 136 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
137 static_cast<uint8_t>(mask)); 137 static_cast<uint8_t>(mask));
138 j(cc, condition_met, condition_met_distance); 138 j(cc, condition_met, condition_met_distance);
139 } 139 }
140 140
141 141
142 void MacroAssembler::RememberedSetHelper( 142 void MacroAssembler::RememberedSetHelper(
143 Register object, // Only used for debug checks. 143 Register object, // Only used for debug checks.
144 Register addr, 144 Register addr,
145 Register scratch, 145 Register scratch,
146 SaveFPRegsMode save_fp,
147 MacroAssembler::RememberedSetFinalAction and_then) { 146 MacroAssembler::RememberedSetFinalAction and_then) {
148 Label done; 147 Label done;
149 if (emit_debug_code()) { 148 if (emit_debug_code()) {
150 Label ok; 149 Label ok;
151 JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear); 150 JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
152 int3(); 151 int3();
153 bind(&ok); 152 bind(&ok);
154 } 153 }
155 // Load store buffer top. 154 // Load store buffer top.
156 ExternalReference store_buffer = 155 ExternalReference store_buffer =
(...skipping 11 matching lines...) Expand all
168 if (and_then == kReturnAtEnd) { 167 if (and_then == kReturnAtEnd) {
169 Label buffer_overflowed; 168 Label buffer_overflowed;
170 j(not_equal, &buffer_overflowed, Label::kNear); 169 j(not_equal, &buffer_overflowed, Label::kNear);
171 ret(0); 170 ret(0);
172 bind(&buffer_overflowed); 171 bind(&buffer_overflowed);
173 } else { 172 } else {
174 ASSERT(and_then == kFallThroughAtEnd); 173 ASSERT(and_then == kFallThroughAtEnd);
175 j(equal, &done, Label::kNear); 174 j(equal, &done, Label::kNear);
176 } 175 }
177 StoreBufferOverflowStub store_buffer_overflow = 176 StoreBufferOverflowStub store_buffer_overflow =
178 StoreBufferOverflowStub(isolate(), save_fp); 177 StoreBufferOverflowStub(isolate());
179 CallStub(&store_buffer_overflow); 178 CallStub(&store_buffer_overflow);
180 if (and_then == kReturnAtEnd) { 179 if (and_then == kReturnAtEnd) {
181 ret(0); 180 ret(0);
182 } else { 181 } else {
183 ASSERT(and_then == kFallThroughAtEnd); 182 ASSERT(and_then == kFallThroughAtEnd);
184 bind(&done); 183 bind(&done);
185 } 184 }
186 } 185 }
187 186
188 187
189 void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
190 XMMRegister scratch_reg,
191 Register result_reg) {
192 Label done;
193 Label conv_failure;
194 xorps(scratch_reg, scratch_reg);
195 cvtsd2si(result_reg, input_reg);
196 test(result_reg, Immediate(0xFFFFFF00));
197 j(zero, &done, Label::kNear);
198 cmp(result_reg, Immediate(0x1));
199 j(overflow, &conv_failure, Label::kNear);
200 mov(result_reg, Immediate(0));
201 setcc(sign, result_reg);
202 sub(result_reg, Immediate(1));
203 and_(result_reg, Immediate(255));
204 jmp(&done, Label::kNear);
205 bind(&conv_failure);
206 Move(result_reg, Immediate(0));
207 ucomisd(input_reg, scratch_reg);
208 j(below, &done, Label::kNear);
209 Move(result_reg, Immediate(255));
210 bind(&done);
211 }
212
213
214 void MacroAssembler::ClampUint8(Register reg) { 188 void MacroAssembler::ClampUint8(Register reg) {
215 Label done; 189 Label done;
216 test(reg, Immediate(0xFFFFFF00)); 190 test(reg, Immediate(0xFFFFFF00));
217 j(zero, &done, Label::kNear); 191 j(zero, &done, Label::kNear);
218 setcc(negative, reg); // 1 if negative, 0 if positive. 192 setcc(negative, reg); // 1 if negative, 0 if positive.
219 dec_b(reg); // 0 if negative, 255 if positive. 193 dec_b(reg); // 0 if negative, 255 if positive.
220 bind(&done); 194 bind(&done);
221 } 195 }
222 196
223 197
224 void MacroAssembler::SlowTruncateToI(Register result_reg, 198 void MacroAssembler::SlowTruncateToI(Register result_reg,
225 Register input_reg, 199 Register input_reg,
226 int offset) { 200 int offset) {
227 DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true); 201 DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
228 call(stub.GetCode(), RelocInfo::CODE_TARGET); 202 call(stub.GetCode(), RelocInfo::CODE_TARGET);
229 } 203 }
230 204
231 205
232 void MacroAssembler::TruncateDoubleToI(Register result_reg, 206 void MacroAssembler::TruncateX87TOSToI(Register result_reg) {
233 XMMRegister input_reg) {
234 Label done;
235 cvttsd2si(result_reg, Operand(input_reg));
236 cmp(result_reg, 0x1);
237 j(no_overflow, &done, Label::kNear);
238
239 sub(esp, Immediate(kDoubleSize)); 207 sub(esp, Immediate(kDoubleSize));
240 movsd(MemOperand(esp, 0), input_reg); 208 fst_d(MemOperand(esp, 0));
241 SlowTruncateToI(result_reg, esp, 0); 209 SlowTruncateToI(result_reg, esp, 0);
242 add(esp, Immediate(kDoubleSize)); 210 add(esp, Immediate(kDoubleSize));
211 }
212
213
214 void MacroAssembler::X87TOSToI(Register result_reg,
215 MinusZeroMode minus_zero_mode,
216 Label* conversion_failed,
217 Label::Distance dst) {
218 Label done;
219 sub(esp, Immediate(kPointerSize));
220 fld(0);
221 fist_s(MemOperand(esp, 0));
222 fild_s(MemOperand(esp, 0));
223 pop(result_reg);
224 FCmp();
225 j(not_equal, conversion_failed, dst);
226 j(parity_even, conversion_failed, dst);
227 if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
228 test(result_reg, Operand(result_reg));
229 j(not_zero, &done, Label::kNear);
230 // To check for minus zero, we load the value again as float, and check
231 // if that is still 0.
232 sub(esp, Immediate(kPointerSize));
233 fst_s(MemOperand(esp, 0));
234 pop(result_reg);
235 test(result_reg, Operand(result_reg));
236 j(not_zero, conversion_failed, dst);
237 }
243 bind(&done); 238 bind(&done);
244 } 239 }
245 240
246 241
247 void MacroAssembler::DoubleToI(Register result_reg,
248 XMMRegister input_reg,
249 XMMRegister scratch,
250 MinusZeroMode minus_zero_mode,
251 Label* conversion_failed,
252 Label::Distance dst) {
253 ASSERT(!input_reg.is(scratch));
254 cvttsd2si(result_reg, Operand(input_reg));
255 Cvtsi2sd(scratch, Operand(result_reg));
256 ucomisd(scratch, input_reg);
257 j(not_equal, conversion_failed, dst);
258 j(parity_even, conversion_failed, dst); // NaN.
259 if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
260 Label done;
261 // The integer converted back is equal to the original. We
262 // only have to test if we got -0 as an input.
263 test(result_reg, Operand(result_reg));
264 j(not_zero, &done, Label::kNear);
265 movmskpd(result_reg, input_reg);
266 // Bit 0 contains the sign of the double in input_reg.
267 // If input was positive, we are ok and return 0, otherwise
268 // jump to conversion_failed.
269 and_(result_reg, 1);
270 j(not_zero, conversion_failed, dst);
271 bind(&done);
272 }
273 }
274
275
276 void MacroAssembler::TruncateHeapNumberToI(Register result_reg, 242 void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
277 Register input_reg) { 243 Register input_reg) {
278 Label done, slow_case; 244 Label done, slow_case;
279 245
280 if (CpuFeatures::IsSupported(SSE3)) { 246 SlowTruncateToI(result_reg, input_reg);
281 CpuFeatureScope scope(this, SSE3);
282 Label convert;
283 // Use more powerful conversion when sse3 is available.
284 // Load x87 register with heap number.
285 fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
286 // Get exponent alone and check for too-big exponent.
287 mov(result_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
288 and_(result_reg, HeapNumber::kExponentMask);
289 const uint32_t kTooBigExponent =
290 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
291 cmp(Operand(result_reg), Immediate(kTooBigExponent));
292 j(greater_equal, &slow_case, Label::kNear);
293
294 // Reserve space for 64 bit answer.
295 sub(Operand(esp), Immediate(kDoubleSize));
296 // Do conversion, which cannot fail because we checked the exponent.
297 fisttp_d(Operand(esp, 0));
298 mov(result_reg, Operand(esp, 0)); // Low word of answer is the result.
299 add(Operand(esp), Immediate(kDoubleSize));
300 jmp(&done, Label::kNear);
301
302 // Slow case.
303 bind(&slow_case);
304 if (input_reg.is(result_reg)) {
305 // Input is clobbered. Restore number from fpu stack
306 sub(Operand(esp), Immediate(kDoubleSize));
307 fstp_d(Operand(esp, 0));
308 SlowTruncateToI(result_reg, esp, 0);
309 add(esp, Immediate(kDoubleSize));
310 } else {
311 fstp(0);
312 SlowTruncateToI(result_reg, input_reg);
313 }
314 } else {
315 movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
316 cvttsd2si(result_reg, Operand(xmm0));
317 cmp(result_reg, 0x1);
318 j(no_overflow, &done, Label::kNear);
 319 // Check if the input was 0x80000000 (kMinInt).
320 // If no, then we got an overflow and we deoptimize.
321 ExternalReference min_int = ExternalReference::address_of_min_int();
322 ucomisd(xmm0, Operand::StaticVariable(min_int));
323 j(not_equal, &slow_case, Label::kNear);
324 j(parity_even, &slow_case, Label::kNear); // NaN.
325 jmp(&done, Label::kNear);
326
327 // Slow case.
328 bind(&slow_case);
329 if (input_reg.is(result_reg)) {
330 // Input is clobbered. Restore number from double scratch.
331 sub(esp, Immediate(kDoubleSize));
332 movsd(MemOperand(esp, 0), xmm0);
333 SlowTruncateToI(result_reg, esp, 0);
334 add(esp, Immediate(kDoubleSize));
335 } else {
336 SlowTruncateToI(result_reg, input_reg);
337 }
338 }
339 bind(&done); 247 bind(&done);
340 } 248 }
341 249
342 250
343 void MacroAssembler::TaggedToI(Register result_reg, 251 void MacroAssembler::TaggedToI(Register result_reg,
344 Register input_reg, 252 Register input_reg,
345 XMMRegister temp,
346 MinusZeroMode minus_zero_mode, 253 MinusZeroMode minus_zero_mode,
347 Label* lost_precision) { 254 Label* lost_precision) {
348 Label done; 255 Label done;
349 ASSERT(!temp.is(xmm0));
350 256
351 cmp(FieldOperand(input_reg, HeapObject::kMapOffset), 257 cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
352 isolate()->factory()->heap_number_map()); 258 isolate()->factory()->heap_number_map());
353 j(not_equal, lost_precision, Label::kNear); 259 j(not_equal, lost_precision, Label::kNear);
354 260
355 ASSERT(!temp.is(no_xmm_reg)); 261 // TODO(olivf) Converting a number on the fpu is actually quite slow. We
356 262 // should first try a fast conversion and then bailout to this slow case.
357 movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); 263 Label lost_precision_pop, zero_check;
358 cvttsd2si(result_reg, Operand(xmm0)); 264 Label* lost_precision_int = (minus_zero_mode == FAIL_ON_MINUS_ZERO)
359 Cvtsi2sd(temp, Operand(result_reg)); 265 ? &lost_precision_pop : lost_precision;
360 ucomisd(xmm0, temp); 266 sub(esp, Immediate(kPointerSize));
361 RecordComment("Deferred TaggedToI: lost precision"); 267 fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
362 j(not_equal, lost_precision, Label::kNear); 268 if (minus_zero_mode == FAIL_ON_MINUS_ZERO) fld(0);
363 RecordComment("Deferred TaggedToI: NaN"); 269 fist_s(MemOperand(esp, 0));
364 j(parity_even, lost_precision, Label::kNear); 270 fild_s(MemOperand(esp, 0));
271 FCmp();
272 pop(result_reg);
273 j(not_equal, lost_precision_int, Label::kNear);
274 j(parity_even, lost_precision_int, Label::kNear); // NaN.
365 if (minus_zero_mode == FAIL_ON_MINUS_ZERO) { 275 if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
366 test(result_reg, Operand(result_reg)); 276 test(result_reg, Operand(result_reg));
367 j(not_zero, &done, Label::kNear); 277 j(zero, &zero_check, Label::kNear);
368 movmskpd(result_reg, xmm0); 278 fstp(0);
369 and_(result_reg, 1); 279 jmp(&done, Label::kNear);
370 RecordComment("Deferred TaggedToI: minus zero"); 280 bind(&zero_check);
371 j(not_zero, lost_precision, Label::kNear); 281 // To check for minus zero, we load the value again as float, and check
282 // if that is still 0.
283 sub(esp, Immediate(kPointerSize));
284 fstp_s(Operand(esp, 0));
285 pop(result_reg);
286 test(result_reg, Operand(result_reg));
287 j(zero, &done, Label::kNear);
288 jmp(lost_precision, Label::kNear);
289
290 bind(&lost_precision_pop);
291 fstp(0);
292 jmp(lost_precision, Label::kNear);
372 } 293 }
373 bind(&done); 294 bind(&done);
374 } 295 }
375 296
376 297
377 void MacroAssembler::LoadUint32(XMMRegister dst, 298 void MacroAssembler::LoadUint32NoSSE2(Register src) {
378 Register src,
379 XMMRegister scratch) {
380 Label done; 299 Label done;
300 push(src);
301 fild_s(Operand(esp, 0));
381 cmp(src, Immediate(0)); 302 cmp(src, Immediate(0));
303 j(not_sign, &done, Label::kNear);
382 ExternalReference uint32_bias = 304 ExternalReference uint32_bias =
383 ExternalReference::address_of_uint32_bias(); 305 ExternalReference::address_of_uint32_bias();
384 movsd(scratch, Operand::StaticVariable(uint32_bias)); 306 fld_d(Operand::StaticVariable(uint32_bias));
385 Cvtsi2sd(dst, src); 307 faddp(1);
386 j(not_sign, &done, Label::kNear);
387 addsd(dst, scratch);
388 bind(&done); 308 bind(&done);
309 add(esp, Immediate(kPointerSize));
389 } 310 }
390 311
391 312
392 void MacroAssembler::RecordWriteArray(Register object, 313 void MacroAssembler::RecordWriteArray(Register object,
393 Register value, 314 Register value,
394 Register index, 315 Register index,
395 SaveFPRegsMode save_fp,
396 RememberedSetAction remembered_set_action, 316 RememberedSetAction remembered_set_action,
397 SmiCheck smi_check) { 317 SmiCheck smi_check) {
398 // First, check if a write barrier is even needed. The tests below 318 // First, check if a write barrier is even needed. The tests below
399 // catch stores of Smis. 319 // catch stores of Smis.
400 Label done; 320 Label done;
401 321
402 // Skip barrier if writing a smi. 322 // Skip barrier if writing a smi.
403 if (smi_check == INLINE_SMI_CHECK) { 323 if (smi_check == INLINE_SMI_CHECK) {
404 ASSERT_EQ(0, kSmiTag); 324 ASSERT_EQ(0, kSmiTag);
405 test(value, Immediate(kSmiTagMask)); 325 test(value, Immediate(kSmiTagMask));
406 j(zero, &done); 326 j(zero, &done);
407 } 327 }
408 328
409 // Array access: calculate the destination address in the same manner as 329 // Array access: calculate the destination address in the same manner as
410 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset 330 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
411 // into an array of words. 331 // into an array of words.
412 Register dst = index; 332 Register dst = index;
413 lea(dst, Operand(object, index, times_half_pointer_size, 333 lea(dst, Operand(object, index, times_half_pointer_size,
414 FixedArray::kHeaderSize - kHeapObjectTag)); 334 FixedArray::kHeaderSize - kHeapObjectTag));
415 335
416 RecordWrite( 336 RecordWrite(
417 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK); 337 object, dst, value, remembered_set_action, OMIT_SMI_CHECK);
418 338
419 bind(&done); 339 bind(&done);
420 340
421 // Clobber clobbered input registers when running with the debug-code flag 341 // Clobber clobbered input registers when running with the debug-code flag
422 // turned on to provoke errors. 342 // turned on to provoke errors.
423 if (emit_debug_code()) { 343 if (emit_debug_code()) {
424 mov(value, Immediate(BitCast<int32_t>(kZapValue))); 344 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
425 mov(index, Immediate(BitCast<int32_t>(kZapValue))); 345 mov(index, Immediate(BitCast<int32_t>(kZapValue)));
426 } 346 }
427 } 347 }
428 348
429 349
430 void MacroAssembler::RecordWriteField( 350 void MacroAssembler::RecordWriteField(
431 Register object, 351 Register object,
432 int offset, 352 int offset,
433 Register value, 353 Register value,
434 Register dst, 354 Register dst,
435 SaveFPRegsMode save_fp,
436 RememberedSetAction remembered_set_action, 355 RememberedSetAction remembered_set_action,
437 SmiCheck smi_check) { 356 SmiCheck smi_check) {
438 // First, check if a write barrier is even needed. The tests below 357 // First, check if a write barrier is even needed. The tests below
439 // catch stores of Smis. 358 // catch stores of Smis.
440 Label done; 359 Label done;
441 360
442 // Skip barrier if writing a smi. 361 // Skip barrier if writing a smi.
443 if (smi_check == INLINE_SMI_CHECK) { 362 if (smi_check == INLINE_SMI_CHECK) {
444 JumpIfSmi(value, &done, Label::kNear); 363 JumpIfSmi(value, &done, Label::kNear);
445 } 364 }
446 365
447 // Although the object register is tagged, the offset is relative to the start 366 // Although the object register is tagged, the offset is relative to the start
 448 // of the object, so the offset must be a multiple of kPointerSize. 367 // of the object, so the offset must be a multiple of kPointerSize.
449 ASSERT(IsAligned(offset, kPointerSize)); 368 ASSERT(IsAligned(offset, kPointerSize));
450 369
451 lea(dst, FieldOperand(object, offset)); 370 lea(dst, FieldOperand(object, offset));
452 if (emit_debug_code()) { 371 if (emit_debug_code()) {
453 Label ok; 372 Label ok;
454 test_b(dst, (1 << kPointerSizeLog2) - 1); 373 test_b(dst, (1 << kPointerSizeLog2) - 1);
455 j(zero, &ok, Label::kNear); 374 j(zero, &ok, Label::kNear);
456 int3(); 375 int3();
457 bind(&ok); 376 bind(&ok);
458 } 377 }
459 378
460 RecordWrite( 379 RecordWrite(
461 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK); 380 object, dst, value, remembered_set_action, OMIT_SMI_CHECK);
462 381
463 bind(&done); 382 bind(&done);
464 383
465 // Clobber clobbered input registers when running with the debug-code flag 384 // Clobber clobbered input registers when running with the debug-code flag
466 // turned on to provoke errors. 385 // turned on to provoke errors.
467 if (emit_debug_code()) { 386 if (emit_debug_code()) {
468 mov(value, Immediate(BitCast<int32_t>(kZapValue))); 387 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
469 mov(dst, Immediate(BitCast<int32_t>(kZapValue))); 388 mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
470 } 389 }
471 } 390 }
472 391
473 392
474 void MacroAssembler::RecordWriteForMap( 393 void MacroAssembler::RecordWriteForMap(
475 Register object, 394 Register object,
476 Handle<Map> map, 395 Handle<Map> map,
477 Register scratch1, 396 Register scratch1,
478 Register scratch2, 397 Register scratch2) {
479 SaveFPRegsMode save_fp) {
480 Label done; 398 Label done;
481 399
482 Register address = scratch1; 400 Register address = scratch1;
483 Register value = scratch2; 401 Register value = scratch2;
484 if (emit_debug_code()) { 402 if (emit_debug_code()) {
485 Label ok; 403 Label ok;
486 lea(address, FieldOperand(object, HeapObject::kMapOffset)); 404 lea(address, FieldOperand(object, HeapObject::kMapOffset));
487 test_b(address, (1 << kPointerSizeLog2) - 1); 405 test_b(address, (1 << kPointerSizeLog2) - 1);
488 j(zero, &ok, Label::kNear); 406 j(zero, &ok, Label::kNear);
489 int3(); 407 int3();
(...skipping 23 matching lines...) Expand all
513 zero, 431 zero,
514 &done, 432 &done,
515 Label::kNear); 433 Label::kNear);
516 434
517 // Delay the initialization of |address| and |value| for the stub until it's 435 // Delay the initialization of |address| and |value| for the stub until it's
 518 // known that they will be needed. Up until this point their values are not 436 // known that they will be needed. Up until this point their values are not
519 // needed since they are embedded in the operands of instructions that need 437 // needed since they are embedded in the operands of instructions that need
520 // them. 438 // them.
521 lea(address, FieldOperand(object, HeapObject::kMapOffset)); 439 lea(address, FieldOperand(object, HeapObject::kMapOffset));
522 mov(value, Immediate(map)); 440 mov(value, Immediate(map));
523 RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET, 441 RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET);
524 save_fp);
525 CallStub(&stub); 442 CallStub(&stub);
526 443
527 bind(&done); 444 bind(&done);
528 445
529 // Clobber clobbered input registers when running with the debug-code flag 446 // Clobber clobbered input registers when running with the debug-code flag
530 // turned on to provoke errors. 447 // turned on to provoke errors.
531 if (emit_debug_code()) { 448 if (emit_debug_code()) {
532 mov(value, Immediate(BitCast<int32_t>(kZapValue))); 449 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
533 mov(scratch1, Immediate(BitCast<int32_t>(kZapValue))); 450 mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
534 mov(scratch2, Immediate(BitCast<int32_t>(kZapValue))); 451 mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
535 } 452 }
536 } 453 }
537 454
538 455
539 void MacroAssembler::RecordWrite(Register object, 456 void MacroAssembler::RecordWrite(Register object,
540 Register address, 457 Register address,
541 Register value, 458 Register value,
542 SaveFPRegsMode fp_mode,
543 RememberedSetAction remembered_set_action, 459 RememberedSetAction remembered_set_action,
544 SmiCheck smi_check) { 460 SmiCheck smi_check) {
545 ASSERT(!object.is(value)); 461 ASSERT(!object.is(value));
546 ASSERT(!object.is(address)); 462 ASSERT(!object.is(address));
547 ASSERT(!value.is(address)); 463 ASSERT(!value.is(address));
548 AssertNotSmi(object); 464 AssertNotSmi(object);
549 465
550 if (remembered_set_action == OMIT_REMEMBERED_SET && 466 if (remembered_set_action == OMIT_REMEMBERED_SET &&
551 !FLAG_incremental_marking) { 467 !FLAG_incremental_marking) {
552 return; 468 return;
(...skipping 26 matching lines...) Expand all
579 zero, 495 zero,
580 &done, 496 &done,
581 Label::kNear); 497 Label::kNear);
582 CheckPageFlag(object, 498 CheckPageFlag(object,
583 value, // Used as scratch. 499 value, // Used as scratch.
584 MemoryChunk::kPointersFromHereAreInterestingMask, 500 MemoryChunk::kPointersFromHereAreInterestingMask,
585 zero, 501 zero,
586 &done, 502 &done,
587 Label::kNear); 503 Label::kNear);
588 504
589 RecordWriteStub stub(isolate(), object, value, address, remembered_set_action, 505 RecordWriteStub stub(isolate(), object, value, address,
590 fp_mode); 506 remembered_set_action);
591 CallStub(&stub); 507 CallStub(&stub);
592 508
593 bind(&done); 509 bind(&done);
594 510
595 // Clobber clobbered registers when running with the debug-code flag 511 // Clobber clobbered registers when running with the debug-code flag
596 // turned on to provoke errors. 512 // turned on to provoke errors.
597 if (emit_debug_code()) { 513 if (emit_debug_code()) {
598 mov(address, Immediate(BitCast<int32_t>(kZapValue))); 514 mov(address, Immediate(BitCast<int32_t>(kZapValue)));
599 mov(value, Immediate(BitCast<int32_t>(kZapValue))); 515 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
600 } 516 }
601 } 517 }
602 518
603 519
604 void MacroAssembler::DebugBreak() { 520 void MacroAssembler::DebugBreak() {
605 Move(eax, Immediate(0)); 521 Move(eax, Immediate(0));
606 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate()))); 522 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
607 CEntryStub ces(isolate(), 1); 523 CEntryStub ces(isolate(), 1);
608 call(ces.GetCode(), RelocInfo::DEBUG_BREAK); 524 call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
609 } 525 }
610 526
611 527
612 void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) {
613 xorps(dst, dst);
614 cvtsi2sd(dst, src);
615 }
616
617
618 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) { 528 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
619 static const int kMaxImmediateBits = 17; 529 static const int kMaxImmediateBits = 17;
620 if (!RelocInfo::IsNone(x.rmode_)) return false; 530 if (!RelocInfo::IsNone(x.rmode_)) return false;
621 return !is_intn(x.x_, kMaxImmediateBits); 531 return !is_intn(x.x_, kMaxImmediateBits);
622 } 532 }
623 533
624 534
625 void MacroAssembler::SafeMove(Register dst, const Immediate& x) { 535 void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
626 if (IsUnsafeImmediate(x) && jit_cookie() != 0) { 536 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
627 Move(dst, Immediate(x.x_ ^ jit_cookie())); 537 Move(dst, Immediate(x.x_ ^ jit_cookie()));
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
693 cmpb(FieldOperand(map, Map::kBitField2Offset), 603 cmpb(FieldOperand(map, Map::kBitField2Offset),
694 Map::kMaximumBitField2FastHoleySmiElementValue); 604 Map::kMaximumBitField2FastHoleySmiElementValue);
695 j(above, fail, distance); 605 j(above, fail, distance);
696 } 606 }
697 607
698 608
699 void MacroAssembler::StoreNumberToDoubleElements( 609 void MacroAssembler::StoreNumberToDoubleElements(
700 Register maybe_number, 610 Register maybe_number,
701 Register elements, 611 Register elements,
702 Register key, 612 Register key,
703 Register scratch1, 613 Register scratch,
704 XMMRegister scratch2,
705 Label* fail, 614 Label* fail,
706 int elements_offset) { 615 int elements_offset) {
707 Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value; 616 Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
708 JumpIfSmi(maybe_number, &smi_value, Label::kNear); 617 JumpIfSmi(maybe_number, &smi_value, Label::kNear);
709 618
710 CheckMap(maybe_number, 619 CheckMap(maybe_number,
711 isolate()->factory()->heap_number_map(), 620 isolate()->factory()->heap_number_map(),
712 fail, 621 fail,
713 DONT_DO_SMI_CHECK); 622 DONT_DO_SMI_CHECK);
714 623
715 // Double value, canonicalize NaN. 624 // Double value, canonicalize NaN.
716 uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32); 625 uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
717 cmp(FieldOperand(maybe_number, offset), 626 cmp(FieldOperand(maybe_number, offset),
718 Immediate(kNaNOrInfinityLowerBoundUpper32)); 627 Immediate(kNaNOrInfinityLowerBoundUpper32));
719 j(greater_equal, &maybe_nan, Label::kNear); 628 j(greater_equal, &maybe_nan, Label::kNear);
720 629
721 bind(&not_nan); 630 bind(&not_nan);
722 ExternalReference canonical_nan_reference = 631 ExternalReference canonical_nan_reference =
723 ExternalReference::address_of_canonical_non_hole_nan(); 632 ExternalReference::address_of_canonical_non_hole_nan();
724 movsd(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset)); 633 fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
725 bind(&have_double_value); 634 bind(&have_double_value);
726 movsd(FieldOperand(elements, key, times_4, 635 fstp_d(FieldOperand(elements, key, times_4,
727 FixedDoubleArray::kHeaderSize - elements_offset), 636 FixedDoubleArray::kHeaderSize - elements_offset));
728 scratch2);
729 jmp(&done); 637 jmp(&done);
730 638
731 bind(&maybe_nan); 639 bind(&maybe_nan);
732 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise 640 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
733 // it's an Infinity, and the non-NaN code path applies. 641 // it's an Infinity, and the non-NaN code path applies.
734 j(greater, &is_nan, Label::kNear); 642 j(greater, &is_nan, Label::kNear);
735 cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0)); 643 cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
736 j(zero, &not_nan); 644 j(zero, &not_nan);
737 bind(&is_nan); 645 bind(&is_nan);
738 movsd(scratch2, Operand::StaticVariable(canonical_nan_reference)); 646 fld_d(Operand::StaticVariable(canonical_nan_reference));
739 jmp(&have_double_value, Label::kNear); 647 jmp(&have_double_value, Label::kNear);
740 648
741 bind(&smi_value); 649 bind(&smi_value);
742 // Value is a smi. Convert to a double and store. 650 // Value is a smi. Convert to a double and store.
743 // Preserve original value. 651 // Preserve original value.
744 mov(scratch1, maybe_number); 652 mov(scratch, maybe_number);
745 SmiUntag(scratch1); 653 SmiUntag(scratch);
746 Cvtsi2sd(scratch2, scratch1); 654 push(scratch);
747 movsd(FieldOperand(elements, key, times_4, 655 fild_s(Operand(esp, 0));
748 FixedDoubleArray::kHeaderSize - elements_offset), 656 pop(scratch);
749 scratch2); 657 fstp_d(FieldOperand(elements, key, times_4,
658 FixedDoubleArray::kHeaderSize - elements_offset));
750 bind(&done); 659 bind(&done);
751 } 660 }
752 661
753 662
754 void MacroAssembler::CompareMap(Register obj, Handle<Map> map) { 663 void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
755 cmp(FieldOperand(obj, HeapObject::kMapOffset), map); 664 cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
756 } 665 }
757 666
758 667
759 void MacroAssembler::CheckMap(Register obj, 668 void MacroAssembler::CheckMap(Register obj,
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after
820 Label* fail) { 729 Label* fail) {
821 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset)); 730 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
822 sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); 731 sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
823 cmp(scratch, 732 cmp(scratch,
824 LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); 733 LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
825 j(above, fail); 734 j(above, fail);
826 } 735 }
827 736
828 737
829 void MacroAssembler::FCmp() { 738 void MacroAssembler::FCmp() {
830 fucomip(); 739 fucompp();
831 fstp(0); 740 push(eax);
741 fnstsw_ax();
742 sahf();
743 pop(eax);
832 } 744 }
833 745
834 746
835 void MacroAssembler::AssertNumber(Register object) { 747 void MacroAssembler::AssertNumber(Register object) {
836 if (emit_debug_code()) { 748 if (emit_debug_code()) {
837 Label ok; 749 Label ok;
838 JumpIfSmi(object, &ok); 750 JumpIfSmi(object, &ok);
839 cmp(FieldOperand(object, HeapObject::kMapOffset), 751 cmp(FieldOperand(object, HeapObject::kMapOffset),
840 isolate()->factory()->heap_number_map()); 752 isolate()->factory()->heap_number_map());
841 Check(equal, kOperandNotANumber); 753 Check(equal, kOperandNotANumber);
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after
962 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot. 874 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
963 875
964 // Save the frame pointer and the context in top. 876 // Save the frame pointer and the context in top.
965 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate()); 877 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
966 ExternalReference context_address(Isolate::kContextAddress, isolate()); 878 ExternalReference context_address(Isolate::kContextAddress, isolate());
967 mov(Operand::StaticVariable(c_entry_fp_address), ebp); 879 mov(Operand::StaticVariable(c_entry_fp_address), ebp);
968 mov(Operand::StaticVariable(context_address), esi); 880 mov(Operand::StaticVariable(context_address), esi);
969 } 881 }
970 882
971 883
// Finishes setting up an exit frame: reserves stack space for |argc|
// pointer-sized argument slots, aligns esp to the OS activation frame
// alignment, and records the resulting esp in the frame's saved-SP slot.
void MacroAssembler::EnterExitFrameEpilogue(int argc) {
  sub(esp, Immediate(argc * kPointerSize));

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);  // Round esp down to the alignment boundary.
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
997 897
998 898
// Enters an exit frame for calling into C++. On entry eax holds the
// argument count. On exit edi holds argc and esi points at the first
// argument slot on the caller's stack (argv); both are callee-saved so
// they survive the C call.
void MacroAssembler::EnterExitFrame() {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3);
}
1010 910
1011 911
// Enters an exit frame for calling an API function, reserving |argc|
// pointer-sized stack slots for the outgoing call.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc);
}
1016 916
1017 917
1018 void MacroAssembler::LeaveExitFrame(bool save_doubles) { 918 void MacroAssembler::LeaveExitFrame() {
1019 // Optionally restore all XMM registers.
1020 if (save_doubles) {
1021 const int offset = -2 * kPointerSize;
1022 for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
1023 XMMRegister reg = XMMRegister::from_code(i);
1024 movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
1025 }
1026 }
1027
1028 // Get the return address from the stack and restore the frame pointer. 919 // Get the return address from the stack and restore the frame pointer.
1029 mov(ecx, Operand(ebp, 1 * kPointerSize)); 920 mov(ecx, Operand(ebp, 1 * kPointerSize));
1030 mov(ebp, Operand(ebp, 0 * kPointerSize)); 921 mov(ebp, Operand(ebp, 0 * kPointerSize));
1031 922
1032 // Pop the arguments and the receiver from the caller stack. 923 // Pop the arguments and the receiver from the caller stack.
1033 lea(esp, Operand(esi, 1 * kPointerSize)); 924 lea(esp, Operand(esi, 1 * kPointerSize));
1034 925
1035 // Push the return address to get ready to return. 926 // Push the return address to get ready to return.
1036 push(ecx); 927 push(ecx);
1037 928
(...skipping 1032 matching lines...) Expand 10 before | Expand all | Expand 10 after
2070 if (String::kHashShift > kSmiTagSize) { 1961 if (String::kHashShift > kSmiTagSize) {
2071 shr(hash, String::kHashShift - kSmiTagSize); 1962 shr(hash, String::kHashShift - kSmiTagSize);
2072 } 1963 }
2073 if (!index.is(hash)) { 1964 if (!index.is(hash)) {
2074 mov(index, hash); 1965 mov(index, hash);
2075 } 1966 }
2076 } 1967 }
2077 1968
2078 1969
// Calls the runtime function |f| with |num_arguments| arguments through
// the C entry stub: eax receives the argument count and ebx the external
// reference of the function to call.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(isolate(), 1);
  CallStub(&ces);
}
2096 1986
2097 1987
2098 void MacroAssembler::CallExternalReference(ExternalReference ref, 1988 void MacroAssembler::CallExternalReference(ExternalReference ref,
2099 int num_arguments) { 1989 int num_arguments) {
2100 mov(eax, Immediate(num_arguments)); 1990 mov(eax, Immediate(num_arguments));
2101 mov(ebx, Immediate(ref)); 1991 mov(ebx, Immediate(ref));
2102 1992
2103 CEntryStub stub(isolate(), 1); 1993 CEntryStub stub(isolate(), 1);
(...skipping 528 matching lines...) Expand 10 before | Expand all | Expand 10 after
2632 ret(bytes_dropped); 2522 ret(bytes_dropped);
2633 } else { 2523 } else {
2634 pop(scratch); 2524 pop(scratch);
2635 add(esp, Immediate(bytes_dropped)); 2525 add(esp, Immediate(bytes_dropped));
2636 push(scratch); 2526 push(scratch);
2637 ret(0); 2527 ret(0);
2638 } 2528 }
2639 } 2529 }
2640 2530
2641 2531
// Debug check that the x87 register stack holds exactly |depth| items, by
// comparing the status word's TOP field against the expected top-of-stack.
// Aborts with kUnexpectedFPUStackDepthAfterInstruction on mismatch.
void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
  // Make sure the floating point stack is either empty or has depth items.
  ASSERT(depth <= 7);
  // This is very expensive.
  ASSERT(FLAG_debug_code && FLAG_enable_slow_asserts);

  // The top-of-stack (tos) is 7 if there is one item pushed.
  int tos = (8 - depth) % 8;
  const int kTopMask = 0x3800;  // Bits 11-13 of the FPU status word (TOP).
  push(eax);                    // Preserve eax; fnstsw_ax clobbers it.
  fwait();
  fnstsw_ax();                  // Load the FPU status word into ax.
  and_(eax, kTopMask);          // Isolate the TOP field...
  shr(eax, 11);                 // ...and shift it down to bits 0-2.
  cmp(eax, Immediate(tos));
  Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
  fnclex();                     // Clear pending FPU exception flags.
  pop(eax);
}
2551
2552
2642 void MacroAssembler::Drop(int stack_elements) { 2553 void MacroAssembler::Drop(int stack_elements) {
2643 if (stack_elements > 0) { 2554 if (stack_elements > 0) {
2644 add(esp, Immediate(stack_elements * kPointerSize)); 2555 add(esp, Immediate(stack_elements * kPointerSize));
2645 } 2556 }
2646 } 2557 }
2647 2558
2648 2559
2649 void MacroAssembler::Move(Register dst, Register src) { 2560 void MacroAssembler::Move(Register dst, Register src) {
2650 if (!dst.is(src)) { 2561 if (!dst.is(src)) {
2651 mov(dst, src); 2562 mov(dst, src);
2652 } 2563 }
2653 } 2564 }
2654 2565
2655 2566
2656 void MacroAssembler::Move(Register dst, const Immediate& x) { 2567 void MacroAssembler::Move(Register dst, const Immediate& x) {
2657 if (x.is_zero()) { 2568 if (x.is_zero()) {
2658 xor_(dst, dst); // Shorter than mov of 32-bit immediate 0. 2569 xor_(dst, dst); // Shorter than mov of 32-bit immediate 0.
2659 } else { 2570 } else {
2660 mov(dst, x); 2571 mov(dst, x);
2661 } 2572 }
2662 } 2573 }
2663 2574
2664 2575
// Stores the immediate x into the memory operand dst.
void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}
2668 2579
2669 2580
2670 void MacroAssembler::Move(XMMRegister dst, double val) {
2671 // TODO(titzer): recognize double constants with ExternalReferences.
2672 uint64_t int_val = BitCast<uint64_t, double>(val);
2673 if (int_val == 0) {
2674 xorps(dst, dst);
2675 } else {
2676 int32_t lower = static_cast<int32_t>(int_val);
2677 int32_t upper = static_cast<int32_t>(int_val >> kBitsPerInt);
2678 push(Immediate(upper));
2679 push(Immediate(lower));
2680 movsd(dst, Operand(esp, 0));
2681 add(esp, Immediate(kDoubleSize));
2682 }
2683 }
2684
2685
// Sets the stats counter's storage to |value|, but only when native-code
// counters are compiled in and this particular counter is enabled.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
2691 2586
2692 2587
2693 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) { 2588 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2694 ASSERT(value > 0); 2589 ASSERT(value > 0);
2695 if (FLAG_native_code_counters && counter->Enabled()) { 2590 if (FLAG_native_code_counters && counter->Enabled()) {
(...skipping 167 matching lines...) Expand 10 before | Expand all | Expand 10 after
2863 mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset)); 2758 mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
2864 } 2759 }
2865 2760
2866 2761
// Loads the number of own descriptors of |map| into |dst| by extracting
// the NumberOfOwnDescriptorsBits field from the map's bit field 3 word.
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
2871 2766
2872 2767
2873 void MacroAssembler::LoadPowerOf2(XMMRegister dst,
2874 Register scratch,
2875 int power) {
2876 ASSERT(is_uintn(power + HeapNumber::kExponentBias,
2877 HeapNumber::kExponentBits));
2878 mov(scratch, Immediate(power + HeapNumber::kExponentBias));
2879 movd(dst, scratch);
2880 psllq(dst, HeapNumber::kMantissaBits);
2881 }
2882
2883
2884 void MacroAssembler::LookupNumberStringCache(Register object, 2768 void MacroAssembler::LookupNumberStringCache(Register object,
2885 Register result, 2769 Register result,
2886 Register scratch1, 2770 Register scratch1,
2887 Register scratch2, 2771 Register scratch2,
2888 Label* not_found) { 2772 Label* not_found) {
2889 // Use of registers. Register result is used as a temporary. 2773 // Use of registers. Register result is used as a temporary.
2890 Register number_string_cache = result; 2774 Register number_string_cache = result;
2891 Register mask = scratch1; 2775 Register mask = scratch1;
2892 Register scratch = scratch2; 2776 Register scratch = scratch2;
2893 2777
(...skipping 27 matching lines...) Expand all
2921 // Object is heap number and hash is now in scratch. Calculate cache index. 2805 // Object is heap number and hash is now in scratch. Calculate cache index.
2922 and_(scratch, mask); 2806 and_(scratch, mask);
2923 Register index = scratch; 2807 Register index = scratch;
2924 Register probe = mask; 2808 Register probe = mask;
2925 mov(probe, 2809 mov(probe,
2926 FieldOperand(number_string_cache, 2810 FieldOperand(number_string_cache,
2927 index, 2811 index,
2928 times_twice_pointer_size, 2812 times_twice_pointer_size,
2929 FixedArray::kHeaderSize)); 2813 FixedArray::kHeaderSize));
2930 JumpIfSmi(probe, not_found); 2814 JumpIfSmi(probe, not_found);
2931 movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset)); 2815 fld_d(FieldOperand(object, HeapNumber::kValueOffset));
2932 ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset)); 2816 fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
2817 FCmp();
2933 j(parity_even, not_found); // Bail out if NaN is involved. 2818 j(parity_even, not_found); // Bail out if NaN is involved.
2934 j(not_equal, not_found); // The cache did not contain this value. 2819 j(not_equal, not_found); // The cache did not contain this value.
2935 jmp(&load_result_from_cache, Label::kNear); 2820 jmp(&load_result_from_cache, Label::kNear);
2936 2821
2937 bind(&smi_hash_calculated); 2822 bind(&smi_hash_calculated);
2938 // Object is smi and hash is now in scratch. Calculate cache index. 2823 // Object is smi and hash is now in scratch. Calculate cache index.
2939 and_(scratch, mask); 2824 and_(scratch, mask);
2940 // Check if the entry is the smi we are looking for. 2825 // Check if the entry is the smi we are looking for.
2941 cmp(object, 2826 cmp(object,
2942 FieldOperand(number_string_cache, 2827 FieldOperand(number_string_cache,
(...skipping 515 matching lines...) Expand 10 before | Expand all | Expand 10 after
3458 if (divisor < 0 && ms.multiplier() > 0) sub(edx, dividend); 3343 if (divisor < 0 && ms.multiplier() > 0) sub(edx, dividend);
3459 if (ms.shift() > 0) sar(edx, ms.shift()); 3344 if (ms.shift() > 0) sar(edx, ms.shift());
3460 mov(eax, dividend); 3345 mov(eax, dividend);
3461 shr(eax, 31); 3346 shr(eax, 31);
3462 add(edx, eax); 3347 add(edx, eax);
3463 } 3348 }
3464 3349
3465 3350
3466 } } // namespace v8::internal 3351 } } // namespace v8::internal
3467 3352
3468 #endif // V8_TARGET_ARCH_IA32 3353 #endif // V8_TARGET_ARCH_X87
OLDNEW
« no previous file with comments | « src/x87/macro-assembler-x87.h ('k') | src/x87/regexp-macro-assembler-x87.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698