Chromium Code Reviews

Side by Side Diff: src/x64/macro-assembler-x64.h

Issue 6991010: Remove NearLabel, replacing remaining occurrences with Label (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 7 months ago
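
This patch drops the templated LabelType parameter (which let callers pass either a Label or a NearLabel) from the MacroAssembler helpers and replaces it with a plain Label* plus an explicit Label::Distance hint that defaults to Label::kFar. A minimal caller-side sketch of the migration, assuming a typical smi check; the generator function and the register choice are hypothetical and only for illustration:

  void GenerateSmiCheck(MacroAssembler* masm) {
    // Before this patch a short jump was requested by passing a NearLabel,
    // which selected the corresponding LabelType template instantiation:
    //
    //   NearLabel is_smi;
    //   masm->JumpIfSmi(rax, &is_smi);
    //
    // After this patch every jump target is a plain Label, and the caller
    // asks for the short (8-bit displacement) encoding with an explicit
    // Label::Distance argument; omitting it falls back to Label::kFar.
    Label is_smi;
    masm->JumpIfSmi(rax, &is_smi, Label::kNear);
    // ... non-smi path ...
    masm->bind(&is_smi);
  }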
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 127 matching lines...)
138 // For page containing |object| mark region covering |addr| dirty. 138 // For page containing |object| mark region covering |addr| dirty.
139 // RecordWriteHelper only works if the object is not in new 139 // RecordWriteHelper only works if the object is not in new
140 // space. 140 // space.
141 void RecordWriteHelper(Register object, 141 void RecordWriteHelper(Register object,
142 Register addr, 142 Register addr,
143 Register scratch); 143 Register scratch);
144 144
145 // Check if object is in new space. The condition cc can be equal or 145 // Check if object is in new space. The condition cc can be equal or
146 // not_equal. If it is equal a jump will be done if the object is on new 146 // not_equal. If it is equal a jump will be done if the object is on new
147 // space. The register scratch can be object itself, but it will be clobbered. 147 // space. The register scratch can be object itself, but it will be clobbered.
148 template <typename LabelType>
149 void InNewSpace(Register object, 148 void InNewSpace(Register object,
150 Register scratch, 149 Register scratch,
151 Condition cc, 150 Condition cc,
152 LabelType* branch); 151 Label* branch,
152 Label::Distance near_jump = Label::kFar);
153 153
154 // For page containing |object| mark region covering [object+offset] 154 // For page containing |object| mark region covering [object+offset]
155 // dirty. |object| is the object being stored into, |value| is the 155 // dirty. |object| is the object being stored into, |value| is the
156 // object being stored. If |offset| is zero, then the |scratch| 156 // object being stored. If |offset| is zero, then the |scratch|
157 // register contains the array index into the elements array 157 // register contains the array index into the elements array
158 // represented as an untagged 32-bit integer. All registers are 158 // represented as an untagged 32-bit integer. All registers are
159 // clobbered by the operation. RecordWrite filters out smis so it 159 // clobbered by the operation. RecordWrite filters out smis so it
160 // does not update the write barrier if the value is a smi. 160 // does not update the write barrier if the value is a smi.
161 void RecordWrite(Register object, 161 void RecordWrite(Register object,
162 int offset, 162 int offset,
(...skipping 156 matching lines...)
319 // Divide a positive smi's integer value by a power of two. 319 // Divide a positive smi's integer value by a power of two.
320 // Provides result as 32-bit integer value. 320 // Provides result as 32-bit integer value.
321 void PositiveSmiDivPowerOfTwoToInteger32(Register dst, 321 void PositiveSmiDivPowerOfTwoToInteger32(Register dst,
322 Register src, 322 Register src,
323 int power); 323 int power);
324 324
325 // Perform the logical or of two smi values and return a smi value. 325 // Perform the logical or of two smi values and return a smi value.
326 // If either argument is not a smi, jump to on_not_smis and retain 326 // If either argument is not a smi, jump to on_not_smis and retain
327 // the original values of source registers. The destination register 327 // the original values of source registers. The destination register
328 // may be changed if it's not one of the source registers. 328 // may be changed if it's not one of the source registers.
329 template <typename LabelType>
330 void SmiOrIfSmis(Register dst, 329 void SmiOrIfSmis(Register dst,
331 Register src1, 330 Register src1,
332 Register src2, 331 Register src2,
333 LabelType* on_not_smis); 332 Label* on_not_smis,
333 Label::Distance near_jump = Label::kFar);
334 334
335 335
336 // Simple comparison of smis. Both sides must be known smis to use these, 336 // Simple comparison of smis. Both sides must be known smis to use these,
337 // otherwise use Cmp. 337 // otherwise use Cmp.
338 void SmiCompare(Register smi1, Register smi2); 338 void SmiCompare(Register smi1, Register smi2);
339 void SmiCompare(Register dst, Smi* src); 339 void SmiCompare(Register dst, Smi* src);
340 void SmiCompare(Register dst, const Operand& src); 340 void SmiCompare(Register dst, const Operand& src);
341 void SmiCompare(const Operand& dst, Register src); 341 void SmiCompare(const Operand& dst, Register src);
342 void SmiCompare(const Operand& dst, Smi* src); 342 void SmiCompare(const Operand& dst, Smi* src);
343 // Compare the int32 in src register to the value of the smi stored at dst. 343 // Compare the int32 in src register to the value of the smi stored at dst.
(...skipping 37 matching lines...)
381 381
382 // Check whether src is a Smi, and set dst to zero if it is a smi, 382 // Check whether src is a Smi, and set dst to zero if it is a smi,
383 // and to one if it isn't. 383 // and to one if it isn't.
384 void CheckSmiToIndicator(Register dst, Register src); 384 void CheckSmiToIndicator(Register dst, Register src);
385 void CheckSmiToIndicator(Register dst, const Operand& src); 385 void CheckSmiToIndicator(Register dst, const Operand& src);
386 386
387 // Test-and-jump functions. Typically combines a check function 387 // Test-and-jump functions. Typically combines a check function
388 // above with a conditional jump. 388 // above with a conditional jump.
389 389
390 // Jump if the value cannot be represented by a smi. 390 // Jump if the value cannot be represented by a smi.
391 template <typename LabelType> 391 void JumpIfNotValidSmiValue(Register src, Label* on_invalid,
392 void JumpIfNotValidSmiValue(Register src, LabelType* on_invalid); 392 Label::Distance near_jump = Label::kFar);
393 393
394 // Jump if the unsigned integer value cannot be represented by a smi. 394 // Jump if the unsigned integer value cannot be represented by a smi.
395 template <typename LabelType> 395 void JumpIfUIntNotValidSmiValue(Register src, Label* on_invalid,
396 void JumpIfUIntNotValidSmiValue(Register src, LabelType* on_invalid); 396 Label::Distance near_jump = Label::kFar);
397 397
398 // Jump to label if the value is a tagged smi. 398 // Jump to label if the value is a tagged smi.
399 template <typename LabelType> 399 void JumpIfSmi(Register src,
400 void JumpIfSmi(Register src, LabelType* on_smi); 400 Label* on_smi,
401 Label::Distance near_jump = Label::kFar);
401 402
402 // Jump to label if the value is not a tagged smi. 403 // Jump to label if the value is not a tagged smi.
403 template <typename LabelType> 404 void JumpIfNotSmi(Register src,
404 void JumpIfNotSmi(Register src, LabelType* on_not_smi); 405 Label* on_not_smi,
406 Label::Distance near_jump = Label::kFar);
405 407
406 // Jump to label if the value is not a non-negative tagged smi. 408 // Jump to label if the value is not a non-negative tagged smi.
407 template <typename LabelType> 409 void JumpUnlessNonNegativeSmi(Register src,
408 void JumpUnlessNonNegativeSmi(Register src, LabelType* on_not_smi); 410 Label* on_not_smi,
411 Label::Distance near_jump = Label::kFar);
409 412
410 // Jump to label if the value, which must be a tagged smi, has value equal 413 // Jump to label if the value, which must be a tagged smi, has value equal
411 // to the constant. 414 // to the constant.
412 template <typename LabelType>
413 void JumpIfSmiEqualsConstant(Register src, 415 void JumpIfSmiEqualsConstant(Register src,
414 Smi* constant, 416 Smi* constant,
415 LabelType* on_equals); 417 Label* on_equals,
418 Label::Distance near_jump = Label::kFar);
416 419
417 // Jump if either or both register are not smi values. 420 // Jump if either or both register are not smi values.
418 template <typename LabelType>
419 void JumpIfNotBothSmi(Register src1, 421 void JumpIfNotBothSmi(Register src1,
420 Register src2, 422 Register src2,
421 LabelType* on_not_both_smi); 423 Label* on_not_both_smi,
424 Label::Distance near_jump = Label::kFar);
422 425
423 // Jump if either or both register are not non-negative smi values. 426 // Jump if either or both register are not non-negative smi values.
424 template <typename LabelType>
425 void JumpUnlessBothNonNegativeSmi(Register src1, Register src2, 427 void JumpUnlessBothNonNegativeSmi(Register src1, Register src2,
426 LabelType* on_not_both_smi); 428 Label* on_not_both_smi,
429 Label::Distance near_jump = Label::kFar);
427 430
428 // Operations on tagged smi values. 431 // Operations on tagged smi values.
429 432
430 // Smis represent a subset of integers. The subset is always equivalent to 433 // Smis represent a subset of integers. The subset is always equivalent to
431 // a two's complement interpretation of a fixed number of bits. 434 // a two's complement interpretation of a fixed number of bits.
432 435
433 // Optimistically adds an integer constant to a supposed smi. 436 // Optimistically adds an integer constant to a supposed smi.
434 // If the src is not a smi, or the result is not a smi, jump to 437 // If the src is not a smi, or the result is not a smi, jump to
435 // the label. 438 // the label.
436 template <typename LabelType>
437 void SmiTryAddConstant(Register dst, 439 void SmiTryAddConstant(Register dst,
438 Register src, 440 Register src,
439 Smi* constant, 441 Smi* constant,
440 LabelType* on_not_smi_result); 442 Label* on_not_smi_result,
443 Label::Distance near_jump = Label::kFar);
441 444
442 // Add an integer constant to a tagged smi, giving a tagged smi as result. 445 // Add an integer constant to a tagged smi, giving a tagged smi as result.
443 // No overflow testing on the result is done. 446 // No overflow testing on the result is done.
444 void SmiAddConstant(Register dst, Register src, Smi* constant); 447 void SmiAddConstant(Register dst, Register src, Smi* constant);
445 448
446 // Add an integer constant to a tagged smi, giving a tagged smi as result. 449 // Add an integer constant to a tagged smi, giving a tagged smi as result.
447 // No overflow testing on the result is done. 450 // No overflow testing on the result is done.
448 void SmiAddConstant(const Operand& dst, Smi* constant); 451 void SmiAddConstant(const Operand& dst, Smi* constant);
449 452
450 // Add an integer constant to a tagged smi, giving a tagged smi as result, 453 // Add an integer constant to a tagged smi, giving a tagged smi as result,
451 // or jumping to a label if the result cannot be represented by a smi. 454 // or jumping to a label if the result cannot be represented by a smi.
452 template <typename LabelType>
453 void SmiAddConstant(Register dst, 455 void SmiAddConstant(Register dst,
454 Register src, 456 Register src,
455 Smi* constant, 457 Smi* constant,
456 LabelType* on_not_smi_result); 458 Label* on_not_smi_result,
459 Label::Distance near_jump = Label::kFar);
457 460
458 // Subtract an integer constant from a tagged smi, giving a tagged smi as 461 // Subtract an integer constant from a tagged smi, giving a tagged smi as
459 // result. No testing on the result is done. Sets the N and Z flags 462 // result. No testing on the result is done. Sets the N and Z flags
460 // based on the value of the resulting integer. 463 // based on the value of the resulting integer.
461 void SmiSubConstant(Register dst, Register src, Smi* constant); 464 void SmiSubConstant(Register dst, Register src, Smi* constant);
462 465
463 // Subtract an integer constant from a tagged smi, giving a tagged smi as 466 // Subtract an integer constant from a tagged smi, giving a tagged smi as
464 // result, or jumping to a label if the result cannot be represented by a smi. 467 // result, or jumping to a label if the result cannot be represented by a smi.
465 template <typename LabelType>
466 void SmiSubConstant(Register dst, 468 void SmiSubConstant(Register dst,
467 Register src, 469 Register src,
468 Smi* constant, 470 Smi* constant,
469 LabelType* on_not_smi_result); 471 Label* on_not_smi_result,
472 Label::Distance near_jump = Label::kFar);
470 473
471 // Negating a smi can give a negative zero or too large positive value. 474 // Negating a smi can give a negative zero or too large positive value.
472 // NOTICE: This operation jumps on success, not failure! 475 // NOTICE: This operation jumps on success, not failure!
473 template <typename LabelType>
474 void SmiNeg(Register dst, 476 void SmiNeg(Register dst,
475 Register src, 477 Register src,
476 LabelType* on_smi_result); 478 Label* on_smi_result,
479 Label::Distance near_jump = Label::kFar);
477 480
478 // Adds smi values and return the result as a smi. 481 // Adds smi values and return the result as a smi.
479 // If dst is src1, then src1 will be destroyed, even if 482 // If dst is src1, then src1 will be destroyed, even if
480 // the operation is unsuccessful. 483 // the operation is unsuccessful.
481 template <typename LabelType>
482 void SmiAdd(Register dst, 484 void SmiAdd(Register dst,
483 Register src1, 485 Register src1,
484 Register src2, 486 Register src2,
485 LabelType* on_not_smi_result); 487 Label* on_not_smi_result,
486 template <typename LabelType> 488 Label::Distance near_jump = Label::kFar);
487 void SmiAdd(Register dst, 489 void SmiAdd(Register dst,
488 Register src1, 490 Register src1,
489 const Operand& src2, 491 const Operand& src2,
490 LabelType* on_not_smi_result); 492 Label* on_not_smi_result,
493 Label::Distance near_jump = Label::kFar);
491 494
492 void SmiAdd(Register dst, 495 void SmiAdd(Register dst,
493 Register src1, 496 Register src1,
494 Register src2); 497 Register src2);
495 498
496 // Subtracts smi values and return the result as a smi. 499 // Subtracts smi values and return the result as a smi.
497 // If dst is src1, then src1 will be destroyed, even if 500 // If dst is src1, then src1 will be destroyed, even if
498 // the operation is unsuccessful. 501 // the operation is unsuccessful.
499 template <typename LabelType>
500 void SmiSub(Register dst, 502 void SmiSub(Register dst,
501 Register src1, 503 Register src1,
502 Register src2, 504 Register src2,
503 LabelType* on_not_smi_result); 505 Label* on_not_smi_result,
506 Label::Distance near_jump = Label::kFar);
504 507
505 void SmiSub(Register dst, 508 void SmiSub(Register dst,
506 Register src1, 509 Register src1,
507 Register src2); 510 Register src2);
508 511
509 template <typename LabelType>
510 void SmiSub(Register dst, 512 void SmiSub(Register dst,
511 Register src1, 513 Register src1,
512 const Operand& src2, 514 const Operand& src2,
513 LabelType* on_not_smi_result); 515 Label* on_not_smi_result,
516 Label::Distance near_jump = Label::kFar);
514 517
515 void SmiSub(Register dst, 518 void SmiSub(Register dst,
516 Register src1, 519 Register src1,
517 const Operand& src2); 520 const Operand& src2);
518 521
519 // Multiplies smi values and return the result as a smi, 522 // Multiplies smi values and return the result as a smi,
520 // if possible. 523 // if possible.
521 // If dst is src1, then src1 will be destroyed, even if 524 // If dst is src1, then src1 will be destroyed, even if
522 // the operation is unsuccessful. 525 // the operation is unsuccessful.
523 template <typename LabelType>
524 void SmiMul(Register dst, 526 void SmiMul(Register dst,
525 Register src1, 527 Register src1,
526 Register src2, 528 Register src2,
527 LabelType* on_not_smi_result); 529 Label* on_not_smi_result,
530 Label::Distance near_jump = Label::kFar);
528 531
529 // Divides one smi by another and returns the quotient. 532 // Divides one smi by another and returns the quotient.
530 // Clobbers rax and rdx registers. 533 // Clobbers rax and rdx registers.
531 template <typename LabelType>
532 void SmiDiv(Register dst, 534 void SmiDiv(Register dst,
533 Register src1, 535 Register src1,
534 Register src2, 536 Register src2,
535 LabelType* on_not_smi_result); 537 Label* on_not_smi_result,
538 Label::Distance near_jump = Label::kFar);
536 539
537 // Divides one smi by another and returns the remainder. 540 // Divides one smi by another and returns the remainder.
538 // Clobbers rax and rdx registers. 541 // Clobbers rax and rdx registers.
539 template <typename LabelType>
540 void SmiMod(Register dst, 542 void SmiMod(Register dst,
541 Register src1, 543 Register src1,
542 Register src2, 544 Register src2,
543 LabelType* on_not_smi_result); 545 Label* on_not_smi_result,
546 Label::Distance near_jump = Label::kFar);
544 547
545 // Bitwise operations. 548 // Bitwise operations.
546 void SmiNot(Register dst, Register src); 549 void SmiNot(Register dst, Register src);
547 void SmiAnd(Register dst, Register src1, Register src2); 550 void SmiAnd(Register dst, Register src1, Register src2);
548 void SmiOr(Register dst, Register src1, Register src2); 551 void SmiOr(Register dst, Register src1, Register src2);
549 void SmiXor(Register dst, Register src1, Register src2); 552 void SmiXor(Register dst, Register src1, Register src2);
550 void SmiAndConstant(Register dst, Register src1, Smi* constant); 553 void SmiAndConstant(Register dst, Register src1, Smi* constant);
551 void SmiOrConstant(Register dst, Register src1, Smi* constant); 554 void SmiOrConstant(Register dst, Register src1, Smi* constant);
552 void SmiXorConstant(Register dst, Register src1, Smi* constant); 555 void SmiXorConstant(Register dst, Register src1, Smi* constant);
553 556
554 void SmiShiftLeftConstant(Register dst, 557 void SmiShiftLeftConstant(Register dst,
555 Register src, 558 Register src,
556 int shift_value); 559 int shift_value);
557 template <typename LabelType>
558 void SmiShiftLogicalRightConstant(Register dst, 560 void SmiShiftLogicalRightConstant(Register dst,
559 Register src, 561 Register src,
560 int shift_value, 562 int shift_value,
561 LabelType* on_not_smi_result); 563 Label* on_not_smi_result,
564 Label::Distance near_jump = Label::kFar);
562 void SmiShiftArithmeticRightConstant(Register dst, 565 void SmiShiftArithmeticRightConstant(Register dst,
563 Register src, 566 Register src,
564 int shift_value); 567 int shift_value);
565 568
566 // Shifts a smi value to the left, and returns the result if that is a smi. 569 // Shifts a smi value to the left, and returns the result if that is a smi.
567 // Uses and clobbers rcx, so dst may not be rcx. 570 // Uses and clobbers rcx, so dst may not be rcx.
568 void SmiShiftLeft(Register dst, 571 void SmiShiftLeft(Register dst,
569 Register src1, 572 Register src1,
570 Register src2); 573 Register src2);
571 // Shifts a smi value to the right, shifting in zero bits at the top, and 574 // Shifts a smi value to the right, shifting in zero bits at the top, and
572 // returns the unsigned intepretation of the result if that is a smi. 575 // returns the unsigned intepretation of the result if that is a smi.
573 // Uses and clobbers rcx, so dst may not be rcx. 576 // Uses and clobbers rcx, so dst may not be rcx.
574 template <typename LabelType>
575 void SmiShiftLogicalRight(Register dst, 577 void SmiShiftLogicalRight(Register dst,
576 Register src1, 578 Register src1,
577 Register src2, 579 Register src2,
578 LabelType* on_not_smi_result); 580 Label* on_not_smi_result,
581 Label::Distance near_jump = Label::kFar);
579 // Shifts a smi value to the right, sign extending the top, and 582 // Shifts a smi value to the right, sign extending the top, and
580 // returns the signed intepretation of the result. That will always 583 // returns the signed intepretation of the result. That will always
581 // be a valid smi value, since it's numerically smaller than the 584 // be a valid smi value, since it's numerically smaller than the
582 // original. 585 // original.
583 // Uses and clobbers rcx, so dst may not be rcx. 586 // Uses and clobbers rcx, so dst may not be rcx.
584 void SmiShiftArithmeticRight(Register dst, 587 void SmiShiftArithmeticRight(Register dst,
585 Register src1, 588 Register src1,
586 Register src2); 589 Register src2);
587 590
588 // Specialized operations 591 // Specialized operations
589 592
590 // Select the non-smi register of two registers where exactly one is a 593 // Select the non-smi register of two registers where exactly one is a
591 // smi. If neither are smis, jump to the failure label. 594 // smi. If neither are smis, jump to the failure label.
592 template <typename LabelType>
593 void SelectNonSmi(Register dst, 595 void SelectNonSmi(Register dst,
594 Register src1, 596 Register src1,
595 Register src2, 597 Register src2,
596 LabelType* on_not_smis); 598 Label* on_not_smis,
599 Label::Distance near_jump = Label::kFar);
597 600
598 // Converts, if necessary, a smi to a combination of number and 601 // Converts, if necessary, a smi to a combination of number and
599 // multiplier to be used as a scaled index. 602 // multiplier to be used as a scaled index.
600 // The src register contains a *positive* smi value. The shift is the 603 // The src register contains a *positive* smi value. The shift is the
601 // power of two to multiply the index value by (e.g. 604 // power of two to multiply the index value by (e.g.
602 // to index by smi-value * kPointerSize, pass the smi and kPointerSizeLog2). 605 // to index by smi-value * kPointerSize, pass the smi and kPointerSizeLog2).
603 // The returned index register may be either src or dst, depending 606 // The returned index register may be either src or dst, depending
604 // on what is most efficient. If src and dst are different registers, 607 // on what is most efficient. If src and dst are different registers,
605 // src is always unchanged. 608 // src is always unchanged.
606 SmiIndex SmiToIndex(Register dst, Register src, int shift); 609 SmiIndex SmiToIndex(Register dst, Register src, int shift);
(...skipping 15 matching lines...)
622 movq(dst, constant); 625 movq(dst, constant);
623 } 626 }
624 627
625 void Push(Smi* smi); 628 void Push(Smi* smi);
626 void Test(const Operand& dst, Smi* source); 629 void Test(const Operand& dst, Smi* source);
627 630
628 // --------------------------------------------------------------------------- 631 // ---------------------------------------------------------------------------
629 // String macros. 632 // String macros.
630 633
631 // If object is a string, its map is loaded into object_map. 634 // If object is a string, its map is loaded into object_map.
632 template <typename LabelType>
633 void JumpIfNotString(Register object, 635 void JumpIfNotString(Register object,
634 Register object_map, 636 Register object_map,
635 LabelType* not_string); 637 Label* not_string,
638 Label::Distance near_jump = Label::kFar);
636 639
637 640
638 template <typename LabelType> 641 void JumpIfNotBothSequentialAsciiStrings(
639 void JumpIfNotBothSequentialAsciiStrings(Register first_object, 642 Register first_object,
640 Register second_object, 643 Register second_object,
641 Register scratch1, 644 Register scratch1,
642 Register scratch2, 645 Register scratch2,
643 LabelType* on_not_both_flat_ascii); 646 Label* on_not_both_flat_ascii,
647 Label::Distance near_jump = Label::kFar);
644 648
645 // Check whether the instance type represents a flat ascii string. Jump to the 649 // Check whether the instance type represents a flat ascii string. Jump to the
646 // label if not. If the instance type can be scratched specify same register 650 // label if not. If the instance type can be scratched specify same register
647 // for both instance type and scratch. 651 // for both instance type and scratch.
648 template <typename LabelType>
649 void JumpIfInstanceTypeIsNotSequentialAscii( 652 void JumpIfInstanceTypeIsNotSequentialAscii(
650 Register instance_type, 653 Register instance_type,
651 Register scratch, 654 Register scratch,
652 void JumpIfInstanceTypeIsNotSequentialAscii( 652 Register instance_type, 653 Register scratch, 654 LabelType *on_not_flat_ascii_string); 655 Label* on_not_flat_ascii_string,
656 Label::Distance near_jump = Label::kFar);
653 657
654 template <typename LabelType>
655 void JumpIfBothInstanceTypesAreNotSequentialAscii( 658 void JumpIfBothInstanceTypesAreNotSequentialAscii(
656 Register first_object_instance_type, 659 Register first_object_instance_type,
657 Register second_object_instance_type, 660 Register second_object_instance_type,
658 Register scratch1, 661 Register scratch1,
659 Register scratch2, 662 Register scratch2,
660 LabelType* on_fail); 663 Label* on_fail,
664 Label::Distance near_jump = Label::kFar);
661 665
662 // --------------------------------------------------------------------------- 666 // ---------------------------------------------------------------------------
663 // Macro instructions. 667 // Macro instructions.
664 668
665 // Load a register with a long value as efficiently as possible. 669 // Load a register with a long value as efficiently as possible.
666 void Set(Register dst, int64_t x); 670 void Set(Register dst, int64_t x);
667 void Set(const Operand& dst, int64_t x); 671 void Set(const Operand& dst, int64_t x);
668 672
669 // Move if the registers are not identical. 673 // Move if the registers are not identical.
670 void Move(Register target, Register source); 674 void Move(Register target, Register source);
(...skipping 425 matching lines...)
1096 1100
1097 static int SafepointRegisterStackIndex(Register reg) { 1101 static int SafepointRegisterStackIndex(Register reg) {
1098 return SafepointRegisterStackIndex(reg.code()); 1102 return SafepointRegisterStackIndex(reg.code());
1099 } 1103 }
1100 1104
1101 private: 1105 private:
1102 // Order general registers are pushed by Pushad. 1106 // Order general registers are pushed by Pushad.
1103 // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15. 1107 // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
1104 static int kSafepointPushRegisterIndices[Register::kNumRegisters]; 1108 static int kSafepointPushRegisterIndices[Register::kNumRegisters];
1105 static const int kNumSafepointSavedRegisters = 11; 1109 static const int kNumSafepointSavedRegisters = 11;
1110 static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
1106 1111
1107 bool generating_stub_; 1112 bool generating_stub_;
1108 bool allow_stub_calls_; 1113 bool allow_stub_calls_;
1109 bool root_array_available_; 1114 bool root_array_available_;
1110 1115
1111 // Returns a register holding the smi value. The register MUST NOT be 1116 // Returns a register holding the smi value. The register MUST NOT be
1112 // modified. It may be the "smi 1 constant" register. 1117 // modified. It may be the "smi 1 constant" register.
1113 Register GetSmiConstant(Smi* value); 1118 Register GetSmiConstant(Smi* value);
1114 1119
1115 // Moves the smi value to the destination register. 1120 // Moves the smi value to the destination register.
1116 void LoadSmiConstant(Register dst, Smi* value); 1121 void LoadSmiConstant(Register dst, Smi* value);
1117 1122
1118 // This handle will be patched with the code object on installation. 1123 // This handle will be patched with the code object on installation.
1119 Handle<Object> code_object_; 1124 Handle<Object> code_object_;
1120 1125
1121 // Helper functions for generating invokes. 1126 // Helper functions for generating invokes.
1122 template <typename LabelType>
1123 void InvokePrologue(const ParameterCount& expected, 1127 void InvokePrologue(const ParameterCount& expected,
1124 const ParameterCount& actual, 1128 const ParameterCount& actual,
1125 Handle<Code> code_constant, 1129 Handle<Code> code_constant,
1126 Register code_register, 1130 Register code_register,
1127 LabelType* done, 1131 Label* done,
1128 InvokeFlag flag, 1132 InvokeFlag flag,
1129 const CallWrapper& call_wrapper); 1133 const CallWrapper& call_wrapper,
1134 Label::Distance near_jump = Label::kFar);
1130 1135
1131 // Activation support. 1136 // Activation support.
1132 void EnterFrame(StackFrame::Type type); 1137 void EnterFrame(StackFrame::Type type);
1133 void LeaveFrame(StackFrame::Type type); 1138 void LeaveFrame(StackFrame::Type type);
1134 1139
1135 void EnterExitFramePrologue(bool save_rax); 1140 void EnterExitFramePrologue(bool save_rax);
1136 1141
1137 // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack 1142 // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
1138 // accessible via StackSpaceOperand. 1143 // accessible via StackSpaceOperand.
1139 void EnterExitFrameEpilogue(int arg_stack_space, bool save_doubles); 1144 void EnterExitFrameEpilogue(int arg_stack_space, bool save_doubles);
(...skipping 105 matching lines...)
1245 masm->call(x64_coverage_function, RelocInfo::RUNTIME_ENTRY); \ 1250 masm->call(x64_coverage_function, RelocInfo::RUNTIME_ENTRY); \
1246 masm->pop(rax); \ 1251 masm->pop(rax); \
1247 masm->popad(); \ 1252 masm->popad(); \
1248 masm->popfd(); \ 1253 masm->popfd(); \
1249 } \ 1254 } \
1250 masm-> 1255 masm->
1251 #else 1256 #else
1252 #define ACCESS_MASM(masm) masm-> 1257 #define ACCESS_MASM(masm) masm->
1253 #endif 1258 #endif
1254 1259
1255 // -----------------------------------------------------------------------------
1256 // Template implementations.
1257
1258 static int kSmiShift = kSmiTagSize + kSmiShiftSize;
1259
1260
1261 template <typename LabelType>
1262 void MacroAssembler::SmiNeg(Register dst,
1263 Register src,
1264 LabelType* on_smi_result) {
1265 if (dst.is(src)) {
1266 ASSERT(!dst.is(kScratchRegister));
1267 movq(kScratchRegister, src);
1268 neg(dst); // Low 32 bits are retained as zero by negation.
1269 // Test if result is zero or Smi::kMinValue.
1270 cmpq(dst, kScratchRegister);
1271 j(not_equal, on_smi_result);
1272 movq(src, kScratchRegister);
1273 } else {
1274 movq(dst, src);
1275 neg(dst);
1276 cmpq(dst, src);
1277 // If the result is zero or Smi::kMinValue, negation failed to create a smi.
1278 j(not_equal, on_smi_result);
1279 }
1280 }
1281
1282
1283 template <typename LabelType>
1284 void MacroAssembler::SmiAdd(Register dst,
1285 Register src1,
1286 Register src2,
1287 LabelType* on_not_smi_result) {
1288 ASSERT_NOT_NULL(on_not_smi_result);
1289 ASSERT(!dst.is(src2));
1290 if (dst.is(src1)) {
1291 movq(kScratchRegister, src1);
1292 addq(kScratchRegister, src2);
1293 j(overflow, on_not_smi_result);
1294 movq(dst, kScratchRegister);
1295 } else {
1296 movq(dst, src1);
1297 addq(dst, src2);
1298 j(overflow, on_not_smi_result);
1299 }
1300 }
1301
1302
1303 template <typename LabelType>
1304 void MacroAssembler::SmiAdd(Register dst,
1305 Register src1,
1306 const Operand& src2,
1307 LabelType* on_not_smi_result) {
1308 ASSERT_NOT_NULL(on_not_smi_result);
1309 if (dst.is(src1)) {
1310 movq(kScratchRegister, src1);
1311 addq(kScratchRegister, src2);
1312 j(overflow, on_not_smi_result);
1313 movq(dst, kScratchRegister);
1314 } else {
1315 ASSERT(!src2.AddressUsesRegister(dst));
1316 movq(dst, src1);
1317 addq(dst, src2);
1318 j(overflow, on_not_smi_result);
1319 }
1320 }
1321
1322
1323 template <typename LabelType>
1324 void MacroAssembler::SmiSub(Register dst,
1325 Register src1,
1326 Register src2,
1327 LabelType* on_not_smi_result) {
1328 ASSERT_NOT_NULL(on_not_smi_result);
1329 ASSERT(!dst.is(src2));
1330 if (dst.is(src1)) {
1331 cmpq(dst, src2);
1332 j(overflow, on_not_smi_result);
1333 subq(dst, src2);
1334 } else {
1335 movq(dst, src1);
1336 subq(dst, src2);
1337 j(overflow, on_not_smi_result);
1338 }
1339 }
1340
1341
1342 template <typename LabelType>
1343 void MacroAssembler::SmiSub(Register dst,
1344 Register src1,
1345 const Operand& src2,
1346 LabelType* on_not_smi_result) {
1347 ASSERT_NOT_NULL(on_not_smi_result);
1348 if (dst.is(src1)) {
1349 movq(kScratchRegister, src2);
1350 cmpq(src1, kScratchRegister);
1351 j(overflow, on_not_smi_result);
1352 subq(src1, kScratchRegister);
1353 } else {
1354 movq(dst, src1);
1355 subq(dst, src2);
1356 j(overflow, on_not_smi_result);
1357 }
1358 }
1359
1360
1361 template <typename LabelType>
1362 void MacroAssembler::SmiMul(Register dst,
1363 Register src1,
1364 Register src2,
1365 LabelType* on_not_smi_result) {
1366 ASSERT(!dst.is(src2));
1367 ASSERT(!dst.is(kScratchRegister));
1368 ASSERT(!src1.is(kScratchRegister));
1369 ASSERT(!src2.is(kScratchRegister));
1370
1371 if (dst.is(src1)) {
1372 Label failure, zero_correct_result;
1373 movq(kScratchRegister, src1); // Create backup for later testing.
1374 SmiToInteger64(dst, src1);
1375 imul(dst, src2);
1376 j(overflow, &failure, Label::kNear);
1377
1378 // Check for negative zero result. If product is zero, and one
1379 // argument is negative, go to slow case.
1380 Label correct_result;
1381 testq(dst, dst);
1382 j(not_zero, &correct_result, Label::kNear);
1383
1384 movq(dst, kScratchRegister);
1385 xor_(dst, src2);
1386 // Result was positive zero.
1387 j(positive, &zero_correct_result, Label::kNear);
1388
1389 bind(&failure); // Reused failure exit, restores src1.
1390 movq(src1, kScratchRegister);
1391 jmp(on_not_smi_result);
1392
1393 bind(&zero_correct_result);
1394 Set(dst, 0);
1395
1396 bind(&correct_result);
1397 } else {
1398 SmiToInteger64(dst, src1);
1399 imul(dst, src2);
1400 j(overflow, on_not_smi_result);
1401 // Check for negative zero result. If product is zero, and one
1402 // argument is negative, go to slow case.
1403 Label correct_result;
1404 testq(dst, dst);
1405 j(not_zero, &correct_result, Label::kNear);
1406 // One of src1 and src2 is zero, the check whether the other is
1407 // negative.
1408 movq(kScratchRegister, src1);
1409 xor_(kScratchRegister, src2);
1410 j(negative, on_not_smi_result);
1411 bind(&correct_result);
1412 }
1413 }
1414
1415
1416 template <typename LabelType>
1417 void MacroAssembler::SmiTryAddConstant(Register dst,
1418 Register src,
1419 Smi* constant,
1420 LabelType* on_not_smi_result) {
1421 // Does not assume that src is a smi.
1422 ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
1423 ASSERT_EQ(0, kSmiTag);
1424 ASSERT(!dst.is(kScratchRegister));
1425 ASSERT(!src.is(kScratchRegister));
1426
1427 JumpIfNotSmi(src, on_not_smi_result);
1428 Register tmp = (dst.is(src) ? kScratchRegister : dst);
1429 LoadSmiConstant(tmp, constant);
1430 addq(tmp, src);
1431 j(overflow, on_not_smi_result);
1432 if (dst.is(src)) {
1433 movq(dst, tmp);
1434 }
1435 }
1436
1437
1438 template <typename LabelType>
1439 void MacroAssembler::SmiAddConstant(Register dst,
1440 Register src,
1441 Smi* constant,
1442 LabelType* on_not_smi_result) {
1443 if (constant->value() == 0) {
1444 if (!dst.is(src)) {
1445 movq(dst, src);
1446 }
1447 } else if (dst.is(src)) {
1448 ASSERT(!dst.is(kScratchRegister));
1449
1450 LoadSmiConstant(kScratchRegister, constant);
1451 addq(kScratchRegister, src);
1452 j(overflow, on_not_smi_result);
1453 movq(dst, kScratchRegister);
1454 } else {
1455 LoadSmiConstant(dst, constant);
1456 addq(dst, src);
1457 j(overflow, on_not_smi_result);
1458 }
1459 }
1460
1461
1462 template <typename LabelType>
1463 void MacroAssembler::SmiSubConstant(Register dst,
1464 Register src,
1465 Smi* constant,
1466 LabelType* on_not_smi_result) {
1467 if (constant->value() == 0) {
1468 if (!dst.is(src)) {
1469 movq(dst, src);
1470 }
1471 } else if (dst.is(src)) {
1472 ASSERT(!dst.is(kScratchRegister));
1473 if (constant->value() == Smi::kMinValue) {
1474 // Subtracting min-value from any non-negative value will overflow.
1475 // We test the non-negativeness before doing the subtraction.
1476 testq(src, src);
1477 j(not_sign, on_not_smi_result);
1478 LoadSmiConstant(kScratchRegister, constant);
1479 subq(dst, kScratchRegister);
1480 } else {
1481 // Subtract by adding the negation.
1482 LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value()));
1483 addq(kScratchRegister, dst);
1484 j(overflow, on_not_smi_result);
1485 movq(dst, kScratchRegister);
1486 }
1487 } else {
1488 if (constant->value() == Smi::kMinValue) {
1489 // Subtracting min-value from any non-negative value will overflow.
1490 // We test the non-negativeness before doing the subtraction.
1491 testq(src, src);
1492 j(not_sign, on_not_smi_result);
1493 LoadSmiConstant(dst, constant);
1494 // Adding and subtracting the min-value gives the same result, it only
1495 // differs on the overflow bit, which we don't check here.
1496 addq(dst, src);
1497 } else {
1498 // Subtract by adding the negation.
1499 LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
1500 addq(dst, src);
1501 j(overflow, on_not_smi_result);
1502 }
1503 }
1504 }
1505
1506
1507 template <typename LabelType>
1508 void MacroAssembler::SmiDiv(Register dst,
1509 Register src1,
1510 Register src2,
1511 LabelType* on_not_smi_result) {
1512 ASSERT(!src1.is(kScratchRegister));
1513 ASSERT(!src2.is(kScratchRegister));
1514 ASSERT(!dst.is(kScratchRegister));
1515 ASSERT(!src2.is(rax));
1516 ASSERT(!src2.is(rdx));
1517 ASSERT(!src1.is(rdx));
1518
1519 // Check for 0 divisor (result is +/-Infinity).
1520 testq(src2, src2);
1521 j(zero, on_not_smi_result);
1522
1523 if (src1.is(rax)) {
1524 movq(kScratchRegister, src1);
1525 }
1526 SmiToInteger32(rax, src1);
1527 // We need to rule out dividing Smi::kMinValue by -1, since that would
1528 // overflow in idiv and raise an exception.
1529 // We combine this with negative zero test (negative zero only happens
1530 // when dividing zero by a negative number).
1531
1532 // We overshoot a little and go to slow case if we divide min-value
1533 // by any negative value, not just -1.
1534 Label safe_div;
1535 testl(rax, Immediate(0x7fffffff));
1536 j(not_zero, &safe_div, Label::kNear);
1537 testq(src2, src2);
1538 if (src1.is(rax)) {
1539 j(positive, &safe_div, Label::kNear);
1540 movq(src1, kScratchRegister);
1541 jmp(on_not_smi_result);
1542 } else {
1543 j(negative, on_not_smi_result);
1544 }
1545 bind(&safe_div);
1546
1547 SmiToInteger32(src2, src2);
1548 // Sign extend src1 into edx:eax.
1549 cdq();
1550 idivl(src2);
1551 Integer32ToSmi(src2, src2);
1552 // Check that the remainder is zero.
1553 testl(rdx, rdx);
1554 if (src1.is(rax)) {
1555 Label smi_result;
1556 j(zero, &smi_result, Label::kNear);
1557 movq(src1, kScratchRegister);
1558 jmp(on_not_smi_result);
1559 bind(&smi_result);
1560 } else {
1561 j(not_zero, on_not_smi_result);
1562 }
1563 if (!dst.is(src1) && src1.is(rax)) {
1564 movq(src1, kScratchRegister);
1565 }
1566 Integer32ToSmi(dst, rax);
1567 }
1568
1569
1570 template <typename LabelType>
1571 void MacroAssembler::SmiMod(Register dst,
1572 Register src1,
1573 Register src2,
1574 LabelType* on_not_smi_result) {
1575 ASSERT(!dst.is(kScratchRegister));
1576 ASSERT(!src1.is(kScratchRegister));
1577 ASSERT(!src2.is(kScratchRegister));
1578 ASSERT(!src2.is(rax));
1579 ASSERT(!src2.is(rdx));
1580 ASSERT(!src1.is(rdx));
1581 ASSERT(!src1.is(src2));
1582
1583 testq(src2, src2);
1584 j(zero, on_not_smi_result);
1585
1586 if (src1.is(rax)) {
1587 movq(kScratchRegister, src1);
1588 }
1589 SmiToInteger32(rax, src1);
1590 SmiToInteger32(src2, src2);
1591
1592 // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
1593 Label safe_div;
1594 cmpl(rax, Immediate(Smi::kMinValue));
1595 j(not_equal, &safe_div, Label::kNear);
1596 cmpl(src2, Immediate(-1));
1597 j(not_equal, &safe_div, Label::kNear);
1598 // Retag inputs and go slow case.
1599 Integer32ToSmi(src2, src2);
1600 if (src1.is(rax)) {
1601 movq(src1, kScratchRegister);
1602 }
1603 jmp(on_not_smi_result);
1604 bind(&safe_div);
1605
1606 // Sign extend eax into edx:eax.
1607 cdq();
1608 idivl(src2);
1609 // Restore smi tags on inputs.
1610 Integer32ToSmi(src2, src2);
1611 if (src1.is(rax)) {
1612 movq(src1, kScratchRegister);
1613 }
1614 // Check for a negative zero result. If the result is zero, and the
1615 // dividend is negative, go slow to return a floating point negative zero.
1616 Label smi_result;
1617 testl(rdx, rdx);
1618 j(not_zero, &smi_result, Label::kNear);
1619 testq(src1, src1);
1620 j(negative, on_not_smi_result);
1621 bind(&smi_result);
1622 Integer32ToSmi(dst, rdx);
1623 }
1624
1625
1626 template <typename LabelType>
1627 void MacroAssembler::SmiShiftLogicalRightConstant(
1628 Register dst, Register src, int shift_value, LabelType* on_not_smi_result) {
1629 // Logic right shift interprets its result as an *unsigned* number.
1630 if (dst.is(src)) {
1631 UNIMPLEMENTED(); // Not used.
1632 } else {
1633 movq(dst, src);
1634 if (shift_value == 0) {
1635 testq(dst, dst);
1636 j(negative, on_not_smi_result);
1637 }
1638 shr(dst, Immediate(shift_value + kSmiShift));
1639 shl(dst, Immediate(kSmiShift));
1640 }
1641 }
1642
1643
1644 template <typename LabelType>
1645 void MacroAssembler::SmiShiftLogicalRight(Register dst,
1646 Register src1,
1647 Register src2,
1648 LabelType* on_not_smi_result) {
1649 ASSERT(!dst.is(kScratchRegister));
1650 ASSERT(!src1.is(kScratchRegister));
1651 ASSERT(!src2.is(kScratchRegister));
1652 ASSERT(!dst.is(rcx));
1653 // dst and src1 can be the same, because the one case that bails out
1654 // is a shift by 0, which leaves dst, and therefore src1, unchanged.
1655 if (src1.is(rcx) || src2.is(rcx)) {
1656 movq(kScratchRegister, rcx);
1657 }
1658 if (!dst.is(src1)) {
1659 movq(dst, src1);
1660 }
1661 SmiToInteger32(rcx, src2);
1662 orl(rcx, Immediate(kSmiShift));
1663 shr_cl(dst); // Shift is rcx modulo 0x1f + 32.
1664 shl(dst, Immediate(kSmiShift));
1665 testq(dst, dst);
1666 if (src1.is(rcx) || src2.is(rcx)) {
1667 Label positive_result;
1668 j(positive, &positive_result, Label::kNear);
1669 if (src1.is(rcx)) {
1670 movq(src1, kScratchRegister);
1671 } else {
1672 movq(src2, kScratchRegister);
1673 }
1674 jmp(on_not_smi_result);
1675 bind(&positive_result);
1676 } else {
1677 j(negative, on_not_smi_result); // src2 was zero and src1 negative.
1678 }
1679 }
1680
1681
1682 template <typename LabelType>
1683 void MacroAssembler::SelectNonSmi(Register dst,
1684 Register src1,
1685 Register src2,
1686 LabelType* on_not_smis) {
1687 ASSERT(!dst.is(kScratchRegister));
1688 ASSERT(!src1.is(kScratchRegister));
1689 ASSERT(!src2.is(kScratchRegister));
1690 ASSERT(!dst.is(src1));
1691 ASSERT(!dst.is(src2));
1692 // Both operands must not be smis.
1693 #ifdef DEBUG
1694 if (allow_stub_calls()) { // Check contains a stub call.
1695 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
1696 Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
1697 }
1698 #endif
1699 ASSERT_EQ(0, kSmiTag);
1700 ASSERT_EQ(0, Smi::FromInt(0));
1701 movl(kScratchRegister, Immediate(kSmiTagMask));
1702 and_(kScratchRegister, src1);
1703 testl(kScratchRegister, src2);
1704 // If non-zero then both are smis.
1705 j(not_zero, on_not_smis);
1706
1707 // Exactly one operand is a smi.
1708 ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
1709 // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
1710 subq(kScratchRegister, Immediate(1));
1711 // If src1 is a smi, then scratch register all 1s, else it is all 0s.
1712 movq(dst, src1);
1713 xor_(dst, src2);
1714 and_(dst, kScratchRegister);
1715 // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
1716 xor_(dst, src1);
1717 // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
1718 }
1719
1720
1721 template <typename LabelType>
1722 void MacroAssembler::JumpIfSmi(Register src, LabelType* on_smi) {
1723 ASSERT_EQ(0, kSmiTag);
1724 Condition smi = CheckSmi(src);
1725 j(smi, on_smi);
1726 }
1727
1728
1729 template <typename LabelType>
1730 void MacroAssembler::JumpIfNotSmi(Register src, LabelType* on_not_smi) {
1731 Condition smi = CheckSmi(src);
1732 j(NegateCondition(smi), on_not_smi);
1733 }
1734
1735
1736 template <typename LabelType>
1737 void MacroAssembler::JumpUnlessNonNegativeSmi(
1738 Register src, LabelType* on_not_smi_or_negative) {
1739 Condition non_negative_smi = CheckNonNegativeSmi(src);
1740 j(NegateCondition(non_negative_smi), on_not_smi_or_negative);
1741 }
1742
1743
1744 template <typename LabelType>
1745 void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
1746 Smi* constant,
1747 LabelType* on_equals) {
1748 SmiCompare(src, constant);
1749 j(equal, on_equals);
1750 }
1751
1752
1753 template <typename LabelType>
1754 void MacroAssembler::JumpIfNotValidSmiValue(Register src,
1755 LabelType* on_invalid) {
1756 Condition is_valid = CheckInteger32ValidSmiValue(src);
1757 j(NegateCondition(is_valid), on_invalid);
1758 }
1759
1760
1761 template <typename LabelType>
1762 void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1763 LabelType* on_invalid) {
1764 Condition is_valid = CheckUInteger32ValidSmiValue(src);
1765 j(NegateCondition(is_valid), on_invalid);
1766 }
1767
1768
1769 template <typename LabelType>
1770 void MacroAssembler::JumpIfNotBothSmi(Register src1,
1771 Register src2,
1772 LabelType* on_not_both_smi) {
1773 Condition both_smi = CheckBothSmi(src1, src2);
1774 j(NegateCondition(both_smi), on_not_both_smi);
1775 }
1776
1777
1778 template <typename LabelType>
1779 void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
1780 Register src2,
1781 LabelType* on_not_both_smi) {
1782 Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
1783 j(NegateCondition(both_smi), on_not_both_smi);
1784 }
1785
1786
1787 template <typename LabelType>
1788 void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
1789 LabelType* on_not_smis) {
1790 if (dst.is(src1) || dst.is(src2)) {
1791 ASSERT(!src1.is(kScratchRegister));
1792 ASSERT(!src2.is(kScratchRegister));
1793 movq(kScratchRegister, src1);
1794 or_(kScratchRegister, src2);
1795 JumpIfNotSmi(kScratchRegister, on_not_smis);
1796 movq(dst, kScratchRegister);
1797 } else {
1798 movq(dst, src1);
1799 or_(dst, src2);
1800 JumpIfNotSmi(dst, on_not_smis);
1801 }
1802 }
1803
1804
1805 template <typename LabelType>
1806 void MacroAssembler::JumpIfNotString(Register object,
1807 Register object_map,
1808 LabelType* not_string) {
1809 Condition is_smi = CheckSmi(object);
1810 j(is_smi, not_string);
1811 CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
1812 j(above_equal, not_string);
1813 }
1814
1815
1816 template <typename LabelType>
1817 void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object,
1818 Register second_object,
1819 Register scratch1,
1820 Register scratch2,
1821 LabelType* on_fail) {
1822 // Check that both objects are not smis.
1823 Condition either_smi = CheckEitherSmi(first_object, second_object);
1824 j(either_smi, on_fail);
1825
1826 // Load instance type for both strings.
1827 movq(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
1828 movq(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
1829 movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
1830 movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
1831
1832 // Check that both are flat ascii strings.
1833 ASSERT(kNotStringTag != 0);
1834 const int kFlatAsciiStringMask =
1835 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1836 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1837
1838 andl(scratch1, Immediate(kFlatAsciiStringMask));
1839 andl(scratch2, Immediate(kFlatAsciiStringMask));
1840 // Interleave the bits to check both scratch1 and scratch2 in one test.
1841 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1842 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1843 cmpl(scratch1,
1844 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1845 j(not_equal, on_fail);
1846 }
1847
1848
1849 template <typename LabelType>
1850 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
1851 Register instance_type,
1852 Register scratch,
1853 LabelType *failure) {
1854 if (!scratch.is(instance_type)) {
1855 movl(scratch, instance_type);
1856 }
1857
1858 const int kFlatAsciiStringMask =
1859 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1860
1861 andl(scratch, Immediate(kFlatAsciiStringMask));
1862 cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
1863 j(not_equal, failure);
1864 }
1865
1866
1867 template <typename LabelType>
1868 void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
1869 Register first_object_instance_type,
1870 Register second_object_instance_type,
1871 Register scratch1,
1872 Register scratch2,
1873 LabelType* on_fail) {
1874 // Load instance type for both strings.
1875 movq(scratch1, first_object_instance_type);
1876 movq(scratch2, second_object_instance_type);
1877
1878 // Check that both are flat ascii strings.
1879 ASSERT(kNotStringTag != 0);
1880 const int kFlatAsciiStringMask =
1881 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
1882 const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
1883
1884 andl(scratch1, Immediate(kFlatAsciiStringMask));
1885 andl(scratch2, Immediate(kFlatAsciiStringMask));
1886 // Interleave the bits to check both scratch1 and scratch2 in one test.
1887 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
1888 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
1889 cmpl(scratch1,
1890 Immediate(kFlatAsciiStringTag + (kFlatAsciiStringTag << 3)));
1891 j(not_equal, on_fail);
1892 }
1893
1894
1895 template <typename LabelType>
1896 void MacroAssembler::InNewSpace(Register object,
1897 Register scratch,
1898 Condition cc,
1899 LabelType* branch) {
1900 if (Serializer::enabled()) {
1901 // Can't do arithmetic on external references if it might get serialized.
1902 // The mask isn't really an address. We load it as an external reference in
1903 // case the size of the new space is different between the snapshot maker
1904 // and the running system.
1905 if (scratch.is(object)) {
1906 movq(kScratchRegister, ExternalReference::new_space_mask(isolate()));
1907 and_(scratch, kScratchRegister);
1908 } else {
1909 movq(scratch, ExternalReference::new_space_mask(isolate()));
1910 and_(scratch, object);
1911 }
1912 movq(kScratchRegister, ExternalReference::new_space_start(isolate()));
1913 cmpq(scratch, kScratchRegister);
1914 j(cc, branch);
1915 } else {
1916 ASSERT(is_int32(static_cast<int64_t>(HEAP->NewSpaceMask())));
1917 intptr_t new_space_start =
1918 reinterpret_cast<intptr_t>(HEAP->NewSpaceStart());
1919 movq(kScratchRegister, -new_space_start, RelocInfo::NONE);
1920 if (scratch.is(object)) {
1921 addq(scratch, kScratchRegister);
1922 } else {
1923 lea(scratch, Operand(object, kScratchRegister, times_1, 0));
1924 }
1925 and_(scratch, Immediate(static_cast<int32_t>(HEAP->NewSpaceMask())));
1926 j(cc, branch);
1927 }
1928 }
1929
1930
1931 template <typename LabelType>
1932 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1933 const ParameterCount& actual,
1934 Handle<Code> code_constant,
1935 Register code_register,
1936 LabelType* done,
1937 InvokeFlag flag,
1938 const CallWrapper& call_wrapper) {
1939 bool definitely_matches = false;
1940 Label invoke;
1941 if (expected.is_immediate()) {
1942 ASSERT(actual.is_immediate());
1943 if (expected.immediate() == actual.immediate()) {
1944 definitely_matches = true;
1945 } else {
1946 Set(rax, actual.immediate());
1947 if (expected.immediate() ==
1948 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
1949 // Don't worry about adapting arguments for built-ins that
1950 // don't want that done. Skip adaption code by making it look
1951 // like we have a match between expected and actual number of
1952 // arguments.
1953 definitely_matches = true;
1954 } else {
1955 Set(rbx, expected.immediate());
1956 }
1957 }
1958 } else {
1959 if (actual.is_immediate()) {
1960 // Expected is in register, actual is immediate. This is the
1961 // case when we invoke function values without going through the
1962 // IC mechanism.
1963 cmpq(expected.reg(), Immediate(actual.immediate()));
1964 j(equal, &invoke, Label::kNear);
1965 ASSERT(expected.reg().is(rbx));
1966 Set(rax, actual.immediate());
1967 } else if (!expected.reg().is(actual.reg())) {
1968 // Both expected and actual are in (different) registers. This
1969 // is the case when we invoke functions using call and apply.
1970 cmpq(expected.reg(), actual.reg());
1971 j(equal, &invoke, Label::kNear);
1972 ASSERT(actual.reg().is(rax));
1973 ASSERT(expected.reg().is(rbx));
1974 }
1975 }
1976
1977 if (!definitely_matches) {
1978 Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
1979 if (!code_constant.is_null()) {
1980 movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
1981 addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1982 } else if (!code_register.is(rdx)) {
1983 movq(rdx, code_register);
1984 }
1985
1986 if (flag == CALL_FUNCTION) {
1987 call_wrapper.BeforeCall(CallSize(adaptor));
1988 Call(adaptor, RelocInfo::CODE_TARGET);
1989 call_wrapper.AfterCall();
1990 jmp(done);
1991 } else {
1992 Jump(adaptor, RelocInfo::CODE_TARGET);
1993 }
1994 bind(&invoke);
1995 }
1996 }
1997
1998
1999 } } // namespace v8::internal 1260 } } // namespace v8::internal
2000 1261
2001 #endif // V8_X64_MACRO_ASSEMBLER_X64_H_ 1262 #endif // V8_X64_MACRO_ASSEMBLER_X64_H_
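
With the LabelType template parameter gone, the inline template bodies that used to close this header (SmiNeg, SmiAdd, SmiSub, SmiMul, SmiDiv, SmiMod, the JumpIf* helpers, SelectNonSmi, InNewSpace and InvokePrologue) are deleted from the header; presumably they reappear as ordinary member functions in src/x64/macro-assembler-x64.cc, the next file in this change. A minimal sketch of what one de-templated definition might look like, assuming the three-argument j() overload taking a Label::Distance that the old inline code in this diff already uses:

  void MacroAssembler::JumpIfNotSmi(Register src,
                                    Label* on_not_smi,
                                    Label::Distance near_jump) {
    // Same body as the old template version; the distance hint is simply
    // forwarded to the conditional jump.
    Condition smi = CheckSmi(src);
    j(NegateCondition(smi), on_not_smi, near_jump);
  }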