| OLD | NEW | 
|---|---|
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 563 matching lines...) |
| 574   if (first.is(second)) { | 574   if (first.is(second)) { | 
| 575     return CheckSmi(first); | 575     return CheckSmi(first); | 
| 576   } | 576   } | 
| 577   movl(kScratchRegister, first); | 577   movl(kScratchRegister, first); | 
| 578   orl(kScratchRegister, second); | 578   orl(kScratchRegister, second); | 
| 579   testb(kScratchRegister, Immediate(kSmiTagMask)); | 579   testb(kScratchRegister, Immediate(kSmiTagMask)); | 
| 580   return zero; | 580   return zero; | 
| 581 } | 581 } | 
| 582 | 582 | 
| 583 | 583 | 
|  | 584 Condition MacroAssembler::CheckBothPositiveSmi(Register first, | 
|  | 585                                                Register second) { | 
|  | 586   if (first.is(second)) { | 
|  | 587     return CheckPositiveSmi(first); | 
|  | 588   } | 
|  | 589   movl(kScratchRegister, first); | 
|  | 590   orl(kScratchRegister, second); | 
|  | 591   rol(kScratchRegister, Immediate(1)); | 
|  | 592   testl(kScratchRegister, Immediate(0x03)); | 
|  | 593   return zero; | 
|  | 594 } | 
|  | 595 | 
|  | 596 | 
|  | 597 | 
| 584 Condition MacroAssembler::CheckEitherSmi(Register first, Register second) { | 598 Condition MacroAssembler::CheckEitherSmi(Register first, Register second) { | 
| 585   if (first.is(second)) { | 599   if (first.is(second)) { | 
| 586     return CheckSmi(first); | 600     return CheckSmi(first); | 
| 587   } | 601   } | 
| 588   movl(kScratchRegister, first); | 602   movl(kScratchRegister, first); | 
| 589   andl(kScratchRegister, second); | 603   andl(kScratchRegister, second); | 
| 590   testb(kScratchRegister, Immediate(kSmiTagMask)); | 604   testb(kScratchRegister, Immediate(kSmiTagMask)); | 
| 591   return zero; | 605   return zero; | 
| 592 } | 606 } | 
| 593 | 607 | 
| (...skipping 59 matching lines...) |
| 653     j(overflow, on_not_smi_result); | 667     j(overflow, on_not_smi_result); | 
| 654   } | 668   } | 
| 655 } | 669 } | 
| 656 | 670 | 
| 657 | 671 | 
| 658 void MacroAssembler::SmiSub(Register dst, | 672 void MacroAssembler::SmiSub(Register dst, | 
| 659                             Register src1, | 673                             Register src1, | 
| 660                             Register src2, | 674                             Register src2, | 
| 661                             Label* on_not_smi_result) { | 675                             Label* on_not_smi_result) { | 
| 662   ASSERT(!dst.is(src2)); | 676   ASSERT(!dst.is(src2)); | 
| 663   if (dst.is(src1)) { | 677   if (on_not_smi_result == NULL) { | 
|  | 678     // No overflow checking. Use only when it's known that | 
|  | 679     // overflowing is impossible (e.g., subtracting two positive smis). | 
|  | 680     if (dst.is(src1)) { | 
|  | 681       subq(dst, src2); | 
|  | 682     } else { | 
|  | 683       movq(dst, src1); | 
|  | 684       subq(dst, src2); | 
|  | 685     } | 
|  | 686     Assert(no_overflow, "Smi subtraction overflow"); | 
|  | 687   } else if (dst.is(src1)) { | 
| 664     subq(dst, src2); | 688     subq(dst, src2); | 
| 665     Label smi_result; | 689     Label smi_result; | 
| 666     j(no_overflow, &smi_result); | 690     j(no_overflow, &smi_result); | 
| 667     // Restore src1. | 691     // Restore src1. | 
| 668     addq(src1, src2); | 692     addq(src1, src2); | 
| 669     jmp(on_not_smi_result); | 693     jmp(on_not_smi_result); | 
| 670     bind(&smi_result); | 694     bind(&smi_result); | 
| 671   } else { | 695   } else { | 
| 672     movq(dst, src1); | 696     movq(dst, src1); | 
| 673     subq(dst, src2); | 697     subq(dst, src2); | 
| (...skipping 611 matching lines...) |
| 1285 } | 1309 } | 
| 1286 | 1310 | 
| 1287 | 1311 | 
| 1288 void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2, | 1312 void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2, | 
| 1289                                       Label* on_not_both_smi) { | 1313                                       Label* on_not_both_smi) { | 
| 1290   Condition both_smi = CheckBothSmi(src1, src2); | 1314   Condition both_smi = CheckBothSmi(src1, src2); | 
| 1291   j(NegateCondition(both_smi), on_not_both_smi); | 1315   j(NegateCondition(both_smi), on_not_both_smi); | 
| 1292 } | 1316 } | 
| 1293 | 1317 | 
| 1294 | 1318 | 
|  | 1319 void MacroAssembler::JumpIfNotBothPositiveSmi(Register src1, Register src2, | 
|  | 1320                                               Label* on_not_both_smi) { | 
|  | 1321   Condition both_smi = CheckBothPositiveSmi(src1, src2); | 
|  | 1322   j(NegateCondition(both_smi), on_not_both_smi); | 
|  | 1323 } | 
|  | 1324 | 
|  | 1325 | 
|  | 1326 | 
| 1295 void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object, | 1327 void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first_object, | 
| 1296                                                          Register second_object, | 1328                                                          Register second_object, | 
| 1297                                                          Register scratch1, | 1329                                                          Register scratch1, | 
| 1298                                                          Register scratch2, | 1330                                                          Register scratch2, | 
| 1299                                                          Label* on_fail) { | 1331                                                          Label* on_fail) { | 
| 1300   // Check that both objects are not smis. | 1332   // Check that both objects are not smis. | 
| 1301   Condition either_smi = CheckEitherSmi(first_object, second_object); | 1333   Condition either_smi = CheckEitherSmi(first_object, second_object); | 
| 1302   j(either_smi, on_fail); | 1334   j(either_smi, on_fail); | 
| 1303 | 1335 | 
| 1304   // Load instance type for both strings. | 1336   // Load instance type for both strings. | 
| (...skipping 205 matching lines...) |
| 1510   CmpInstanceType(map, type); | 1542   CmpInstanceType(map, type); | 
| 1511 } | 1543 } | 
| 1512 | 1544 | 
| 1513 | 1545 | 
| 1514 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) { | 1546 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) { | 
| 1515   cmpb(FieldOperand(map, Map::kInstanceTypeOffset), | 1547   cmpb(FieldOperand(map, Map::kInstanceTypeOffset), | 
| 1516        Immediate(static_cast<int8_t>(type))); | 1548        Immediate(static_cast<int8_t>(type))); | 
| 1517 } | 1549 } | 
| 1518 | 1550 | 
| 1519 | 1551 | 
|  | 1552 Condition MacroAssembler::IsObjectStringType(Register heap_object, | 
|  | 1553                                              Register map, | 
|  | 1554                                              Register instance_type) { | 
|  | 1555   movq(map, FieldOperand(heap_object, HeapObject::kMapOffset)); | 
|  | 1556   movzxbq(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); | 
|  | 1557   ASSERT(kNotStringTag != 0); | 
|  | 1558   testb(instance_type, Immediate(kIsNotStringMask)); | 
|  | 1559   return zero; | 
|  | 1560 } | 
|  | 1561 | 
|  | 1562 | 
| 1520 void MacroAssembler::TryGetFunctionPrototype(Register function, | 1563 void MacroAssembler::TryGetFunctionPrototype(Register function, | 
| 1521                                              Register result, | 1564                                              Register result, | 
| 1522                                              Label* miss) { | 1565                                              Label* miss) { | 
| 1523   // Check that the receiver isn't a smi. | 1566   // Check that the receiver isn't a smi. | 
| 1524   testl(function, Immediate(kSmiTagMask)); | 1567   testl(function, Immediate(kSmiTagMask)); | 
| 1525   j(zero, miss); | 1568   j(zero, miss); | 
| 1526 | 1569 | 
| 1527   // Check that the function really is a function. | 1570   // Check that the function really is a function. | 
| 1528   CmpObjectType(function, JS_FUNCTION_TYPE, result); | 1571   CmpObjectType(function, JS_FUNCTION_TYPE, result); | 
| 1529   j(not_equal, miss); | 1572   j(not_equal, miss); | 
| (...skipping 913 matching lines...) |
| 2443 CodePatcher::~CodePatcher() { | 2486 CodePatcher::~CodePatcher() { | 
| 2444   // Indicate that code has changed. | 2487   // Indicate that code has changed. | 
| 2445   CPU::FlushICache(address_, size_); | 2488   CPU::FlushICache(address_, size_); | 
| 2446 | 2489 | 
| 2447   // Check that the code was patched as expected. | 2490   // Check that the code was patched as expected. | 
| 2448   ASSERT(masm_.pc_ == address_ + size_); | 2491   ASSERT(masm_.pc_ == address_ + size_); | 
| 2449   ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2492   ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 
| 2450 } | 2493 } | 
| 2451 | 2494 | 
| 2452 } }  // namespace v8::internal | 2495 } }  // namespace v8::internal | 
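
A note on the new CheckBothPositiveSmi helper (new lines 584–594): after OR-ing the two registers, rotating left by one moves the combined sign bit into bit 0 and the combined smi tag bit into bit 1, so a single test against 0x03 verifies both that the inputs are smis and that neither is negative. The host-side sketch below models that bit trick assuming 32-bit smis tagged as value << 1 with kSmiTag == 0; the helper name and layout are illustrative, not taken from the V8 sources.

```cpp
#include <cassert>
#include <cstdint>

// Sketch of the CheckBothPositiveSmi bit trick, assuming 32-bit smis
// tagged as value << 1 with kSmiTag == 0 (illustrative layout only).
static bool BothPositiveSmi(uint32_t first, uint32_t second) {
  uint32_t combined = first | second;                     // orl
  uint32_t rotated = (combined << 1) | (combined >> 31);  // rol by 1
  // Bit 0 now holds the combined sign bit, bit 1 the combined tag bit;
  // both must be clear for "both are non-negative smis".
  return (rotated & 0x03) == 0;                           // testl 0x03 -> zero
}

int main() {
  auto smi = [](int32_t v) { return static_cast<uint32_t>(v) << 1; };
  assert(BothPositiveSmi(smi(0), smi(42)));       // two non-negative smis
  assert(!BothPositiveSmi(smi(-1), smi(42)));     // negative smi rejected
  assert(!BothPositiveSmi(smi(7) | 1, smi(42)));  // tagged pointer rejected
  return 0;
}
```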
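On the SmiSub change (new lines 672–699): passing NULL as on_not_smi_result selects an unchecked path that callers may only use when overflow cannot occur, guarded by a debug-only Assert, while passing a label keeps the existing overflow-checked behaviour that restores src1 and jumps out. A minimal host-side sketch of the two flavours, using the GCC/Clang __builtin_sub_overflow intrinsic and the same illustrative 32-bit tagged layout as above:

```cpp
#include <cstdint>
#include <optional>

// Checked flavour: mirrors j(no_overflow, ...) followed by the jump to
// on_not_smi_result when the raw tagged subtraction overflows.
static std::optional<int32_t> SmiSubChecked(int32_t a_tagged, int32_t b_tagged) {
  int32_t result;
  if (__builtin_sub_overflow(a_tagged, b_tagged, &result)) {
    return std::nullopt;  // corresponds to taking on_not_smi_result
  }
  return result;
}

// Unchecked flavour: mirrors the on_not_smi_result == NULL path; the
// caller guarantees the result still fits in a smi.
static int32_t SmiSubUnchecked(int32_t a_tagged, int32_t b_tagged) {
  return a_tagged - b_tagged;
}
```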
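On the new IsObjectStringType helper (new lines 1552–1560): it loads the object's map and 8-bit instance type, then tests kIsNotStringMask, so the returned zero condition means "this heap object is a string". The sketch below restates that predicate on the host side; the mask value is an assumption for illustration, not read from the V8 headers.

```cpp
#include <cstdint>

// Illustrative stand-in for V8's kIsNotStringMask; the real constant
// lives in the V8 headers and may differ.
constexpr uint8_t kIsNotStringMaskSketch = 0x80;

// Sketch of the predicate behind IsObjectStringType: the zero flag set
// by testb(instance_type, Immediate(kIsNotStringMask)) corresponds to
// this expression being true.
static bool IsStringInstanceType(uint8_t instance_type) {
  return (instance_type & kIsNotStringMaskSketch) == 0;
}
```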