OLD | NEW |
---|---|
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 359 matching lines...) | |
370 bind(&ok); | 370 bind(&ok); |
371 } | 371 } |
372 | 372 |
373 | 373 |
374 void MacroAssembler::AbortIfNotSmi(Register object) { | 374 void MacroAssembler::AbortIfNotSmi(Register object) { |
375 test(object, Immediate(kSmiTagMask)); | 375 test(object, Immediate(kSmiTagMask)); |
376 Assert(equal, "Operand not a smi"); | 376 Assert(equal, "Operand not a smi"); |
377 } | 377 } |
378 | 378 |
379 | 379 |
380 void MacroAssembler::AbortIfSmi(Register object) { | |
381 test(object, Immediate(kSmiTagMask)); | |
382 Assert(not_equal, "Operand a smi"); | |
383 } | |
384 | |
385 | |
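For context on the two assertions above: on ia32, V8 tags a small integer (smi) by shifting it left one bit, leaving the low bit zero, while heap pointers carry a set low bit. A minimal sketch of the tag test these assertions emit, with the constants restated locally as assumptions (kSmiTag == 0, kSmiTagMask == 1):

```cpp
#include <cassert>
#include <cstdint>

// Assumed ia32 smi encoding: value << 1, with a zero low tag bit.
const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = (intptr_t{1} << kSmiTagSize) - 1;  // == 1
const intptr_t kSmiTag = 0;

bool IsSmi(intptr_t word) {
  // Mirrors test(object, Immediate(kSmiTagMask)) + j(equal, ...):
  // the AND is zero exactly when the tag bit is clear.
  return (word & kSmiTagMask) == kSmiTag;
}

int main() {
  intptr_t smi = 42 << kSmiTagSize;  // tagged smi holding 42
  intptr_t heap_ptr = 0x08041235;    // odd word: looks like a heap pointer
  assert(IsSmi(smi));
  assert(!IsSmi(heap_ptr));          // AbortIfNotSmi would fire on this
}
```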
380 void MacroAssembler::EnterFrame(StackFrame::Type type) { | 386 void MacroAssembler::EnterFrame(StackFrame::Type type) { |
381 push(ebp); | 387 push(ebp); |
382 mov(ebp, Operand(esp)); | 388 mov(ebp, Operand(esp)); |
383 push(esi); | 389 push(esi); |
384 push(Immediate(Smi::FromInt(type))); | 390 push(Immediate(Smi::FromInt(type))); |
385 push(Immediate(CodeObject())); | 391 push(Immediate(CodeObject())); |
386 if (FLAG_debug_code) { | 392 if (FLAG_debug_code) { |
387 cmp(Operand(esp, 0), Immediate(Factory::undefined_value())); | 393 cmp(Operand(esp, 0), Immediate(Factory::undefined_value())); |
388 Check(not_equal, "code object not properly patched"); | 394 Check(not_equal, "code object not properly patched"); |
389 } | 395 } |
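The five pushes above lay out an internal frame record below the saved ebp. A hypothetical model of the resulting layout (slot names and the vector simulation are mine; offsets assume 4-byte words on ia32):

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Models EnterFrame's effect. After
//   push ebp; mov ebp, esp; push esi; push Smi(type); push CodeObject()
// the slots relative to the new ebp are:
//   [ebp + 0]  saved caller ebp
//   [ebp - 4]  esi (context)
//   [ebp - 8]  frame type, as a smi
//   [ebp - 12] code object
int main() {
  std::vector<uint32_t> stack;  // push_back stands in for a downward push
  uint32_t caller_ebp = 0xdeadbeef, esi = 0x1, smi_type = 0x2, code_obj = 0x3;
  stack.push_back(caller_ebp);       // push ebp
  size_t ebp = stack.size() - 1;     // mov ebp, esp
  stack.push_back(esi);              // push esi
  stack.push_back(smi_type);         // push Immediate(Smi::FromInt(type))
  stack.push_back(code_obj);         // push Immediate(CodeObject())
  assert(stack[ebp] == caller_ebp);  // [ebp + 0]
  assert(stack[ebp + 1] == esi);     // [ebp - 4]
  assert(stack[ebp + 2] == smi_type);// [ebp - 8]
  assert(stack[ebp + 3] == code_obj);// [ebp - 12]
}
```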
(...skipping 1100 matching lines...) | |
1490 | 1496 |
1491 push(eax); | 1497 push(eax); |
1492 push(Immediate(p0)); | 1498 push(Immediate(p0)); |
1493 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)))); | 1499 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)))); |
1494 CallRuntime(Runtime::kAbort, 2); | 1500 CallRuntime(Runtime::kAbort, 2); |
1495 // will not return here | 1501 // will not return here |
1496 int3(); | 1502 int3(); |
1497 } | 1503 } |
1498 | 1504 |
1499 | 1505 |
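On the p0/p1 pair pushed above (the start of Abort falls outside this hunk): in V8 of this era, p1 is the raw message pointer and p0 is that pointer with its tag bit cleared, so both stack slots scan as smis if a GC walks the abort frame; the lost low bit travels separately as Smi::FromInt(p1 - p0). A minimal sketch of that round trip, with the constants restated locally as assumptions:

```cpp
#include <cassert>
#include <cstdint>

// Assumed ia32 smi constants, restated for illustration.
const intptr_t kSmiTagMask = 1;
const intptr_t kSmiTag = 0;
const int kSmiTagSize = 1;

int main() {
  const char* msg = "code object not properly patched";
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;    // smi-looking word
  intptr_t delta_smi = (p1 - p0) << kSmiTagSize;  // Smi::FromInt(p1 - p0)
  // The runtime can reconstruct the original pointer from the two words:
  intptr_t decoded = p0 + (delta_smi >> kSmiTagSize);
  assert(decoded == p1);
}
```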
1506 void MacroAssembler::JumpIfNotNumber(Register reg, | |
1507 TypeInfo info, | |
1508 Label* on_not_number) { | |
1509 if (FLAG_debug_code) AbortIfSmi(reg); | |
1510 if (!info.IsNumber()) { | |
1511 cmp(FieldOperand(reg, HeapObject::kMapOffset), | |
1512 Factory::heap_number_map()); | |
1513 j(not_equal, on_not_number); | |
1514 } | |
1515 } | |
1516 | |
1517 | |
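The map comparison above relies on every heap object starting with a map pointer, and on each heap number sharing one canonical map. A toy sketch of the same shape test in plain C++ (the types and field layout here are illustrative stand-ins, not V8's actual definitions):

```cpp
#include <cassert>

// Illustrative stand-ins: every heap object's first field is its map.
struct Map {};
struct HeapObject { Map* map; };  // FieldOperand(reg, HeapObject::kMapOffset)

Map heap_number_map;              // stands in for Factory::heap_number_map()
Map string_map;

bool IsHeapNumber(const HeapObject* obj) {
  // Mirrors cmp(FieldOperand(reg, kMapOffset), heap_number_map).
  return obj->map == &heap_number_map;
}

int main() {
  HeapObject num{&heap_number_map}, str{&string_map};
  assert(IsHeapNumber(&num));
  assert(!IsHeapNumber(&str));    // would take the on_not_number branch
}
```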
1518 void MacroAssembler::JumpIfNotInt32(Register reg, | |
1519 Register scratch, | |
1520 TypeInfo info, | |
1521 Label* on_not_int32) { | |
1522 if (FLAG_debug_code) { | |
1523 AbortIfSmi(reg); | |
1524 AbortIfNotNumber(reg); | |
1525 } | |
1526 const uint32_t non_smi_exponent = | |
1527 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift; | |
1528 if (!info.IsInteger32()) { | |
1529 bool push_pop = scratch.is(no_reg); | |
1530 if (push_pop) { | |
1531 push(reg); | |
1532 scratch = reg; | |
1533 } | |
1534 mov(scratch, FieldOperand(reg, HeapNumber::kExponentOffset)); | |
1535 and_(scratch, HeapNumber::kExponentMask); | |
1536 cmp(scratch, non_smi_exponent); | |
1537 if (push_pop) { | |
1538 pop(reg); | |
1539 } | |
1540 j(greater, on_not_int32); | |
Lasse Reichstein (2010/08/06 08:20:12):
Is this faster than using SSE2 cvttsd2si and check…
Erik Corry (2010/08/09 13:13:49):
Done.
| |
1541 } | |
1542 } | |
1543 | |
1544 | |
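On the exponent test above: (kExponentBias + 30) << kExponentShift is the largest biased exponent a double can carry while its magnitude stays below 2^31, so j(greater, ...) rejects values outside int32 range. The review thread settled on the SSE2 alternative; here is a minimal sketch of that round-trip idea using intrinsics as a stand-in for the emitted cvttsd2si (the helper name and structure are mine, not V8's):

```cpp
#include <cassert>
#include <emmintrin.h>  // SSE2 intrinsics

// Truncate with cvttsd2si, reconvert, and compare. On NaN or out-of-range
// input, cvttsd2si produces the "indefinite" value 0x80000000, so the
// equality fails for everything except a double that really held an int32.
// Note: -0.0 also passes and needs separate handling where the 0 / -0
// distinction matters, as it does in V8.
bool IsInt32(double d) {
  int truncated = _mm_cvttsd_si32(_mm_set_sd(d));  // cvttsd2si
  return static_cast<double>(truncated) == d;      // lossless round trip?
}

int main() {
  assert(IsInt32(12.0));
  assert(IsInt32(-2147483648.0));  // -2^31 itself round-trips
  assert(!IsInt32(12.5));          // fractional part lost by truncation
  assert(!IsInt32(4e9));           // exponent >= bias + 31: out of range
}
```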
1500 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii( | 1545 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii( |
1501 Register instance_type, | 1546 Register instance_type, |
1502 Register scratch, | 1547 Register scratch, |
1503 Label* failure) { | 1548 Label* failure) { |
1504 if (!scratch.is(instance_type)) { | 1549 if (!scratch.is(instance_type)) { |
1505 mov(scratch, instance_type); | 1550 mov(scratch, instance_type); |
1506 } | 1551 } |
1507 and_(scratch, | 1552 and_(scratch, |
1508 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask); | 1553 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask); |
1509 cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag); | 1554 cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag); |
(...skipping 88 matching lines...) | |
1598 | 1643 |
1599 // Check that the code was patched as expected. | 1644 // Check that the code was patched as expected. |
1600 ASSERT(masm_.pc_ == address_ + size_); | 1645 ASSERT(masm_.pc_ == address_ + size_); |
1601 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 1646 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
1602 } | 1647 } |
1603 | 1648 |
1604 | 1649 |
1605 } } // namespace v8::internal | 1650 } } // namespace v8::internal |
1606 | 1651 |
1607 #endif // V8_TARGET_ARCH_IA32 | 1652 #endif // V8_TARGET_ARCH_IA32 |