OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 6492 matching lines...)
6503 void GenericBinaryOpStub::GenerateCall( | 6503 void GenericBinaryOpStub::GenerateCall( |
6504 MacroAssembler* masm, | 6504 MacroAssembler* masm, |
6505 Register left, | 6505 Register left, |
6506 Register right) { | 6506 Register right) { |
6507 if (!ArgsInRegistersSupported()) { | 6507 if (!ArgsInRegistersSupported()) { |
6508 // Pass arguments on the stack. | 6508 // Pass arguments on the stack. |
6509 __ push(left); | 6509 __ push(left); |
6510 __ push(right); | 6510 __ push(right); |
6511 } else { | 6511 } else { |
6512 // The calling convention with registers is left in edx and right in eax. | 6512 // The calling convention with registers is left in edx and right in eax. |
6513 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1); | 6513 Register left_arg = edx; |
6514 if (!(left.is(edx) && right.is(eax))) { | 6514 Register right_arg = eax; |
6515 if (left.is(eax) && right.is(edx)) { | 6515 if (!(left.is(left_arg) && right.is(right_arg))) { |
| 6516 if (left.is(right_arg) && right.is(left_arg)) { |
6516 if (IsOperationCommutative()) { | 6517 if (IsOperationCommutative()) { |
6517 SetArgsReversed(); | 6518 SetArgsReversed(); |
6518 } else { | 6519 } else { |
6519 __ xchg(left, right); | 6520 __ xchg(left, right); |
6520 } | 6521 } |
6521 } else if (left.is(edx)) { | 6522 } else if (left.is(left_arg)) { |
6522 __ mov(eax, right); | 6523 __ mov(right_arg, right); |
6523 } else if (left.is(eax)) { | 6524 } else if (left.is(right_arg)) { |
6524 if (IsOperationCommutative()) { | 6525 if (IsOperationCommutative()) { |
6525 __ mov(edx, right); | 6526 __ mov(left_arg, right); |
6526 SetArgsReversed(); | 6527 SetArgsReversed(); |
6527 } else { | 6528 } else { |
6528 __ mov(edx, left); | 6529 // Order of moves is important to avoid destroying the left argument. |
6529 __ mov(eax, right); | 6530 __ mov(left_arg, left); |
| 6531 __ mov(right_arg, right); |
6530 } | 6532 } |
6531 } else if (right.is(edx)) { | 6533 } else if (right.is(left_arg)) { |
6532 if (IsOperationCommutative()) { | 6534 if (IsOperationCommutative()) { |
6533 __ mov(eax, left); | 6535 __ mov(right_arg, left); |
6534 SetArgsReversed(); | 6536 SetArgsReversed(); |
6535 } else { | 6537 } else { |
6536 __ mov(eax, right); | 6538 // Order of moves is important to avoid destroying the right argument. |
6537 __ mov(edx, left); | 6539 __ mov(right_arg, right); |
| 6540 __ mov(left_arg, left); |
6538 } | 6541 } |
6539 } else if (right.is(eax)) { | 6542 } else if (right.is(right_arg)) { |
6540 __ mov(edx, left); | 6543 __ mov(left_arg, left); |
6541 } else { | 6544 } else { |
6542 __ mov(edx, left); | 6545 // Order of moves is not important. |
6543 __ mov(eax, right); | 6546 __ mov(left_arg, left); |
| 6547 __ mov(right_arg, right); |
6544 } | 6548 } |
6545 } | 6549 } |
6546 | 6550 |
6547 // Update flags to indicate that arguments are in registers. | 6551 // Update flags to indicate that arguments are in registers. |
6548 SetArgsInRegisters(); | 6552 SetArgsInRegisters(); |
| 6553 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1); |
6549 } | 6554 } |
6550 | 6555 |
6551 // Call the stub. | 6556 // Call the stub. |
6552 __ CallStub(this); | 6557 __ CallStub(this); |
6553 } | 6558 } |
6554 | 6559 |
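For reference, a standalone sketch (plain C++, not V8 code) of the decision tree in the new register/register GenerateCall above: it adapts an arbitrary (left, right) pair to the fixed left-in-edx / right-in-eax convention, and prefers SetArgsReversed() over an xchg or an extra mov whenever the operation is commutative. The Reg enum, kName table, and AdaptArgs helper are illustrative names only.

```cpp
#include <iostream>

enum Reg { EAX, ECX, EDX, EBX };                         // illustrative subset
const char* kName[] = {"eax", "ecx", "edx", "ebx"};

// Prints the move sequence the stub would emit for a given (left, right) pair.
void AdaptArgs(Reg left, Reg right, bool commutative) {
  const Reg left_arg = EDX, right_arg = EAX;             // fixed convention
  if (left == left_arg && right == right_arg) return;    // already in place
  if (left == right_arg && right == left_arg) {          // exactly swapped
    std::cout << (commutative ? "SetArgsReversed()\n" : "xchg edx, eax\n");
  } else if (left == left_arg) {
    std::cout << "mov eax, " << kName[right] << "\n";
  } else if (left == right_arg) {
    if (commutative) {
      std::cout << "mov edx, " << kName[right] << "\nSetArgsReversed()\n";
    } else {                                             // save left (in eax) first
      std::cout << "mov edx, eax\nmov eax, " << kName[right] << "\n";
    }
  } else if (right == left_arg) {
    if (commutative) {
      std::cout << "mov eax, " << kName[left] << "\nSetArgsReversed()\n";
    } else {                                             // save right (in edx) first
      std::cout << "mov eax, edx\nmov edx, " << kName[left] << "\n";
    }
  } else if (right == right_arg) {
    std::cout << "mov edx, " << kName[left] << "\n";
  } else {
    std::cout << "mov edx, " << kName[left] << "\nmov eax, " << kName[right] << "\n";
  }
}

int main() {
  AdaptArgs(EAX, EDX, true);   // prints: SetArgsReversed()
  AdaptArgs(EBX, EDX, false);  // prints: mov eax, edx / mov edx, ebx
}
```

The commutative shortcuts matter because the reversed flag is consulted again when the arguments are spilled back to the stack before the runtime call further down.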
6555 | 6560 |
6556 void GenericBinaryOpStub::GenerateCall( | 6561 void GenericBinaryOpStub::GenerateCall( |
6557 MacroAssembler* masm, | 6562 MacroAssembler* masm, |
6558 Register left, | 6563 Register left, |
6559 Smi* right) { | 6564 Smi* right) { |
6560 if (!ArgsInRegistersSupported()) { | 6565 if (!ArgsInRegistersSupported()) { |
6561 // Pass arguments on the stack. | 6566 // Pass arguments on the stack. |
6562 __ push(left); | 6567 __ push(left); |
6563 __ push(Immediate(right)); | 6568 __ push(Immediate(right)); |
6564 } else { | 6569 } else { |
6565 // Adapt arguments to the calling convention left in edx and right in eax. | 6570 // The calling convention with registers is left in edx and right in eax. |
6566 if (left.is(edx)) { | 6571 Register left_arg = edx; |
6567 __ mov(eax, Immediate(right)); | 6572 Register right_arg = eax; |
6568 } else if (left.is(eax) && IsOperationCommutative()) { | 6573 if (left.is(left_arg)) { |
6569 __ mov(edx, Immediate(right)); | 6574 __ mov(right_arg, Immediate(right)); |
| 6575 } else if (left.is(right_arg) && IsOperationCommutative()) { |
| 6576 __ mov(left_arg, Immediate(right)); |
6570 SetArgsReversed(); | 6577 SetArgsReversed(); |
6571 } else { | 6578 } else { |
6572 __ mov(edx, left); | 6579 __ mov(left_arg, left); |
6573 __ mov(eax, Immediate(right)); | 6580 __ mov(right_arg, Immediate(right)); |
6574 } | 6581 } |
6575 | 6582 |
6576 // Update flags to indicate that arguments are in registers. | 6583 // Update flags to indicate that arguments are in registers. |
6577 SetArgsInRegisters(); | 6584 SetArgsInRegisters(); |
| 6585 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1); |
6578 } | 6586 } |
6579 | 6587 |
6580 // Call the stub. | 6588 // Call the stub. |
6581 __ CallStub(this); | 6589 __ CallStub(this); |
6582 } | 6590 } |
6583 | 6591 |
6584 | 6592 |
6585 void GenericBinaryOpStub::GenerateCall( | 6593 void GenericBinaryOpStub::GenerateCall( |
6586 MacroAssembler* masm, | 6594 MacroAssembler* masm, |
6587 Smi* left, | 6595 Smi* left, |
6588 Register right) { | 6596 Register right) { |
6589 if (!ArgsInRegistersSupported()) { | 6597 if (!ArgsInRegistersSupported()) { |
6590 // Pass arguments on the stack. | 6598 // Pass arguments on the stack. |
6591 __ push(Immediate(left)); | 6599 __ push(Immediate(left)); |
6592 __ push(right); | 6600 __ push(right); |
6593 } else { | 6601 } else { |
6594 // Adapt arguments to the calling convention left in edx and right in eax. | 6602 // The calling convention with registers is left in edx and right in eax. |
6595 bool is_commutative = (op_ == (Token::ADD) || (op_ == Token::MUL)); | 6603 Register left_arg = edx; |
6596 if (right.is(eax)) { | 6604 Register right_arg = eax; |
6597 __ mov(edx, Immediate(left)); | 6605 if (right.is(right_arg)) { |
6598 } else if (right.is(edx) && is_commutative) { | 6606 __ mov(left_arg, Immediate(left)); |
6599 __ mov(eax, Immediate(left)); | 6607 } else if (right.is(left_arg) && IsOperationCommutative()) { |
| 6608 __ mov(right_arg, Immediate(left)); |
| 6609 SetArgsReversed(); |
6600 } else { | 6610 } else { |
6601 __ mov(edx, Immediate(left)); | 6611 __ mov(left_arg, Immediate(left)); |
6602 __ mov(eax, right); | 6612 __ mov(right_arg, right); |
6603 } | 6613 } |
6604 // Update flags to indicate that arguments are in registers. | 6614 // Update flags to indicate that arguments are in registers. |
6605 SetArgsInRegisters(); | 6615 SetArgsInRegisters(); |
| 6616 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1); |
6606 } | 6617 } |
6607 | 6618 |
6608 // Call the stub. | 6619 // Call the stub. |
6609 __ CallStub(this); | 6620 __ CallStub(this); |
6610 } | 6621 } |
6611 | 6622 |
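Both Smi-immediate overloads above apply the same idea with one constant operand: the immediate can be materialized directly into whichever convention slot is free, so at most one register move is ever needed, and when the register operand already occupies the slot reserved for the immediate, a commutative operation just marks the arguments as reversed. A condensed model of that decision (hypothetical names, not V8 code):

```cpp
#include <cstdio>

enum Plan { IMM_INTO_FREE_SLOT, IMM_INTO_OTHER_SLOT_REVERSED, MOVE_REG_THEN_IMM };

// reg_in_own_slot:   the register operand already sits in its convention slot.
// reg_in_other_slot: it sits in the slot reserved for the immediate operand.
Plan PlanCall(bool reg_in_own_slot, bool reg_in_other_slot, bool commutative) {
  if (reg_in_own_slot) return IMM_INTO_FREE_SLOT;        // one mov of the immediate
  if (reg_in_other_slot && commutative)
    return IMM_INTO_OTHER_SLOT_REVERSED;                 // plus SetArgsReversed()
  return MOVE_REG_THEN_IMM;                              // mov reg, then mov immediate
}

int main() {
  std::printf("%d %d %d\n",
              PlanCall(true, false, false),    // 0
              PlanCall(false, true, true),     // 1
              PlanCall(false, true, false));   // 2
}
```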
6612 | 6623 |
6613 void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) { | 6624 void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) { |
6614 // Perform fast-case smi code for the operation (eax <op> ebx) and | 6625 // Perform fast-case smi code for the operation (eax <op> ebx) and |
6615 // leave result in register eax. | 6626 // leave result in register eax. |
(...skipping 303 matching lines...)
6919 __ test(eax, Immediate(0xc0000000)); | 6930 __ test(eax, Immediate(0xc0000000)); |
6920 __ j(not_zero, &non_smi_result); | 6931 __ j(not_zero, &non_smi_result); |
6921 } else { | 6932 } else { |
6922 // Check if result fits in a smi. | 6933 // Check if result fits in a smi. |
6923 __ cmp(eax, 0xc0000000); | 6934 __ cmp(eax, 0xc0000000); |
6924 __ j(negative, &non_smi_result); | 6935 __ j(negative, &non_smi_result); |
6925 } | 6936 } |
6926 // Tag smi result and return. | 6937 // Tag smi result and return. |
6927 ASSERT(kSmiTagSize == times_2); // adjust code if not the case | 6938 ASSERT(kSmiTagSize == times_2); // adjust code if not the case |
6928 __ lea(eax, Operand(eax, eax, times_1, kSmiTag)); | 6939 __ lea(eax, Operand(eax, eax, times_1, kSmiTag)); |
6929 __ ret(2 * kPointerSize); | 6940 GenerateReturn(masm); |
6930 | 6941 |
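The 0xc0000000 checks and the lea above are the standard ia32 smi range test and tagging. Assuming the usual 32-bit smi layout that the ASSERT on kSmiTagSize relies on (a 1-bit tag of 0, signed 31-bit payload), a signed result fits exactly when bits 31 and 30 agree, an unsigned SHR result fits when both are clear, and tagging is simply a doubling. A minimal self-checking sketch of those three facts (not V8 code):

```cpp
#include <cstdint>
#include <cassert>

// Mirrors `cmp eax, 0xc0000000; j negative`: fits iff the subtraction does
// not produce a "negative" (bit 31 set) result.
bool FitsSmiSigned(int32_t v) {
  return static_cast<uint32_t>(v) - 0xc0000000u < 0x80000000u;
}

// Mirrors `test eax, 0xc0000000; j not_zero` for unsigned SHR results.
bool FitsSmiUnsigned(uint32_t v) { return (v & 0xc0000000u) == 0; }

// Mirrors `lea eax, [eax + eax*1 + kSmiTag]` with a zero tag: value * 2.
int32_t TagSmi(int32_t v) { return v * 2; }

int main() {
  assert(FitsSmiSigned(0x3fffffff) && FitsSmiSigned(-0x40000000));
  assert(!FitsSmiSigned(0x40000000) && !FitsSmiSigned(-0x40000001));
  assert(FitsSmiUnsigned(0x3fffffffu) && !FitsSmiUnsigned(0x40000000u));
  assert(TagSmi(3) == 6 && TagSmi(-7) == -14);
}
```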
6931 // All ops except SHR return a signed int32 that we load in a HeapNumber. | 6942 // All ops except SHR return a signed int32 that we load in a HeapNumber. |
6932 if (op_ != Token::SHR) { | 6943 if (op_ != Token::SHR) { |
6933 __ bind(&non_smi_result); | 6944 __ bind(&non_smi_result); |
6934 // Allocate a heap number if needed. | 6945 // Allocate a heap number if needed. |
6935 __ mov(ebx, Operand(eax)); // ebx: result | 6946 __ mov(ebx, Operand(eax)); // ebx: result |
6936 switch (mode_) { | 6947 switch (mode_) { |
6937 case OVERWRITE_LEFT: | 6948 case OVERWRITE_LEFT: |
6938 case OVERWRITE_RIGHT: | 6949 case OVERWRITE_RIGHT: |
6939 // If the operand was an object, we skip the | 6950 // If the operand was an object, we skip the |
6940 // allocation of a heap number. | 6951 // allocation of a heap number. |
6941 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ? | 6952 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ? |
6942 1 * kPointerSize : 2 * kPointerSize)); | 6953 1 * kPointerSize : 2 * kPointerSize)); |
6943 __ test(eax, Immediate(kSmiTagMask)); | 6954 __ test(eax, Immediate(kSmiTagMask)); |
6944 __ j(not_zero, &skip_allocation, not_taken); | 6955 __ j(not_zero, &skip_allocation, not_taken); |
6945 // Fall through! | 6956 // Fall through! |
6946 case NO_OVERWRITE: | 6957 case NO_OVERWRITE: |
6947 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); | 6958 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); |
6948 __ bind(&skip_allocation); | 6959 __ bind(&skip_allocation); |
6949 break; | 6960 break; |
6950 default: UNREACHABLE(); | 6961 default: UNREACHABLE(); |
6951 } | 6962 } |
6952 // Store the result in the HeapNumber and return. | 6963 // Store the result in the HeapNumber and return. |
6953 __ mov(Operand(esp, 1 * kPointerSize), ebx); | 6964 __ mov(Operand(esp, 1 * kPointerSize), ebx); |
6954 __ fild_s(Operand(esp, 1 * kPointerSize)); | 6965 __ fild_s(Operand(esp, 1 * kPointerSize)); |
6955 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); | 6966 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); |
6956 __ ret(2 * kPointerSize); | 6967 GenerateReturn(masm); |
6957 } | 6968 } |
6958 | 6969 |
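The switch above decides whether a fresh HeapNumber is needed: in the OVERWRITE_LEFT/OVERWRITE_RIGHT modes the corresponding input is reloaded from the stack and, if it is not a smi, its existing heap-number box is reused as the result object; only a smi input (or NO_OVERWRITE) forces an allocation. The int32 result is then written as a double via fild_s/fstp_d. A conceptual sketch of the allocation decision (illustrative names, not V8 code):

```cpp
#include <cstdio>

enum OverwriteMode { NO_OVERWRITE, OVERWRITE_LEFT, OVERWRITE_RIGHT };

// operand_is_smi: whether the overwritable operand was a smi (no box to reuse).
// Returns true when a new HeapNumber must be allocated.
bool NeedsAllocation(OverwriteMode mode, bool operand_is_smi) {
  if (mode == NO_OVERWRITE) return true;   // never clobber an input
  return operand_is_smi;                   // heap numbers can be overwritten in place
}

int main() {
  std::printf("%d %d %d\n",
              NeedsAllocation(NO_OVERWRITE, false),     // 1: always allocate
              NeedsAllocation(OVERWRITE_LEFT, true),    // 1: smi, nothing to reuse
              NeedsAllocation(OVERWRITE_RIGHT, false)); // 0: reuse the right operand
}
```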
6959 // Clear the FPU exception flag and reset the stack before calling | 6970 // Clear the FPU exception flag and reset the stack before calling |
6960 // the runtime system. | 6971 // the runtime system. |
6961 __ bind(&operand_conversion_failure); | 6972 __ bind(&operand_conversion_failure); |
6962 __ add(Operand(esp), Immediate(2 * kPointerSize)); | 6973 __ add(Operand(esp), Immediate(2 * kPointerSize)); |
6963 if (use_sse3_) { | 6974 if (use_sse3_) { |
6964 // If we've used the SSE3 instructions for truncating the | 6975 // If we've used the SSE3 instructions for truncating the |
6965 // floating point values to integers and it failed, we have a | 6976 // floating point values to integers and it failed, we have a |
6966 // pending #IA exception. Clear it. | 6977 // pending #IA exception. Clear it. |
(...skipping 11 matching lines...)
6978 } | 6989 } |
6979 __ mov(eax, Operand(esp, 1 * kPointerSize)); | 6990 __ mov(eax, Operand(esp, 1 * kPointerSize)); |
6980 __ mov(edx, Operand(esp, 2 * kPointerSize)); | 6991 __ mov(edx, Operand(esp, 2 * kPointerSize)); |
6981 break; | 6992 break; |
6982 } | 6993 } |
6983 default: UNREACHABLE(); break; | 6994 default: UNREACHABLE(); break; |
6984 } | 6995 } |
6985 | 6996 |
6986 // If all else fails, use the runtime system to get the correct | 6997 // If all else fails, use the runtime system to get the correct |
6987 // result. If arguments were passed in registers, now place them on the | 6998 // result. If arguments were passed in registers, now place them on the |
6988 // stack in the correct order. | 6999 // stack in the correct order below the return address. |
6989 __ bind(&call_runtime); | 7000 __ bind(&call_runtime); |
6990 if (HasArgumentsInRegisters()) { | 7001 if (HasArgumentsInRegisters()) { |
6991 __ pop(ecx); | 7002 __ pop(ecx); |
6992 if (HasArgumentsReversed()) { | 7003 if (HasArgumentsReversed()) { |
6993 __ push(eax); | 7004 __ push(eax); |
6994 __ push(edx); | 7005 __ push(edx); |
6995 } else { | 7006 } else { |
6996 __ push(edx); | 7007 __ push(edx); |
6997 __ push(eax); | 7008 __ push(eax); |
6998 } | 7009 } |
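The runtime fallback above undoes the register calling convention: the return address is popped into ecx and the two operands are pushed in logical left-then-right order, swapping them back if SetArgsReversed() had been used. The continuation is elided from this diff excerpt; the toy model below simply pushes the return address back on top, which is an assumption of the sketch rather than something shown here, and its names are illustrative, not V8 code.

```cpp
#include <vector>
#include <cstdint>
#include <cassert>

// Stack grows toward the back of the vector; back() is the top of stack.
void PlaceArgsBelowReturnAddress(std::vector<uint32_t>* stack,
                                 uint32_t edx, uint32_t eax,
                                 bool args_reversed) {
  uint32_t ret = stack->back();                 // pop ecx (return address)
  stack->pop_back();
  uint32_t left  = args_reversed ? eax : edx;   // undo SetArgsReversed()
  uint32_t right = args_reversed ? edx : eax;
  stack->push_back(left);                       // left ends up deeper on the stack
  stack->push_back(right);
  stack->push_back(ret);                        // restore return address (assumed)
}

int main() {
  std::vector<uint32_t> stack = {0xdeadbeef};   // only the return address is on top
  PlaceArgsBelowReturnAddress(&stack, /*edx=*/2, /*eax=*/4, /*args_reversed=*/false);
  assert((stack == std::vector<uint32_t>{2, 4, 0xdeadbeef}));
}
```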
(...skipping 1069 matching lines...)
8068 | 8079 |
8069 int CompareStub::MinorKey() { | 8080 int CompareStub::MinorKey() { |
8070 // Encode the two parameters in a unique 16 bit value. | 8081 // Encode the two parameters in a unique 16 bit value. |
8071 ASSERT(static_cast<unsigned>(cc_) < (1 << 15)); | 8082 ASSERT(static_cast<unsigned>(cc_) < (1 << 15)); |
8072 return (static_cast<unsigned>(cc_) << 1) | (strict_ ? 1 : 0); | 8083 return (static_cast<unsigned>(cc_) << 1) | (strict_ ? 1 : 0); |
8073 } | 8084 } |
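MinorKey() packs the two parameters by shifting the condition code past the strictness bit, so distinct (cc_, strict_) pairs can never collide within the 16-bit key. A minimal sketch of the same packing (not V8 code):

```cpp
#include <cassert>

int MinorKey(unsigned cc, bool strict) {
  assert(cc < (1u << 15));                 // mirrors the ASSERT in the stub
  return static_cast<int>(cc << 1) | (strict ? 1 : 0);
}

int main() {
  assert(MinorKey(7, true) == 15);         // 7 << 1 | 1
  assert(MinorKey(7, false) == 14);
  assert(MinorKey(3, true) != MinorKey(7, false));
}
```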
8074 | 8085 |
8075 #undef __ | 8086 #undef __ |
8076 | 8087 |
8077 } } // namespace v8::internal | 8088 } } // namespace v8::internal |