OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1214 matching lines...)
1225 __ TailCallStub(&string_add_right_stub); | 1225 __ TailCallStub(&string_add_right_stub); |
1226 | 1226 |
1227 // Neither argument is a string. | 1227 // Neither argument is a string. |
1228 __ bind(&call_runtime); | 1228 __ bind(&call_runtime); |
1229 } | 1229 } |
1230 | 1230 |
1231 | 1231 |
1232 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { | 1232 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
1233 Label right_arg_changed, call_runtime; | 1233 Label right_arg_changed, call_runtime; |
1234 | 1234 |
1235 if (op_ == Token::MOD && has_fixed_right_arg_) { | 1235 if (op_ == Token::MOD && encoded_right_arg_.has_value) { |
1236 // It is guaranteed that the value will fit into a Smi, because if it | 1236 // It is guaranteed that the value will fit into a Smi, because if it |
1237 // didn't, we wouldn't be here, see BinaryOp_Patch. | 1237 // didn't, we wouldn't be here, see BinaryOp_Patch. |
1238 __ Cmp(rax, Smi::FromInt(fixed_right_arg_value())); | 1238 __ Cmp(rax, Smi::FromInt(fixed_right_arg_value())); |
1239 __ j(not_equal, &right_arg_changed); | 1239 __ j(not_equal, &right_arg_changed); |
1240 } | 1240 } |
1241 | 1241 |
1242 if (result_type_ == BinaryOpIC::UNINITIALIZED || | 1242 if (result_type_ == BinaryOpIC::UNINITIALIZED || |
1243 result_type_ == BinaryOpIC::SMI) { | 1243 result_type_ == BinaryOpIC::SMI) { |
1244 // Only allow smi results. | 1244 // Only allow smi results. |
1245 BinaryOpStub_GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS, op_); | 1245 BinaryOpStub_GenerateSmiCode(masm, NULL, NO_HEAPNUMBER_RESULTS, op_); |
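Note on the hunk above: the guard switches from the standalone boolean has_fixed_right_arg_ to encoded_right_arg_.has_value, so the fixed right argument now appears to travel in an optional-style holder that carries its own presence flag. Below is a minimal sketch of such a holder, assuming a Maybe<int>-like shape; the type and field names are illustrative, not the actual V8 declarations.

// Minimal sketch of an optional-style container matching the
// encoded_right_arg_.has_value access pattern in the new code.
// Names and layout are assumptions, not the real V8 definitions.
template <typename T>
struct Maybe {
  Maybe() : has_value(false), value() {}
  explicit Maybe(T v) : has_value(true), value(v) {}
  bool has_value;  // Replaces the old standalone has_fixed_right_arg_ flag.
  T value;         // The encoded fixed right argument, valid when has_value.
};

// Hypothetical usage mirroring the guard in GenerateSmiStub:
//   Maybe<int> encoded_right_arg_;
//   if (op_ == Token::MOD && encoded_right_arg_.has_value) { ... }

Keeping the presence flag next to the payload means the MOD fast path only compares rax against fixed_right_arg_value() when a fixed right argument was actually recorded.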
(...skipping 5853 matching lines...)
7099 __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET); | 7099 __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET); |
7100 } | 7100 } |
7101 } | 7101 } |
7102 | 7102 |
7103 | 7103 |
7104 #undef __ | 7104 #undef __ |
7105 | 7105 |
7106 } } // namespace v8::internal | 7106 } } // namespace v8::internal |
7107 | 7107 |
7108 #endif // V8_TARGET_ARCH_X64 | 7108 #endif // V8_TARGET_ARCH_X64 |