OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2263 matching lines...) | |
2274 | 2274 |
2275 __ bind(&stub_call); | 2275 __ bind(&stub_call); |
2276 __ movq(rax, rcx); | 2276 __ movq(rax, rcx); |
2277 BinaryOpStub stub(op, mode); | 2277 BinaryOpStub stub(op, mode); |
2278 CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, | 2278 CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, |
2279 expr->BinaryOperationFeedbackId()); | 2279 expr->BinaryOperationFeedbackId()); |
2280 patch_site.EmitPatchInfo(); | 2280 patch_site.EmitPatchInfo(); |
2281 __ jmp(&done, Label::kNear); | 2281 __ jmp(&done, Label::kNear); |
2282 | 2282 |
2283 __ bind(&smi_case); | 2283 __ bind(&smi_case); |
| 2284 MacroAssembler::StrictSmiInstructionWrapper wrapper(masm_, &stub_call); |
2284 switch (op) { | 2285 switch (op) { |
2285 case Token::SAR: | 2286 case Token::SAR: |
2286 __ SmiShiftArithmeticRight(rax, rdx, rcx); | 2287 __ SmiShiftArithmeticRight(rax, rdx, rcx); |
2287 break; | 2288 break; |
2288 case Token::SHL: | 2289 case Token::SHL: |
2289 __ SmiShiftLeft(rax, rdx, rcx); | 2290 __ SmiShiftLeft(rax, rdx, rcx, &stub_call); |
2290 break; | 2291 break; |
2291 case Token::SHR: | 2292 case Token::SHR: |
2292 __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call); | 2293 __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call); |
2293 break; | 2294 break; |
2294 case Token::ADD: | 2295 case Token::ADD: |
2295 __ SmiAdd(rax, rdx, rcx, &stub_call); | 2296 __ SmiAdd(rax, rdx, rcx, wrapper); |
2296 break; | 2297 break; |
2297 case Token::SUB: | 2298 case Token::SUB: |
2298 __ SmiSub(rax, rdx, rcx, &stub_call); | 2299 __ SmiSub(rax, rdx, rcx, wrapper); |
2299 break; | 2300 break; |
2300 case Token::MUL: | 2301 case Token::MUL: |
2301 __ SmiMul(rax, rdx, rcx, &stub_call); | 2302 __ SmiMul(rax, rdx, rcx, &stub_call); |
2302 break; | 2303 break; |
2303 case Token::BIT_OR: | 2304 case Token::BIT_OR: |
2304 __ SmiOr(rax, rdx, rcx); | 2305 __ SmiOr(rax, rdx, rcx); |
2305 break; | 2306 break; |
2306 case Token::BIT_AND: | 2307 case Token::BIT_AND: |
2307 __ SmiAnd(rax, rdx, rcx); | 2308 __ SmiAnd(rax, rdx, rcx); |
2308 break; | 2309 break; |
(...skipping 2134 matching lines...) | |
4443 } | 4444 } |
4444 } | 4445 } |
4445 } | 4446 } |
4446 | 4447 |
4447 // Inline smi case if we are in a loop. | 4448 // Inline smi case if we are in a loop. |
4448 Label done, stub_call; | 4449 Label done, stub_call; |
4449 JumpPatchSite patch_site(masm_); | 4450 JumpPatchSite patch_site(masm_); |
4450 | 4451 |
4451 if (ShouldInlineSmiCase(expr->op())) { | 4452 if (ShouldInlineSmiCase(expr->op())) { |
4452 if (expr->op() == Token::INC) { | 4453 if (expr->op() == Token::INC) { |
4453 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); | 4454 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); |
danno
2013/08/19 21:47:44
Shouldn't overflow also be handled in SmiAddConstant...

haitao.feng
2013/08/20 15:09:30
I will address this in https://codereview.chromium...
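For context on the exchange above: the patch performs the 31-bit overflow check at the call site (the testl/j sequence added just below) rather than inside SmiAddConstant. What the reviewer hints at would look roughly like the SmiAdd/SmiSub calls earlier in this file that already take a bailout; the overload sketched here is hypothetical and not part of this patch:

```cpp
// Hypothetical overload, for illustration only (not the API in this patch):
// let SmiAddConstant take the bailout label so each caller does not need its
// own SmiValuesAre31Bits() check.
__ SmiAddConstant(rax, rax, Smi::FromInt(1), &stub_call);
```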
| 4455 if (SmiValuesAre31Bits()) { |
| 4456 // positive overflow |
| 4457 __ testl(rax, Immediate(0x80000000)); |
| 4458 __ j(not_zero, &stub_call, Label::kNear); |
| 4459 } |
4454 } else { | 4460 } else { |
4455 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); | 4461 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); |
| 4462 if (SmiValuesAre31Bits()) { |
| 4463 // negative overflow |
| 4464 __ testl(rax, Immediate(0x80000000)); |
| 4465 __ j(zero, &stub_call, Label::kNear); |
| 4466 } |
4456 } | 4467 } |
4457 __ j(overflow, &stub_call, Label::kNear); | 4468 if (SmiValuesAre32Bits()) { |
| 4469 __ j(overflow, &stub_call, Label::kNear); |
| 4470 } |
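The two SmiValuesAre31Bits() blocks above supply by hand the check that the SmiValuesAre32Bits() branch gets for free from the CPU overflow flag: with 31-bit smis the tagged value is the smi shifted left by the one tag bit in the low 32 bits of the register, so incrementing the largest smi carries into bit 31 (INC bails when the bit is set) and decrementing the smallest smi clears it (DEC bails when it is clear). A minimal self-contained C++ model of the INC check, with the tagging layout stated as an assumption (this is not V8 code):

```cpp
#include <cassert>
#include <cstdint>

// Assumed SmiValuesAre31Bits() layout: value << 1 in the low 32 bits.
constexpr int32_t kMaxSmi31 = (1 << 30) - 1;
uint32_t TagSmi31(int32_t value) { return static_cast<uint32_t>(value) << 1; }

// Mirrors the emitted sequence:
//   __ SmiAddConstant(rax, rax, Smi::FromInt(1));
//   __ testl(rax, Immediate(0x80000000));
//   __ j(not_zero, &stub_call, Label::kNear);
// Returns true where the generated code would jump to the stub.
bool IncTakesStubPath(uint32_t tagged) {
  uint32_t result = tagged + TagSmi31(1);  // the inlined fast-path increment
  return (result & 0x80000000u) != 0;      // the "positive overflow" test
}

int main() {
  assert(!IncTakesStubPath(TagSmi31(41)));        // 41 -> 42 stays a smi
  assert(IncTakesStubPath(TagSmi31(kMaxSmi31)));  // max smi + 1 sets bit 31
}
```

Note that with this test, incrementing a negative smi also falls back to the stub (its bit 31 is already set); that is conservative rather than incorrect, since the stub handles every input.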
4458 // We could eliminate this smi check if we split the code at | 4471 // We could eliminate this smi check if we split the code at |
4459 // the first smi check before calling ToNumber. | 4472 // the first smi check before calling ToNumber. |
4460 patch_site.EmitJumpIfSmi(rax, &done, Label::kNear); | 4473 patch_site.EmitJumpIfSmi(rax, &done, Label::kNear); |
4461 | 4474 |
4462 __ bind(&stub_call); | 4475 __ bind(&stub_call); |
4463 // Call stub. Undo operation first. | 4476 // Call stub. Undo operation first. |
4464 if (expr->op() == Token::INC) { | 4477 if (expr->op() == Token::INC) { |
4465 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); | 4478 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); |
4466 } else { | 4479 } else { |
4467 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); | 4480 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); |
(...skipping 434 matching lines...) | |
4902 *context_length = 0; | 4915 *context_length = 0; |
4903 return previous_; | 4916 return previous_; |
4904 } | 4917 } |
4905 | 4918 |
4906 | 4919 |
4907 #undef __ | 4920 #undef __ |
4908 | 4921 |
4909 } } // namespace v8::internal | 4922 } } // namespace v8::internal |
4910 | 4923 |
4911 #endif // V8_TARGET_ARCH_X64 | 4924 #endif // V8_TARGET_ARCH_X64 |