| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 554 matching lines...) |
| 565 if (FLAG_trap_on_deopt) __ int3(); | 565 if (FLAG_trap_on_deopt) __ int3(); |
| 566 __ jmp(entry, RelocInfo::RUNTIME_ENTRY); | 566 __ jmp(entry, RelocInfo::RUNTIME_ENTRY); |
| 567 } else { | 567 } else { |
| 568 if (FLAG_trap_on_deopt) { | 568 if (FLAG_trap_on_deopt) { |
| 569 Label done; | 569 Label done; |
| 570 __ j(NegateCondition(cc), &done, Label::kNear); | 570 __ j(NegateCondition(cc), &done, Label::kNear); |
| 571 __ int3(); | 571 __ int3(); |
| 572 __ jmp(entry, RelocInfo::RUNTIME_ENTRY); | 572 __ jmp(entry, RelocInfo::RUNTIME_ENTRY); |
| 573 __ bind(&done); | 573 __ bind(&done); |
| 574 } else { | 574 } else { |
| 575 __ j(cc, entry, RelocInfo::RUNTIME_ENTRY, not_taken); | 575 __ j(cc, entry, RelocInfo::RUNTIME_ENTRY); |
| 576 } | 576 } |
| 577 } | 577 } |
| 578 } | 578 } |
| 579 | 579 |
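The substantive change in this hunk is on line 575: the not_taken branch hint argument is dropped from the call to j(). As a rough illustration of what such a hint amounts to at the encoding level (a simplified sketch, not the real V8 Assembler; the names below are hypothetical): the hint is a one-byte 0x2E or 0x3E prefix placed in front of the Jcc instruction, which current x86 processors generally ignore, which is presumably why the argument is being removed throughout this file.

    // Hypothetical sketch of a short conditional jump emitter with a hint.
    #include <cstdint>
    #include <vector>

    enum Hint { no_hint = 0, not_taken = 0x2e, taken = 0x3e };

    void EmitJcc(std::vector<uint8_t>* buf, uint8_t cc, int8_t disp, Hint hint) {
      if (hint != no_hint) buf->push_back(static_cast<uint8_t>(hint));  // prefix byte
      buf->push_back(0x70 | (cc & 0x0f));                               // short Jcc opcode
      buf->push_back(static_cast<uint8_t>(disp));                       // rel8 displacement
    }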
| 580 | 580 |
| 581 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 581 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { |
| 582 int length = deoptimizations_.length(); | 582 int length = deoptimizations_.length(); |
| 583 if (length == 0) return; | 583 if (length == 0) return; |
| 584 ASSERT(FLAG_deopt); | 584 ASSERT(FLAG_deopt); |
| 585 Handle<DeoptimizationInputData> data = | 585 Handle<DeoptimizationInputData> data = |
| (...skipping 889 matching lines...) |
| 1475 void LCodeGen::DoCmpID(LCmpID* instr) { | 1475 void LCodeGen::DoCmpID(LCmpID* instr) { |
| 1476 LOperand* left = instr->InputAt(0); | 1476 LOperand* left = instr->InputAt(0); |
| 1477 LOperand* right = instr->InputAt(1); | 1477 LOperand* right = instr->InputAt(1); |
| 1478 LOperand* result = instr->result(); | 1478 LOperand* result = instr->result(); |
| 1479 | 1479 |
| 1480 Label unordered; | 1480 Label unordered; |
| 1481 if (instr->is_double()) { | 1481 if (instr->is_double()) { |
| 1482 // Don't base result on EFLAGS when a NaN is involved. Instead | 1482 // Don't base result on EFLAGS when a NaN is involved. Instead |
| 1483 // jump to the unordered case, which produces a false value. | 1483 // jump to the unordered case, which produces a false value. |
| 1484 __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right)); | 1484 __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right)); |
| 1485 __ j(parity_even, &unordered, not_taken, Label::kNear); | 1485 __ j(parity_even, &unordered, Label::kNear); |
| 1486 } else { | 1486 } else { |
| 1487 EmitCmpI(left, right); | 1487 EmitCmpI(left, right); |
| 1488 } | 1488 } |
| 1489 | 1489 |
| 1490 Label done; | 1490 Label done; |
| 1491 Condition cc = TokenToCondition(instr->op(), instr->is_double()); | 1491 Condition cc = TokenToCondition(instr->op(), instr->is_double()); |
| 1492 __ mov(ToRegister(result), factory()->true_value()); | 1492 __ mov(ToRegister(result), factory()->true_value()); |
| 1493 __ j(cc, &done, Label::kNear); | 1493 __ j(cc, &done, Label::kNear); |
| 1494 | 1494 |
| 1495 __ bind(&unordered); | 1495 __ bind(&unordered); |
| (...skipping 503 matching lines...) |
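The comment at lines 1482-1483 relies on ucomisd flag semantics: an unordered compare (at least one NaN operand) sets ZF, PF and CF, so parity_even fires exactly when a NaN is involved and control jumps to the unordered label, which produces the false value. A minimal standalone illustration of the same IEEE-754 behaviour at the C++ level (my own example, not part of this patch):

    // Every ordered comparison involving NaN is false; isunordered() mirrors
    // the parity_even / unordered case tested above.
    #include <cassert>
    #include <cmath>
    #include <limits>

    int main() {
      double nan = std::numeric_limits<double>::quiet_NaN();
      assert(!(nan < 1.0) && !(nan > 1.0) && !(nan == nan));  // all "false value"
      assert(std::isunordered(nan, 1.0));   // the unordered (NaN) case
      assert(!std::isunordered(2.0, 1.0));  // ordered operands compare normally
      return 0;
    }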
| 1999 | 1999 |
| 2000 DeferredInstanceOfKnownGlobal* deferred; | 2000 DeferredInstanceOfKnownGlobal* deferred; |
| 2001 deferred = new DeferredInstanceOfKnownGlobal(this, instr); | 2001 deferred = new DeferredInstanceOfKnownGlobal(this, instr); |
| 2002 | 2002 |
| 2003 Label done, false_result; | 2003 Label done, false_result; |
| 2004 Register object = ToRegister(instr->InputAt(0)); | 2004 Register object = ToRegister(instr->InputAt(0)); |
| 2005 Register temp = ToRegister(instr->TempAt(0)); | 2005 Register temp = ToRegister(instr->TempAt(0)); |
| 2006 | 2006 |
| 2007 // A Smi is not an instance of anything. | 2007 // A Smi is not an instance of anything. |
| 2008 __ test(object, Immediate(kSmiTagMask)); | 2008 __ test(object, Immediate(kSmiTagMask)); |
| 2009 __ j(zero, &false_result, not_taken); | 2009 __ j(zero, &false_result); |
| 2010 | 2010 |
| 2011 // This is the inlined call site instanceof cache. The two occurrences of the | 2011 // This is the inlined call site instanceof cache. The two occurrences of the |
| 2012 // hole value will be patched to the last map/result pair generated by the | 2012 // hole value will be patched to the last map/result pair generated by the |
| 2013 // instanceof stub. | 2013 // instanceof stub. |
| 2014 Label cache_miss; | 2014 Label cache_miss; |
| 2015 Register map = ToRegister(instr->TempAt(0)); | 2015 Register map = ToRegister(instr->TempAt(0)); |
| 2016 __ mov(map, FieldOperand(object, HeapObject::kMapOffset)); | 2016 __ mov(map, FieldOperand(object, HeapObject::kMapOffset)); |
| 2017 __ bind(deferred->map_check()); // Label for calculating code patching. | 2017 __ bind(deferred->map_check()); // Label for calculating code patching. |
| 2018 __ cmp(map, factory()->the_hole_value()); // Patched to cached map. | 2018 __ cmp(map, factory()->the_hole_value()); // Patched to cached map. |
| 2019 __ j(not_equal, &cache_miss, not_taken, Label::kNear); | 2019 __ j(not_equal, &cache_miss, Label::kNear); |
| 2020 __ mov(eax, factory()->the_hole_value()); // Patched to either true or false. | 2020 __ mov(eax, factory()->the_hole_value()); // Patched to either true or false. |
| 2021 __ jmp(&done); | 2021 __ jmp(&done); |
| 2022 | 2022 |
| 2023 // The inlined call site cache did not match. Check for null and string | 2023 // The inlined call site cache did not match. Check for null and string |
| 2024 // before calling the deferred code. | 2024 // before calling the deferred code. |
| 2025 __ bind(&cache_miss); | 2025 __ bind(&cache_miss); |
| 2026 // Null is not an instance of anything. | 2026 // Null is not an instance of anything. |
| 2027 __ cmp(object, factory()->null_value()); | 2027 __ cmp(object, factory()->null_value()); |
| 2028 __ j(equal, &false_result); | 2028 __ j(equal, &false_result); |
| 2029 | 2029 |
| (...skipping 1560 matching lines...) |
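The smi check at lines 2008-2009 depends on V8's 32-bit pointer tagging: smis are stored shifted left by one with a zero tag bit, while heap object pointers carry a one in the low bit, so testing against kSmiTagMask and branching on zero selects the smi path before the inline instanceof cache is consulted. A schematic sketch of that layout (the constants and helpers below are assumptions for illustration, not code from this file):

    #include <cstdint>

    constexpr uint32_t kSmiTagMaskSketch = 1;  // low bit selects smi vs. heap pointer

    bool IsSmi(uint32_t tagged_word) {
      return (tagged_word & kSmiTagMaskSketch) == 0;  // the j(zero, ...) path
    }

    uint32_t TagSmi(int32_t value) { return static_cast<uint32_t>(value) << 1; }
    int32_t UntagSmi(uint32_t word) { return static_cast<int32_t>(word) >> 1; }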
| 3590 } | 3590 } |
| 3591 | 3591 |
| 3592 | 3592 |
| 3593 void LCodeGen::EmitNumberUntagD(Register input_reg, | 3593 void LCodeGen::EmitNumberUntagD(Register input_reg, |
| 3594 XMMRegister result_reg, | 3594 XMMRegister result_reg, |
| 3595 LEnvironment* env) { | 3595 LEnvironment* env) { |
| 3596 Label load_smi, heap_number, done; | 3596 Label load_smi, heap_number, done; |
| 3597 | 3597 |
| 3598 // Smi check. | 3598 // Smi check. |
| 3599 __ test(input_reg, Immediate(kSmiTagMask)); | 3599 __ test(input_reg, Immediate(kSmiTagMask)); |
| 3600 __ j(zero, &load_smi, not_taken, Label::kNear); | 3600 __ j(zero, &load_smi, Label::kNear); |
| 3601 | 3601 |
| 3602 // Heap number map check. | 3602 // Heap number map check. |
| 3603 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 3603 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 3604 factory()->heap_number_map()); | 3604 factory()->heap_number_map()); |
| 3605 __ j(equal, &heap_number, Label::kNear); | 3605 __ j(equal, &heap_number, Label::kNear); |
| 3606 | 3606 |
| 3607 __ cmp(input_reg, factory()->undefined_value()); | 3607 __ cmp(input_reg, factory()->undefined_value()); |
| 3608 DeoptimizeIf(not_equal, env); | 3608 DeoptimizeIf(not_equal, env); |
| 3609 | 3609 |
| 3610 // Convert undefined to NaN. | 3610 // Convert undefined to NaN. |
| (...skipping 763 matching lines...) |
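EmitNumberUntagD handles four cases: a smi is converted from its untagged integer, a heap number has its stored double loaded, undefined becomes NaN (per the comment at line 3610), and anything else reaches DeoptimizeIf. A high-level paraphrase of that decision tree (illustration only, not the emitted machine code; the types here are hypothetical):

    #include <cmath>
    #include <cstdint>
    #include <optional>

    enum class Kind { kSmi, kHeapNumber, kUndefined, kOther };

    struct TaggedValue {
      Kind kind;
      int32_t smi_value;    // valid when kind == kSmi
      double number_value;  // valid when kind == kHeapNumber
    };

    std::optional<double> NumberUntagD(const TaggedValue& v) {
      switch (v.kind) {
        case Kind::kSmi:        return static_cast<double>(v.smi_value);
        case Kind::kHeapNumber: return v.number_value;
        case Kind::kUndefined:  return std::nan("");   // undefined -> NaN
        case Kind::kOther:      return std::nullopt;   // DeoptimizeIf path
      }
      return std::nullopt;
    }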
| 4374 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 4374 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); |
| 4375 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); | 4375 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); |
| 4376 } | 4376 } |
| 4377 | 4377 |
| 4378 | 4378 |
| 4379 #undef __ | 4379 #undef __ |
| 4380 | 4380 |
| 4381 } } // namespace v8::internal | 4381 } } // namespace v8::internal |
| 4382 | 4382 |
| 4383 #endif // V8_TARGET_ARCH_IA32 | 4383 #endif // V8_TARGET_ARCH_IA32 |