Chromium Code Reviews

Diff: src/x64/codegen-x64.cc

Issue 6682026: Fix SmiCompare on 64 bit to distinguish between comparisons where... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 9 months ago
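The patch replaces MacroAssembler::SmiCompare with Cmp at call sites where the value being compared against a Smi constant is not yet known to be a smi (it may still be a heap object, e.g. a HeapNumber or a context pointer), and keeps SmiCompare only where both operands are guaranteed to be smis. The stand-alone C++ sketch below illustrates why that distinction matters; the layout constants and helper names (TagSmi, IsSmi, SmiOnlyCompare, EqualsSmiConstant) are illustrative assumptions about the 2011-era x64 smi representation, not V8's actual API.

// Hedged sketch, not V8 code: a smi-only compare versus a general compare
// against a tagged Smi constant.
#include <cassert>
#include <cstdint>
#include <iostream>

constexpr int kSmiShift = 32;  // assumption: 32-bit payload in the upper half, tag bit 0 clear

uint64_t TagSmi(int32_t value) {  // hypothetical helper
  return static_cast<uint64_t>(static_cast<uint32_t>(value)) << kSmiShift;
}

bool IsSmi(uint64_t word) {
  return (word & 1) == 0;  // assumption: heap pointers carry tag bit 1
}

// "SmiCompare"-style: both inputs must already be smis; under a debug flag the
// real macro assembler may assert this, so feeding it a heap pointer is a bug.
int SmiOnlyCompare(uint64_t a, uint64_t b) {
  assert(IsSmi(a) && IsSmi(b));
  int64_t sa = static_cast<int64_t>(a);
  int64_t sb = static_cast<int64_t>(b);
  return sa < sb ? -1 : (sa > sb ? 1 : 0);  // signed compare of the tagged words
}

// "Cmp"-style: a plain full-word comparison against the tagged constant. A
// non-smi word (for example a HeapNumber pointer) simply compares unequal,
// which is what the call sites changed below rely on.
bool EqualsSmiConstant(uint64_t word, int32_t constant) {
  return word == TagSmi(constant);
}

int main() {
  std::cout << SmiOnlyCompare(TagSmi(0), TagSmi(1)) << "\n";  // prints -1
  std::cout << EqualsSmiConstant(TagSmi(7), 0) << "\n";       // prints 0 (not equal)
  return 0;
}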
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 748 matching lines...)
   // The value to convert should be popped from the frame.
   Result value = frame_->Pop();
   value.ToRegister();

   if (value.is_number()) {
     // Fast case if TypeInfo indicates only numbers.
     if (FLAG_debug_code) {
       __ AbortIfNotNumber(value.reg());
     }
     // Smi => false iff zero.
-    __ SmiCompare(value.reg(), Smi::FromInt(0));
+    __ Cmp(value.reg(), Smi::FromInt(0));
     if (value.is_smi()) {
       value.Unuse();
       dest->Split(not_zero);
     } else {
       dest->false_target()->Branch(equal);
       Condition is_smi = masm_->CheckSmi(value.reg());
       dest->true_target()->Branch(is_smi);
       __ xorpd(xmm0, xmm0);
       __ ucomisd(xmm0, FieldOperand(value.reg(), HeapNumber::kValueOffset));
       value.Unuse();
       dest->Split(not_zero);
     }
   } else {
     // Fast case checks.
     // 'false' => false.
     __ CompareRoot(value.reg(), Heap::kFalseValueRootIndex);
     dest->false_target()->Branch(equal);

     // 'true' => true.
     __ CompareRoot(value.reg(), Heap::kTrueValueRootIndex);
     dest->true_target()->Branch(equal);

     // 'undefined' => false.
     __ CompareRoot(value.reg(), Heap::kUndefinedValueRootIndex);
     dest->false_target()->Branch(equal);

     // Smi => false iff zero.
-    __ SmiCompare(value.reg(), Smi::FromInt(0));
+    __ Cmp(value.reg(), Smi::FromInt(0));
     dest->false_target()->Branch(equal);
     Condition is_smi = masm_->CheckSmi(value.reg());
     dest->true_target()->Branch(is_smi);

     // Call the stub for all other cases.
     frame_->Push(&value);  // Undo the Pop() from above.
     ToBooleanStub stub;
     Result temp = frame_->CallStub(&stub, 1);
     // Convert the result to a condition code.
     __ testq(temp.reg(), temp.reg());
(...skipping 221 matching lines...)
                                          operands_type);
     answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right);
   } else if (right_is_smi_constant) {
     answer = ConstantSmiBinaryOperation(expr, &left, right.handle(),
                                         false, overwrite_mode);
   } else if (left_is_smi_constant) {
     answer = ConstantSmiBinaryOperation(expr, &right, left.handle(),
                                         true, overwrite_mode);
   } else {
     // Set the flags based on the operation, type and loop nesting level.
-    // Bit operations always assume they likely operate on Smis. Still only
+    // Bit operations always assume they likely operate on smis. Still only
     // generate the inline Smi check code if this operation is part of a loop.
     // For all other operations only inline the Smi check code for likely smis
     // if the operation is part of a loop.
     if (loop_nesting() > 0 &&
         (Token::IsBitOp(op) ||
          operands_type.IsInteger32() ||
          expr->type()->IsLikelySmi())) {
       answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode);
     } else {
       GenericBinaryOpStub stub(op,
(...skipping 1051 matching lines...)
     is_string.Bind(&left_side);
     // left_side is a sequential ASCII string.
     ASSERT(left_side.reg().is(left_reg));
     right_side = Result(right_val);
     Result temp2 = allocator_->Allocate();
     ASSERT(temp2.is_valid());
     // Test string equality and comparison.
     if (cc == equal) {
       Label comparison_done;
       __ SmiCompare(FieldOperand(left_side.reg(), String::kLengthOffset),
                     Smi::FromInt(1));
       __ j(not_equal, &comparison_done);
       uint8_t char_value =
           static_cast<uint8_t>(String::cast(*right_val)->Get(0));
       __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
               Immediate(char_value));
       __ bind(&comparison_done);
     } else {
       __ movq(temp2.reg(),
               FieldOperand(left_side.reg(), String::kLengthOffset));
       __ SmiSubConstant(temp2.reg(), temp2.reg(), Smi::FromInt(1));
(...skipping 165 matching lines...)
   // Since one side is a constant Smi, conversion order does not matter.
   if (left_side_constant_smi) {
     Result* temp = left_side;
     left_side = right_side;
     right_side = temp;
     cc = ReverseCondition(cc);
     // This may re-introduce greater or less_equal as the value of cc.
     // CompareStub and the inline code both support all values of cc.
   }
   // Implement comparison against a constant Smi, inlining the case
-  // where both sides are Smis.
+  // where both sides are smis.
   left_side->ToRegister();
   Register left_reg = left_side->reg();
   Smi* constant_smi = Smi::cast(*right_side->handle());

   if (left_side->is_smi()) {
     if (FLAG_debug_code) {
       __ AbortIfNotSmi(left_reg);
     }
     // Test smi equality and comparison by signed int comparison.
-    // Both sides are smis, so we can use an Immediate.
     __ SmiCompare(left_reg, constant_smi);
     left_side->Unuse();
     right_side->Unuse();
     dest->Split(cc);
   } else {
     // Only the case where the left side could possibly be a non-smi is left.
     JumpTarget is_smi;
     if (cc == equal) {
       // We can do the equality comparison before the smi check.
-      __ SmiCompare(left_reg, constant_smi);
+      __ Cmp(left_reg, constant_smi);
       dest->true_target()->Branch(equal);
       Condition left_is_smi = masm_->CheckSmi(left_reg);
       dest->false_target()->Branch(left_is_smi);
     } else {
       // Do the smi check, then the comparison.
       Condition left_is_smi = masm_->CheckSmi(left_reg);
       is_smi.Branch(left_is_smi, left_side, right_side);
     }

     // Jump or fall through to here if we are comparing a non-smi to a
(...skipping 240 matching lines...)
   __ movq(rdi, Operand(rsp, 2 * kPointerSize));
   is_smi = masm_->CheckSmi(rdi);
   __ j(is_smi, &build_args);
   __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
   __ j(not_equal, &build_args);

   // Copy the arguments to this function possibly from the
   // adaptor frame below it.
   Label invoke, adapted;
   __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
-  __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
+  __ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
          Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(equal, &adapted);

   // No arguments adaptor frame. Copy fixed number of arguments.
   __ Set(rax, scope()->num_parameters());
   for (int i = 0; i < scope()->num_parameters(); i++) {
     __ push(frame_->ParameterAt(i));
   }
   __ jmp(&invoke);

   // Arguments adaptor frame present. Copy arguments from there, but
(...skipping 1260 matching lines...)
   __ cmpq(rcx, rdx);
   end_del_check.Branch(equal);

   // Convert the entry to a string (or null if it isn't a property anymore).
   frame_->EmitPush(frame_->ElementAt(4));  // push enumerable
   frame_->EmitPush(rbx);  // push entry
   frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
   __ movq(rbx, rax);

   // If the property has been removed while iterating, we just skip it.
-  __ SmiCompare(rbx, Smi::FromInt(0));
+  __ Cmp(rbx, Smi::FromInt(0));
   node->continue_target()->Branch(equal);

   end_del_check.Bind();
   // Store the entry in the 'each' expression and take another spin in the
   // loop. rdx: i'th entry of the enum cache (or string there of)
   frame_->EmitPush(rbx);
   { Reference each(this, node->each());
     // Loading a reference may leave the frame in an unspilled state.
     frame_->SpillAll();
     if (!each.is_illegal()) {
(...skipping 2320 matching lines...)
 void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
   ASSERT(args->length() == 0);

   // Get the frame pointer for the calling frame.
   Result fp = allocator()->Allocate();
   __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset));

   // Skip the arguments adaptor frame if it exists.
   Label check_frame_marker;
-  __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
+  __ Cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
          Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(not_equal, &check_frame_marker);
   __ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));

   // Check the marker in the calling frame.
   __ bind(&check_frame_marker);
-  __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
+  __ Cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
          Smi::FromInt(StackFrame::CONSTRUCT));
   fp.Unuse();
   destination()->Split(equal);
 }


 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
   ASSERT(args->length() == 0);

   Result fp = allocator_->Allocate();
   Result result = allocator_->Allocate();
   ASSERT(fp.is_valid() && result.is_valid());

   Label exit;

   // Get the number of formal parameters.
   __ Move(result.reg(), Smi::FromInt(scope()->num_parameters()));

   // Check if the calling frame is an arguments adaptor frame.
   __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset));
-  __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
+  __ Cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
          Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(not_equal, &exit);

   // Arguments adaptor case: Read the arguments length from the
   // adaptor frame.
   __ movq(result.reg(),
           Operand(fp.reg(), ArgumentsAdaptorFrameConstants::kLengthOffset));

   __ bind(&exit);
   result.set_type_info(TypeInfo::Smi());
   if (FLAG_debug_code) {
(...skipping 535 matching lines...)
   frame_->Spill(index1.reg());
   frame_->Spill(index2.reg());

   DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(),
                                                             index1.reg(),
                                                             index2.reg());

   // Fetch the map and check if array is in fast case.
   // Check that object doesn't require security checks and
   // has no indexed interceptor.
-  __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
-  deferred->Branch(below);
+  __ CmpObjectType(object.reg(), JS_ARRAY_TYPE, tmp1.reg());
+  deferred->Branch(not_equal);
   __ testb(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
            Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
   deferred->Branch(not_zero);

   // Check the object's elements are in fast case and writable.
   __ movq(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
   __ CompareRoot(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
                  Heap::kFixedArrayMapRootIndex);
   deferred->Branch(not_equal);
(...skipping 21 matching lines...)
                      FixedArray::kHeaderSize));

   // Swap elements.
   __ movq(object.reg(), Operand(index1.reg(), 0));
   __ movq(tmp2.reg(), Operand(index2.reg(), 0));
   __ movq(Operand(index2.reg(), 0), object.reg());
   __ movq(Operand(index1.reg(), 0), tmp2.reg());

   Label done;
   __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
-  // Possible optimization: do a check that both values are Smis
+  // Possible optimization: do a check that both values are smis
   // (or them and test against Smi mask.)

   __ movq(tmp2.reg(), tmp1.reg());
   __ RecordWriteHelper(tmp1.reg(), index1.reg(), object.reg());
   __ RecordWriteHelper(tmp2.reg(), index2.reg(), object.reg());
   __ bind(&done);

   deferred->BindExit();
   frame_->Push(Factory::undefined_value());
 }
(...skipping 1669 matching lines...)
   if (!key.is_smi()) {
     __ JumpIfNotSmi(key.reg(), deferred->entry_label());
   } else if (FLAG_debug_code) {
     __ AbortIfNotSmi(key.reg());
   }

   // Check that the receiver is a JSArray.
   __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, kScratchRegister);
   deferred->Branch(not_equal);

-  // Check that the key is within bounds. Both the key and the length of
-  // the JSArray are smis. Use unsigned comparison to handle negative keys.
-  __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
-                key.reg());
-  deferred->Branch(below_equal);
-
   // Get the elements array from the receiver and check that it is not a
   // dictionary.
   __ movq(tmp.reg(),
           FieldOperand(receiver.reg(), JSArray::kElementsOffset));

   // Check whether it is possible to omit the write barrier. If the elements
   // array is in new space or the value written is a smi we can safely update
   // the elements array without write barrier.
   Label in_new_space;
   __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
   if (!value_is_constant) {
     __ JumpIfNotSmi(result.reg(), deferred->entry_label());
   }

   __ bind(&in_new_space);
   // Bind the deferred code patch site to be able to locate the fixed
   // array map comparison. When debugging, we patch this comparison to
   // always fail so that we will hit the IC call in the deferred code
   // which will allow the debugger to break for fast case stores.
   __ bind(deferred->patch_site());
   // Avoid using __ to ensure the distance from patch_site
   // to the map address is always the same.
   masm()->movq(kScratchRegister, Factory::fixed_array_map(),
                RelocInfo::EMBEDDED_OBJECT);
   __ cmpq(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
           kScratchRegister);
   deferred->Branch(not_equal);

+  // Check that the key is within bounds. Both the key and the length of
+  // the JSArray are smis (because the fixed array check above ensures the
+  // elements are in fast case). Use unsigned comparison to handle negative
+  // keys.
+  __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
+                key.reg());
+  deferred->Branch(below_equal);
+
   // Store the value.
   SmiIndex index =
       masm()->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2);
   __ movq(FieldOperand(tmp.reg(),
                        index.reg,
                        index.scale,
                        FixedArray::kHeaderSize),
           result.reg());
   __ IncrementCounter(&Counters::keyed_store_inline, 1);
(...skipping 271 matching lines...)
 }

 #endif


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64
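The last hunk moves the keyed-store bounds check so that it runs only after the fixed-array map comparison, where both the key and the JSArray length are known to be smis (as the updated comment notes), and it keeps the unsigned below_equal branch so that negative keys also go to the deferred slow path. A small stand-alone sketch of that unsigned-comparison trick, under the same assumed x64 tagging as above and with hypothetical helper names rather than V8's:

#include <cstdint>
#include <iostream>

uint64_t TagSmi(int32_t value) {  // hypothetical tagging helper (assumed layout)
  return static_cast<uint64_t>(static_cast<uint32_t>(value)) << 32;
}

// Mirrors "SmiCompare(length, key)" followed by "deferred->Branch(below_equal)":
// comparing the tagged words as unsigned numbers, a negative key has its sign
// bit set and therefore looks larger than any valid length, so a single
// length <= key check rejects both out-of-range and negative keys.
bool NeedsSlowPath(int32_t length, int32_t key) {
  return TagSmi(length) <= TagSmi(key);  // unsigned comparison
}

int main() {
  std::cout << NeedsSlowPath(10, 3) << "\n";   // 0: key in bounds, fast path
  std::cout << NeedsSlowPath(10, 10) << "\n";  // 1: key == length, out of range
  std::cout << NeedsSlowPath(10, -1) << "\n";  // 1: negative key, slow path
  return 0;
}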