Chromium Code Reviews

Unified Diff: src/crankshaft/ia32/lithium-codegen-ia32.cc

Issue 1815213002: Extends testb and cmpb/cmpw instruction support in the ia32 assembler. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix a copy/pasted erroneous DCHECK. Created 4 years, 9 months ago
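Throughout this patch, call sites stop passing a raw integer as the second argument of test_b/cmpb and instead pass an explicit Immediate(...) operand. As a rough, self-contained sketch of the general idea (the Register, Operand, Immediate, and Assembler types below are illustrative stand-ins, not V8's actual declarations), a dedicated immediate type lets an assembler expose separate memory-operand and register overloads without ambiguous or narrowing integer arguments:

#include <cstdint>
#include <cstdio>

// Illustrative stand-ins, not V8's real classes.
struct Register { int code; };
struct Operand { Register base; int32_t disp; };
struct Immediate {
  explicit Immediate(int32_t v) : value(v) {}
  int32_t value;
};

class Assembler {
 public:
  // Separate overloads for "test byte at [mem], imm8" and "test reg8, imm8";
  // the explicit Immediate type keeps the call sites unambiguous.
  void test_b(const Operand& op, Immediate imm) {
    std::printf("test byte ptr [r%d%+d], %d\n", op.base.code, op.disp, imm.value);
  }
  void test_b(Register reg, Immediate imm) {
    std::printf("test r%d_b, %d\n", reg.code, imm.value);
  }
};

int main() {
  Assembler masm;
  Register eax{0};
  Operand bit_field{eax, 7};                  // a byte field at a small displacement
  masm.test_b(bit_field, Immediate(1 << 3));  // memory-operand form
  masm.test_b(eax, Immediate(1));             // register form
  return 0;
}

The hunks below show the same shape at the real call sites: 1 << Map::kIsUndetectable becomes Immediate(1 << Map::kIsUndetectable), and the static_cast<int8_t>(...) workarounds for cmpb go away.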
@@ -1,10 +1,10 @@
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #if V8_TARGET_ARCH_IA32
 
 #include "src/crankshaft/ia32/lithium-codegen-ia32.h"
 
 #include "src/base/bits.h"
 #include "src/code-factory.h"
(...skipping 1887 matching lines...)
@@ -1898,21 +1898,21 @@
 
     Register map = no_reg;  // Keep the compiler happy.
     if (expected.NeedsMap()) {
       map = ToRegister(instr->temp());
       DCHECK(!map.is(reg));
       __ mov(map, FieldOperand(reg, HeapObject::kMapOffset));
 
       if (expected.CanBeUndetectable()) {
         // Undetectable -> false.
         __ test_b(FieldOperand(map, Map::kBitFieldOffset),
-                  1 << Map::kIsUndetectable);
+                  Immediate(1 << Map::kIsUndetectable));
         __ j(not_zero, instr->FalseLabel(chunk_));
       }
     }
 
     if (expected.Contains(ToBooleanICStub::SPEC_OBJECT)) {
       // spec object -> true.
       __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
       __ j(above_equal, instr->TrueLabel(chunk_));
     }
 
(...skipping 205 matching lines...)
@@ -2124,21 +2124,21 @@
 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
   Register input = ToRegister(instr->value());
   Register temp = ToRegister(instr->temp());
 
   if (!instr->hydrogen()->value()->type().IsHeapObject()) {
     STATIC_ASSERT(kSmiTag == 0);
     __ JumpIfSmi(input, instr->FalseLabel(chunk_));
   }
   __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
   __ test_b(FieldOperand(temp, Map::kBitFieldOffset),
-            1 << Map::kIsUndetectable);
+            Immediate(1 << Map::kIsUndetectable));
   EmitBranch(instr, not_zero);
 }
 
 
 static Condition ComputeCompareCondition(Token::Value op) {
   switch (op) {
     case Token::EQ_STRICT:
     case Token::EQ:
       return equal;
     case Token::LT:
(...skipping 170 matching lines...)
@@ -2315,21 +2315,21 @@
     EmitFalseBranch(instr, zero);
   }
 
   // Loop through the {object}s prototype chain looking for the {prototype}.
   __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
   Label loop;
   __ bind(&loop);
 
   // Deoptimize if the object needs to be access checked.
   __ test_b(FieldOperand(object_map, Map::kBitFieldOffset),
-            1 << Map::kIsAccessCheckNeeded);
+            Immediate(1 << Map::kIsAccessCheckNeeded));
   DeoptimizeIf(not_zero, instr, Deoptimizer::kAccessCheck);
   // Deoptimize for proxies.
   __ CmpInstanceType(object_map, JS_PROXY_TYPE);
   DeoptimizeIf(equal, instr, Deoptimizer::kProxy);
 
   __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
   __ cmp(object_prototype, prototype);
   EmitTrueBranch(instr, equal);
   __ cmp(object_prototype, factory()->null_value());
   EmitFalseBranch(instr, equal);
(...skipping 524 matching lines...)
@@ -2860,26 +2860,26 @@
   Label receiver_ok, global_object;
   Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
   Register scratch = ToRegister(instr->temp());
 
   if (!instr->hydrogen()->known_function()) {
     // Do not transform the receiver to object for strict mode
     // functions.
     __ mov(scratch,
            FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
     __ test_b(FieldOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset),
-              1 << SharedFunctionInfo::kStrictModeBitWithinByte);
+              Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
     __ j(not_equal, &receiver_ok, dist);
 
     // Do not transform the receiver to object for builtins.
     __ test_b(FieldOperand(scratch, SharedFunctionInfo::kNativeByteOffset),
-              1 << SharedFunctionInfo::kNativeBitWithinByte);
+              Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
     __ j(not_equal, &receiver_ok, dist);
   }
 
   // Normal function. Replace undefined or null with global receiver.
   __ cmp(receiver, factory()->null_value());
   __ j(equal, &global_object, Label::kNear);
   __ cmp(receiver, factory()->undefined_value());
   __ j(equal, &global_object, Label::kNear);
 
   // The receiver should be a JS object.
(...skipping 1475 matching lines...)
@@ -4361,21 +4361,21 @@
 
   // Heap number to XMM conversion.
   __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
 
   if (deoptimize_on_minus_zero) {
     XMMRegister xmm_scratch = double_scratch0();
     __ xorps(xmm_scratch, xmm_scratch);
     __ ucomisd(result_reg, xmm_scratch);
     __ j(not_zero, &done, Label::kNear);
     __ movmskpd(temp_reg, result_reg);
-    __ test_b(temp_reg, 1);
+    __ test_b(temp_reg, Immediate(1));
     DeoptimizeIf(not_zero, instr, Deoptimizer::kMinusZero);
   }
   __ jmp(&done, Label::kNear);
 
   if (can_convert_undefined_to_nan) {
     __ bind(&convert);
 
     // Convert undefined to NaN.
     __ cmp(input_reg, factory()->undefined_value());
     DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumberUndefined);
(...skipping 201 matching lines...)
@@ -4583,59 +4583,57 @@
 }
 
 
 void LCodeGen::DoCheckArrayBufferNotNeutered(
     LCheckArrayBufferNotNeutered* instr) {
   Register view = ToRegister(instr->view());
   Register scratch = ToRegister(instr->scratch());
 
   __ mov(scratch, FieldOperand(view, JSArrayBufferView::kBufferOffset));
   __ test_b(FieldOperand(scratch, JSArrayBuffer::kBitFieldOffset),
-            1 << JSArrayBuffer::WasNeutered::kShift);
+            Immediate(1 << JSArrayBuffer::WasNeutered::kShift));
   DeoptimizeIf(not_zero, instr, Deoptimizer::kOutOfBounds);
 }
 
 
 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
   Register input = ToRegister(instr->value());
   Register temp = ToRegister(instr->temp());
 
   __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
 
   if (instr->hydrogen()->is_interval_check()) {
     InstanceType first;
     InstanceType last;
     instr->hydrogen()->GetCheckInterval(&first, &last);
 
-    __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
-            static_cast<int8_t>(first));
+    __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(first));
 
     // If there is only one type in the interval check for equality.
     if (first == last) {
       DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType);
     } else {
       DeoptimizeIf(below, instr, Deoptimizer::kWrongInstanceType);
       // Omit check for the last type.
       if (last != LAST_TYPE) {
-        __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
-                static_cast<int8_t>(last));
+        __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(last));
         DeoptimizeIf(above, instr, Deoptimizer::kWrongInstanceType);
       }
     }
   } else {
     uint8_t mask;
     uint8_t tag;
     instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
 
     if (base::bits::IsPowerOfTwo32(mask)) {
       DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag));
-      __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), mask);
+      __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(mask));
       DeoptimizeIf(tag == 0 ? not_zero : zero, instr,
                    Deoptimizer::kWrongInstanceType);
     } else {
       __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
       __ and_(temp, mask);
       __ cmp(temp, tag);
       DeoptimizeIf(not_equal, instr, Deoptimizer::kWrongInstanceType);
     }
   }
 }
(...skipping 329 matching lines...)
@@ -4971,42 +4969,42 @@
     __ cmp(input, factory()->false_value());
     final_branch_condition = equal;
 
   } else if (String::Equals(type_name, factory()->undefined_string())) {
     __ cmp(input, factory()->null_value());
     __ j(equal, false_label, false_distance);
     __ JumpIfSmi(input, false_label, false_distance);
     // Check for undetectable objects => true.
     __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
     __ test_b(FieldOperand(input, Map::kBitFieldOffset),
-              1 << Map::kIsUndetectable);
+              Immediate(1 << Map::kIsUndetectable));
     final_branch_condition = not_zero;
 
   } else if (String::Equals(type_name, factory()->function_string())) {
     __ JumpIfSmi(input, false_label, false_distance);
     // Check for callable and not undetectable objects => true.
     __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
     __ movzx_b(input, FieldOperand(input, Map::kBitFieldOffset));
     __ and_(input, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
     __ cmp(input, 1 << Map::kIsCallable);
     final_branch_condition = equal;
 
   } else if (String::Equals(type_name, factory()->object_string())) {
     __ JumpIfSmi(input, false_label, false_distance);
     __ cmp(input, factory()->null_value());
     __ j(equal, true_label, true_distance);
     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
     __ CmpObjectType(input, FIRST_JS_RECEIVER_TYPE, input);
     __ j(below, false_label, false_distance);
     // Check for callable or undetectable objects => false.
     __ test_b(FieldOperand(input, Map::kBitFieldOffset),
-              (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
+              Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
     final_branch_condition = zero;
 
 // clang-format off
 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)         \
   } else if (String::Equals(type_name, factory()->type##_string())) { \
     __ JumpIfSmi(input, false_label, false_distance);                 \
     __ cmp(FieldOperand(input, HeapObject::kMapOffset),               \
            factory()->type##_map());                                  \
     final_branch_condition = equal;
   SIMD128_TYPES(SIMD128_TYPE)
(...skipping 253 matching lines...)
@@ -5266,10 +5264,10 @@
   __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), context);
 }
 
 
 #undef __
 
 }  // namespace internal
 }  // namespace v8
 
 #endif  // V8_TARGET_ARCH_IA32
