Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/change-lowering.h" | 5 #include "src/compiler/change-lowering.h" |
| 6 | 6 |
| 7 #include "src/address-map.h" | 7 #include "src/address-map.h" |
| 8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
| 9 #include "src/compiler/js-graph.h" | 9 #include "src/compiler/js-graph.h" |
| 10 #include "src/compiler/linkage.h" | 10 #include "src/compiler/linkage.h" |
| 11 #include "src/compiler/machine-operator.h" | 11 #include "src/compiler/machine-operator.h" |
| 12 #include "src/compiler/node-properties.h" | 12 #include "src/compiler/node-properties.h" |
| 13 #include "src/compiler/operator-properties.h" | 13 #include "src/compiler/operator-properties.h" |
| 14 #include "src/compiler/simplified-operator.h" | 14 #include "src/compiler/simplified-operator.h" |
| 15 | 15 |
| 16 namespace v8 { | 16 namespace v8 { |
| 17 namespace internal { | 17 namespace internal { |
| 18 namespace compiler { | 18 namespace compiler { |
| 19 | 19 |
| 20 ChangeLowering::~ChangeLowering() {} | 20 ChangeLowering::~ChangeLowering() {} |
| 21 | 21 |
| 22 | 22 |
| 23 Reduction ChangeLowering::Reduce(Node* node) { | 23 Reduction ChangeLowering::Reduce(Node* node) { |
| 24 Node* control = graph()->start(); | 24 Node* control = graph()->start(); |
| 25 switch (node->opcode()) { | 25 switch (node->opcode()) { |
| 26 case IrOpcode::kChangeBitToBool: | 26 case IrOpcode::kChangeBitToBool: |
| 27 return ChangeBitToBool(node->InputAt(0), control); | 27 return ReduceChangeBitToBool(node->InputAt(0), control); |
| 28 case IrOpcode::kChangeBoolToBit: | 28 case IrOpcode::kChangeBoolToBit: |
| 29 return ChangeBoolToBit(node->InputAt(0)); | 29 return ReduceChangeBoolToBit(node->InputAt(0)); |
| 30 case IrOpcode::kChangeInt31ToTagged: | 30 case IrOpcode::kChangeInt31ToTagged: |
| 31 return ChangeInt31ToTagged(node->InputAt(0), control); | 31 return ReduceChangeInt31ToTagged(node->InputAt(0), control); |
| 32 case IrOpcode::kChangeTaggedSignedToInt32: | 32 case IrOpcode::kChangeTaggedSignedToInt32: |
| 33 return ChangeTaggedSignedToInt32(node->InputAt(0)); | 33 return ReduceChangeTaggedSignedToInt32(node->InputAt(0)); |
| 34 case IrOpcode::kLoadField: | 34 case IrOpcode::kLoadField: |
| 35 return LoadField(node); | 35 return ReduceLoadField(node); |
| 36 case IrOpcode::kStoreField: | 36 case IrOpcode::kStoreField: |
| 37 return StoreField(node); | 37 return ReduceStoreField(node); |
| 38 case IrOpcode::kLoadElement: | 38 case IrOpcode::kLoadElement: |
| 39 return LoadElement(node); | 39 return ReduceLoadElement(node); |
| 40 case IrOpcode::kStoreElement: | 40 case IrOpcode::kStoreElement: |
| 41 return StoreElement(node); | 41 return ReduceStoreElement(node); |
| 42 case IrOpcode::kAllocate: | 42 case IrOpcode::kAllocate: |
| 43 return Allocate(node); | 43 return ReduceAllocate(node); |
| 44 case IrOpcode::kObjectIsSmi: | 44 case IrOpcode::kObjectIsSmi: |
| 45 return ObjectIsSmi(node); | 45 return ReduceObjectIsSmi(node); |
| 46 case IrOpcode::kChangeInt32ToTagged: | |
| 47 case IrOpcode::kChangeUint32ToTagged: | |
| 48 case IrOpcode::kChangeFloat64ToTagged: | |
| 49 FATAL("Changes should be already lowered during effect linearization."); | |
| 50 break; | |
| 51 default: | 46 default: |
| 52 return NoChange(); | 47 return NoChange(); |
| 53 } | 48 } |
| 54 UNREACHABLE(); | 49 UNREACHABLE(); |
| 55 return NoChange(); | 50 return NoChange(); |
| 56 } | 51 } |
| 57 | 52 |
| 58 | |
| 59 Node* ChangeLowering::HeapNumberValueIndexConstant() { | |
| 60 return jsgraph()->IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag); | |
| 61 } | |
| 62 | |
| 63 Node* ChangeLowering::SmiShiftBitsConstant() { | 53 Node* ChangeLowering::SmiShiftBitsConstant() { |
| 64 return jsgraph()->IntPtrConstant(kSmiShiftSize + kSmiTagSize); | 54 return jsgraph()->IntPtrConstant(kSmiShiftSize + kSmiTagSize); |
| 65 } | 55 } |
| 66 | 56 |
| 67 Node* ChangeLowering::ChangeInt32ToFloat64(Node* value) { | |
| 68 return graph()->NewNode(machine()->ChangeInt32ToFloat64(), value); | |
| 69 } | |
| 70 | |
| 71 Node* ChangeLowering::ChangeInt32ToSmi(Node* value) { | 57 Node* ChangeLowering::ChangeInt32ToSmi(Node* value) { |
| 72 if (machine()->Is64()) { | 58 if (machine()->Is64()) { |
| 73 value = graph()->NewNode(machine()->ChangeInt32ToInt64(), value); | 59 value = graph()->NewNode(machine()->ChangeInt32ToInt64(), value); |
| 74 } | 60 } |
| 75 return graph()->NewNode(machine()->WordShl(), value, SmiShiftBitsConstant()); | 61 return graph()->NewNode(machine()->WordShl(), value, SmiShiftBitsConstant()); |
| 76 } | 62 } |
| 77 | 63 |
| 78 | |
| 79 Node* ChangeLowering::ChangeSmiToFloat64(Node* value) { | |
| 80 return ChangeInt32ToFloat64(ChangeSmiToWord32(value)); | |
| 81 } | |
| 82 | |
| 83 Node* ChangeLowering::ChangeSmiToWord32(Node* value) { | 64 Node* ChangeLowering::ChangeSmiToWord32(Node* value) { |
| 84 value = graph()->NewNode(machine()->WordSar(), value, SmiShiftBitsConstant()); | 65 value = graph()->NewNode(machine()->WordSar(), value, SmiShiftBitsConstant()); |
| 85 if (machine()->Is64()) { | 66 if (machine()->Is64()) { |
| 86 value = graph()->NewNode(machine()->TruncateInt64ToInt32(), value); | 67 value = graph()->NewNode(machine()->TruncateInt64ToInt32(), value); |
| 87 } | 68 } |
| 88 return value; | 69 return value; |
| 89 } | 70 } |
| 90 | 71 |
| 91 | 72 |
| 92 Node* ChangeLowering::ChangeUint32ToFloat64(Node* value) { | 73 Node* ChangeLowering::ChangeUint32ToFloat64(Node* value) { |
| 93 return graph()->NewNode(machine()->ChangeUint32ToFloat64(), value); | 74 return graph()->NewNode(machine()->ChangeUint32ToFloat64(), value); |
| 94 } | 75 } |
| 95 | 76 |
| 96 | 77 Reduction ChangeLowering::ReduceChangeBitToBool(Node* value, Node* control) { |
| 97 Node* ChangeLowering::ChangeUint32ToSmi(Node* value) { | |
| 98 if (machine()->Is64()) { | |
| 99 value = graph()->NewNode(machine()->ChangeUint32ToUint64(), value); | |
| 100 } | |
| 101 return graph()->NewNode(machine()->WordShl(), value, SmiShiftBitsConstant()); | |
| 102 } | |
| 103 | |
| 104 | |
| 105 Node* ChangeLowering::LoadHeapNumberValue(Node* value, Node* control) { | |
| 106 return graph()->NewNode(machine()->Load(MachineType::Float64()), value, | |
| 107 HeapNumberValueIndexConstant(), graph()->start(), | |
| 108 control); | |
| 109 } | |
| 110 | |
| 111 | |
| 112 Node* ChangeLowering::TestNotSmi(Node* value) { | |
| 113 STATIC_ASSERT(kSmiTag == 0); | |
| 114 STATIC_ASSERT(kSmiTagMask == 1); | |
| 115 return graph()->NewNode(machine()->WordAnd(), value, | |
| 116 jsgraph()->IntPtrConstant(kSmiTagMask)); | |
| 117 } | |
| 118 | |
| 119 | |
| 120 Reduction ChangeLowering::ChangeBitToBool(Node* value, Node* control) { | |
| 121 return Replace( | 78 return Replace( |
| 122 graph()->NewNode(common()->Select(MachineRepresentation::kTagged), value, | 79 graph()->NewNode(common()->Select(MachineRepresentation::kTagged), value, |
| 123 jsgraph()->TrueConstant(), jsgraph()->FalseConstant())); | 80 jsgraph()->TrueConstant(), jsgraph()->FalseConstant())); |
| 124 } | 81 } |
| 125 | 82 |
| 126 | 83 Reduction ChangeLowering::ReduceChangeBoolToBit(Node* value) { |
| 127 Reduction ChangeLowering::ChangeBoolToBit(Node* value) { | |
| 128 return Replace(graph()->NewNode(machine()->WordEqual(), value, | 84 return Replace(graph()->NewNode(machine()->WordEqual(), value, |
| 129 jsgraph()->TrueConstant())); | 85 jsgraph()->TrueConstant())); |
| 130 } | 86 } |
| 131 | 87 |
| 132 Reduction ChangeLowering::ChangeInt31ToTagged(Node* value, Node* control) { | 88 Reduction ChangeLowering::ReduceChangeInt31ToTagged(Node* value, |
| | 89 Node* control) { |
| 133 return Replace(ChangeInt32ToSmi(value)); | 90 return Replace(ChangeInt32ToSmi(value)); |
| 134 } | 91 } |
| 135 | 92 |
| 136 Reduction ChangeLowering::ChangeTaggedSignedToInt32(Node* value) { | 93 Reduction ChangeLowering::ReduceChangeTaggedSignedToInt32(Node* value) { |
| 137 return Replace(ChangeSmiToWord32(value)); | 94 return Replace(ChangeSmiToWord32(value)); |
| 138 } | 95 } |
| 139 | 96 |
| 140 namespace { | 97 namespace { |
| 141 | 98 |
| 142 WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged, | 99 WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged, |
| 143 MachineRepresentation representation, | 100 MachineRepresentation representation) { |
| 144 Type* field_type, Type* input_type) { | 101 // if (field_type->Is(Type::TaggedSigned()) || |

> Michael Starzinger (2016/04/28 12:18:17): Can we at least leave a TODO here describing what
> Benedikt Meurer (2016/04/28 12:20:52): Done

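For readers following the thread: the diff below comments out the type-based write-barrier elimination, so the lowering falls back to a conservative choice. A minimal standalone sketch of that conservative rule (toy enums and a hypothetical `is_map_store` flag, not V8's actual declarations) might look like this:

```cpp
// Toy sketch of the conservative write-barrier selection that remains once
// the type-based checks are commented out: without type information, every
// tagged value stored into a tagged base keeps the full write barrier; only
// the map slot (handled via the field_offset overload in the real code) gets
// the cheaper map write barrier.
enum WriteBarrierKind { kNoWriteBarrier, kMapWriteBarrier, kFullWriteBarrier };
enum BaseTaggedness { kUntaggedBase, kTaggedBase };
enum class Rep { kTagged, kWord32, kFloat64 };

WriteBarrierKind ConservativeWriteBarrierKind(BaseTaggedness base, Rep rep,
                                              bool is_map_store) {
  if (base != kTaggedBase || rep != Rep::kTagged) {
    return kNoWriteBarrier;  // untagged stores never need a barrier
  }
  return is_map_store ? kMapWriteBarrier : kFullWriteBarrier;
}
```
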
| 145 if (field_type->Is(Type::TaggedSigned()) || | 102 // input_type->Is(Type::TaggedSigned())) { |
| 146 input_type->Is(Type::TaggedSigned())) { | 103 // // Write barriers are only for writes of heap objects. |
| 147 // Write barriers are only for writes of heap objects. | 104 // return kNoWriteBarrier; |
| 148 return kNoWriteBarrier; | 105 // } |
| 149 } | 106 // if (input_type->Is(Type::BooleanOrNullOrUndefined())) { |
| 150 if (input_type->Is(Type::BooleanOrNullOrUndefined())) { | 107 // // Write barriers are not necessary when storing true, false, null or |
| 151 // Write barriers are not necessary when storing true, false, null or | 108 // // undefined, because these special oddballs are always in the root |
| 152 // undefined, because these special oddballs are always in the root set. | 109 // set. |
| 153 return kNoWriteBarrier; | 110 // return kNoWriteBarrier; |
| 154 } | 111 // } |
| 155 if (base_is_tagged == kTaggedBase && | 112 if (base_is_tagged == kTaggedBase && |
| 156 representation == MachineRepresentation::kTagged) { | 113 representation == MachineRepresentation::kTagged) { |
| 157 if (input_type->IsConstant() && | 114 // if (input_type->IsConstant() && |
| 158 input_type->AsConstant()->Value()->IsHeapObject()) { | 115 // input_type->AsConstant()->Value()->IsHeapObject()) { |
| 159 Handle<HeapObject> input = | 116 // Handle<HeapObject> input = |
| 160 Handle<HeapObject>::cast(input_type->AsConstant()->Value()); | 117 // Handle<HeapObject>::cast(input_type->AsConstant()->Value()); |
| 161 if (input->IsMap()) { | 118 // if (input->IsMap()) { |
| 162 // Write barriers for storing maps are cheaper. | 119 // // Write barriers for storing maps are cheaper. |
| 163 return kMapWriteBarrier; | 120 // return kMapWriteBarrier; |
| 164 } | 121 // } |
| 165 Isolate* const isolate = input->GetIsolate(); | 122 // Isolate* const isolate = input->GetIsolate(); |
| 166 RootIndexMap root_index_map(isolate); | 123 // RootIndexMap root_index_map(isolate); |
| 167 int root_index = root_index_map.Lookup(*input); | 124 // int root_index = root_index_map.Lookup(*input); |
| 168 if (root_index != RootIndexMap::kInvalidRootIndex && | 125 // if (root_index != RootIndexMap::kInvalidRootIndex && |
| 169 isolate->heap()->RootIsImmortalImmovable(root_index)) { | 126 // isolate->heap()->RootIsImmortalImmovable(root_index)) { |
| 170 // Write barriers are unnecessary for immortal immovable roots. | 127 // // Write barriers are unnecessary for immortal immovable roots. |
| 171 return kNoWriteBarrier; | 128 // return kNoWriteBarrier; |
| 172 } | 129 // } |
| 173 } | 130 // } |
| 174 if (field_type->Is(Type::TaggedPointer()) || | 131 // if (field_type->Is(Type::TaggedPointer()) || |
| 175 input_type->Is(Type::TaggedPointer())) { | 132 // input_type->Is(Type::TaggedPointer())) { |
| 176 // Write barriers for heap objects don't need a Smi check. | 133 // // Write barriers for heap objects don't need a Smi check. |
| 177 return kPointerWriteBarrier; | 134 // return kPointerWriteBarrier; |
| 178 } | 135 // } |
| 179 // Write barriers are only for writes into heap objects (i.e. tagged base). | 136 // Write barriers are only for writes into heap objects (i.e. tagged base). |
| 180 return kFullWriteBarrier; | 137 return kFullWriteBarrier; |
| 181 } | 138 } |
| 182 return kNoWriteBarrier; | 139 return kNoWriteBarrier; |
| 183 } | 140 } |
| 184 | 141 |
| 185 | |
| 186 WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged, | 142 WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged, |
| 187 MachineRepresentation representation, | 143 MachineRepresentation representation, |
| 188 int field_offset, Type* field_type, | 144 int field_offset) { |
| 189 Type* input_type) { | |
| 190 if (base_is_tagged == kTaggedBase && field_offset == HeapObject::kMapOffset) { | 145 if (base_is_tagged == kTaggedBase && field_offset == HeapObject::kMapOffset) { |
| 191 // Write barriers for storing maps are cheaper. | 146 // Write barriers for storing maps are cheaper. |
| 192 return kMapWriteBarrier; | 147 return kMapWriteBarrier; |
| 193 } | 148 } |
| 194 return ComputeWriteBarrierKind(base_is_tagged, representation, field_type, | 149 return ComputeWriteBarrierKind(base_is_tagged, representation); |
| 195 input_type); | |
| 196 } | 150 } |
| 197 | 151 |
| 198 } // namespace | 152 } // namespace |
| 199 | 153 |
| 200 | 154 Reduction ChangeLowering::ReduceLoadField(Node* node) { |
| 201 Reduction ChangeLowering::LoadField(Node* node) { | |
| 202 const FieldAccess& access = FieldAccessOf(node->op()); | 155 const FieldAccess& access = FieldAccessOf(node->op()); |
| 203 Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag()); | 156 Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag()); |
| 204 node->InsertInput(graph()->zone(), 1, offset); | 157 node->InsertInput(graph()->zone(), 1, offset); |
| 205 NodeProperties::ChangeOp(node, machine()->Load(access.machine_type)); | 158 NodeProperties::ChangeOp(node, machine()->Load(access.machine_type)); |
| 206 return Changed(node); | 159 return Changed(node); |
| 207 } | 160 } |
| 208 | 161 |
| 209 | 162 Reduction ChangeLowering::ReduceStoreField(Node* node) { |
| 210 Reduction ChangeLowering::StoreField(Node* node) { | |
| 211 const FieldAccess& access = FieldAccessOf(node->op()); | 163 const FieldAccess& access = FieldAccessOf(node->op()); |
| 212 Type* type = NodeProperties::GetType(node->InputAt(1)); | |
| 213 WriteBarrierKind kind = ComputeWriteBarrierKind( | 164 WriteBarrierKind kind = ComputeWriteBarrierKind( |
| 214 access.base_is_tagged, access.machine_type.representation(), | 165 access.base_is_tagged, access.machine_type.representation(), |
| 215 access.offset, access.type, type); | 166 access.offset); |
| 216 Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag()); | 167 Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag()); |
| 217 node->InsertInput(graph()->zone(), 1, offset); | 168 node->InsertInput(graph()->zone(), 1, offset); |
| 218 NodeProperties::ChangeOp(node, | 169 NodeProperties::ChangeOp(node, |
| 219 machine()->Store(StoreRepresentation( | 170 machine()->Store(StoreRepresentation( |
| 220 access.machine_type.representation(), kind))); | 171 access.machine_type.representation(), kind))); |
| 221 return Changed(node); | 172 return Changed(node); |
| 222 } | 173 } |
| 223 | 174 |
| 224 | 175 |
| 225 Node* ChangeLowering::ComputeIndex(const ElementAccess& access, | 176 Node* ChangeLowering::ComputeIndex(const ElementAccess& access, |
| (...skipping 12 matching lines...) | |
| 238 } | 189 } |
| 239 if (machine()->Is64()) { | 190 if (machine()->Is64()) { |
| 240 // TODO(turbofan): This is probably only correct for typed arrays, and only | 191 // TODO(turbofan): This is probably only correct for typed arrays, and only |
| 241 // if the typed arrays are at most 2GiB in size, which happens to match | 192 // if the typed arrays are at most 2GiB in size, which happens to match |
| 242 // exactly our current situation. | 193 // exactly our current situation. |
| 243 index = graph()->NewNode(machine()->ChangeUint32ToUint64(), index); | 194 index = graph()->NewNode(machine()->ChangeUint32ToUint64(), index); |
| 244 } | 195 } |
| 245 return index; | 196 return index; |
| 246 } | 197 } |
| 247 | 198 |
| 248 | 199 Reduction ChangeLowering::ReduceLoadElement(Node* node) { |
| 249 Reduction ChangeLowering::LoadElement(Node* node) { | |
| 250 const ElementAccess& access = ElementAccessOf(node->op()); | 200 const ElementAccess& access = ElementAccessOf(node->op()); |
| 251 node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1))); | 201 node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1))); |
| 252 NodeProperties::ChangeOp(node, machine()->Load(access.machine_type)); | 202 NodeProperties::ChangeOp(node, machine()->Load(access.machine_type)); |
| 253 return Changed(node); | 203 return Changed(node); |
| 254 } | 204 } |
| 255 | 205 |
| 256 | 206 Reduction ChangeLowering::ReduceStoreElement(Node* node) { |
| 257 Reduction ChangeLowering::StoreElement(Node* node) { | |
| 258 const ElementAccess& access = ElementAccessOf(node->op()); | 207 const ElementAccess& access = ElementAccessOf(node->op()); |
| 259 Type* type = NodeProperties::GetType(node->InputAt(2)); | |
| 260 node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1))); | 208 node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1))); |
| 261 NodeProperties::ChangeOp( | 209 NodeProperties::ChangeOp( |
| 262 node, machine()->Store(StoreRepresentation( | 210 node, |
| 263 access.machine_type.representation(), | 211 machine()->Store(StoreRepresentation( |
| 264 ComputeWriteBarrierKind(access.base_is_tagged, | 212 access.machine_type.representation(), |
| 265 access.machine_type.representation(), | 213 ComputeWriteBarrierKind(access.base_is_tagged, |
| 266 access.type, type)))); | 214 access.machine_type.representation())))); |
| 267 return Changed(node); | 215 return Changed(node); |
| 268 } | 216 } |
| 269 | 217 |
| 270 | 218 Reduction ChangeLowering::ReduceAllocate(Node* node) { |
| 271 Reduction ChangeLowering::Allocate(Node* node) { | |
| 272 PretenureFlag pretenure = OpParameter<PretenureFlag>(node->op()); | 219 PretenureFlag pretenure = OpParameter<PretenureFlag>(node->op()); |
| 273 Callable callable = CodeFactory::Allocate(isolate(), pretenure); | 220 Callable callable = CodeFactory::Allocate(isolate(), pretenure); |
| 274 Node* target = jsgraph()->HeapConstant(callable.code()); | 221 Node* target = jsgraph()->HeapConstant(callable.code()); |
| 275 CallDescriptor* descriptor = Linkage::GetStubCallDescriptor( | 222 CallDescriptor* descriptor = Linkage::GetStubCallDescriptor( |
| 276 isolate(), jsgraph()->zone(), callable.descriptor(), 0, | 223 isolate(), jsgraph()->zone(), callable.descriptor(), 0, |
| 277 CallDescriptor::kNoFlags, Operator::kNoThrow); | 224 CallDescriptor::kNoFlags, Operator::kNoThrow); |
| 278 const Operator* op = common()->Call(descriptor); | 225 const Operator* op = common()->Call(descriptor); |
| 279 node->InsertInput(graph()->zone(), 0, target); | 226 node->InsertInput(graph()->zone(), 0, target); |
| 280 node->InsertInput(graph()->zone(), 2, jsgraph()->NoContextConstant()); | 227 node->InsertInput(graph()->zone(), 2, jsgraph()->NoContextConstant()); |
| 281 NodeProperties::ChangeOp(node, op); | 228 NodeProperties::ChangeOp(node, op); |
| 282 return Changed(node); | 229 return Changed(node); |
| 283 } | 230 } |
| 284 | 231 |
| 285 Node* ChangeLowering::IsSmi(Node* value) { | 232 Reduction ChangeLowering::ReduceObjectIsSmi(Node* node) { |
| 286 return graph()->NewNode( | |
| 287 machine()->WordEqual(), | |
| 288 graph()->NewNode(machine()->WordAnd(), value, | |
| 289 jsgraph()->IntPtrConstant(kSmiTagMask)), | |
| 290 jsgraph()->IntPtrConstant(kSmiTag)); | |
| 291 } | |
| 292 | |
| 293 Node* ChangeLowering::LoadHeapObjectMap(Node* object, Node* control) { | |
| 294 return graph()->NewNode( | |
| 295 machine()->Load(MachineType::AnyTagged()), object, | |
| 296 jsgraph()->IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), | |
| 297 graph()->start(), control); | |
| 298 } | |
| 299 | |
| 300 Node* ChangeLowering::LoadMapBitField(Node* map) { | |
| 301 return graph()->NewNode( | |
| 302 machine()->Load(MachineType::Uint8()), map, | |
| 303 jsgraph()->IntPtrConstant(Map::kBitFieldOffset - kHeapObjectTag), | |
| 304 graph()->start(), graph()->start()); | |
| 305 } | |
| 306 | |
| 307 Node* ChangeLowering::LoadMapInstanceType(Node* map) { | |
| 308 return graph()->NewNode( | |
| 309 machine()->Load(MachineType::Uint8()), map, | |
| 310 jsgraph()->IntPtrConstant(Map::kInstanceTypeOffset - kHeapObjectTag), | |
| 311 graph()->start(), graph()->start()); | |
| 312 } | |
| 313 | |
| 314 Reduction ChangeLowering::ObjectIsSmi(Node* node) { | |
| 315 node->ReplaceInput(0, | 233 node->ReplaceInput(0, |
| 316 graph()->NewNode(machine()->WordAnd(), node->InputAt(0), | 234 graph()->NewNode(machine()->WordAnd(), node->InputAt(0), |
| 317 jsgraph()->IntPtrConstant(kSmiTagMask))); | 235 jsgraph()->IntPtrConstant(kSmiTagMask))); |
| 318 node->AppendInput(graph()->zone(), jsgraph()->IntPtrConstant(kSmiTag)); | 236 node->AppendInput(graph()->zone(), jsgraph()->IntPtrConstant(kSmiTag)); |
| 319 NodeProperties::ChangeOp(node, machine()->WordEqual()); | 237 NodeProperties::ChangeOp(node, machine()->WordEqual()); |
| 320 return Changed(node); | 238 return Changed(node); |
| 321 } | 239 } |
| 322 | 240 |
| 323 Isolate* ChangeLowering::isolate() const { return jsgraph()->isolate(); } | 241 Isolate* ChangeLowering::isolate() const { return jsgraph()->isolate(); } |
| 324 | 242 |
| 325 | 243 |
| 326 Graph* ChangeLowering::graph() const { return jsgraph()->graph(); } | 244 Graph* ChangeLowering::graph() const { return jsgraph()->graph(); } |
| 327 | 245 |
| 328 | 246 |
| 329 CommonOperatorBuilder* ChangeLowering::common() const { | 247 CommonOperatorBuilder* ChangeLowering::common() const { |
| 330 return jsgraph()->common(); | 248 return jsgraph()->common(); |
| 331 } | 249 } |
| 332 | 250 |
| 333 | 251 |
| 334 MachineOperatorBuilder* ChangeLowering::machine() const { | 252 MachineOperatorBuilder* ChangeLowering::machine() const { |
| 335 return jsgraph()->machine(); | 253 return jsgraph()->machine(); |
| 336 } | 254 } |
| 337 | 255 |
| 338 } // namespace compiler | 256 } // namespace compiler |
| 339 } // namespace internal | 257 } // namespace internal |
| 340 } // namespace v8 | 258 } // namespace v8 |
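As an aside for readers less familiar with V8's Smi encoding: the `ChangeInt32ToSmi` and `ChangeSmiToWord32` helpers kept by this CL emit a plain shift by `SmiShiftBitsConstant()`. A standalone illustration of that arithmetic, assuming the 64-bit configuration in which `kSmiShiftSize + kSmiTagSize` adds up to 32, would be:

```cpp
// Standalone illustration (not V8 code) of shift-based Smi tagging on a
// 64-bit target: a 32-bit integer is tagged by shifting it into the upper
// word (WordShl) and untagged with an arithmetic shift right followed by a
// truncation back to 32 bits (WordSar + TruncateInt64ToInt32).
#include <cstdint>

constexpr int kToySmiShiftBits = 32;  // stands in for kSmiShiftSize + kSmiTagSize

int64_t TagSmi(int32_t value) {
  return static_cast<int64_t>(value) << kToySmiShiftBits;
}

int32_t UntagSmi(int64_t smi) {
  return static_cast<int32_t>(smi >> kToySmiShiftBits);
}
```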