// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-stub-assembler.h"

#include <ostream>

#include "src/code-factory.h"
#include "src/compiler/graph.h"
#include "src/compiler/instruction-selector.h"
#include "src/compiler/linkage.h"
#include "src/compiler/pipeline.h"
#include "src/compiler/raw-machine-assembler.h"
#include "src/compiler/schedule.h"
#include "src/frames.h"
#include "src/interface-descriptors.h"
#include "src/interpreter/bytecodes.h"
#include "src/machine-type.h"
#include "src/macro-assembler.h"
#include "src/zone.h"

namespace v8 {
namespace internal {
namespace compiler {

CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     const CallInterfaceDescriptor& descriptor,
                                     Code::Flags flags, const char* name,
                                     size_t result_size)
    : CodeStubAssembler(
          isolate, zone,
          Linkage::GetStubCallDescriptor(
              isolate, zone, descriptor, descriptor.GetStackParameterCount(),
              CallDescriptor::kNoFlags, Operator::kNoProperties,
              MachineType::AnyTagged(), result_size),
          flags, name) {}

CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     int parameter_count, Code::Flags flags,
                                     const char* name)
    : CodeStubAssembler(isolate, zone, Linkage::GetJSCallDescriptor(
                                           zone, false, parameter_count,
                                           CallDescriptor::kNoFlags),
                        flags, name) {}

CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     CallDescriptor* call_descriptor,
                                     Code::Flags flags, const char* name)
    : raw_assembler_(new RawMachineAssembler(
          isolate, new (zone) Graph(zone), call_descriptor,
          MachineType::PointerRepresentation(),
          InstructionSelector::SupportedMachineOperatorFlags())),
      flags_(flags),
      name_(name),
      code_generated_(false),
      variables_(zone) {}

CodeStubAssembler::~CodeStubAssembler() {}

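// CallPrologue/CallEpilogue are empty hooks here; they bracket every non-tail
// call emitted below (CallN, CallRuntime, CallStub), giving a single place to
// add bookkeeping around calls if that is ever needed.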
void CodeStubAssembler::CallPrologue() {}

void CodeStubAssembler::CallEpilogue() {}

Handle<Code> CodeStubAssembler::GenerateCode() {
  DCHECK(!code_generated_);

  Schedule* schedule = raw_assembler_->Export();
  Handle<Code> code = Pipeline::GenerateCodeForCodeStub(
      isolate(), raw_assembler_->call_descriptor(), graph(), schedule, flags_,
      name_);

  code_generated_ = true;
  return code;
}


Node* CodeStubAssembler::Int32Constant(int value) {
  return raw_assembler_->Int32Constant(value);
}


Node* CodeStubAssembler::IntPtrConstant(intptr_t value) {
  return raw_assembler_->IntPtrConstant(value);
}


Node* CodeStubAssembler::NumberConstant(double value) {
  return raw_assembler_->NumberConstant(value);
}

Node* CodeStubAssembler::SmiConstant(Smi* value) {
  return IntPtrConstant(bit_cast<intptr_t>(value));
}

Node* CodeStubAssembler::HeapConstant(Handle<HeapObject> object) {
  return raw_assembler_->HeapConstant(object);
}


Node* CodeStubAssembler::BooleanConstant(bool value) {
  return raw_assembler_->BooleanConstant(value);
}

Node* CodeStubAssembler::ExternalConstant(ExternalReference address) {
  return raw_assembler_->ExternalConstant(address);
}

Node* CodeStubAssembler::Float64Constant(double value) {
  return raw_assembler_->Float64Constant(value);
}

Node* CodeStubAssembler::BooleanMapConstant() {
  return HeapConstant(isolate()->factory()->boolean_map());
}

Node* CodeStubAssembler::HeapNumberMapConstant() {
  return HeapConstant(isolate()->factory()->heap_number_map());
}

Node* CodeStubAssembler::NullConstant() {
  return LoadRoot(Heap::kNullValueRootIndex);
}

Node* CodeStubAssembler::UndefinedConstant() {
  return LoadRoot(Heap::kUndefinedValueRootIndex);
}

Node* CodeStubAssembler::Parameter(int value) {
  return raw_assembler_->Parameter(value);
}

void CodeStubAssembler::Return(Node* value) {
  return raw_assembler_->Return(value);
}

void CodeStubAssembler::Bind(CodeStubAssembler::Label* label) {
  return label->Bind();
}

Node* CodeStubAssembler::LoadFramePointer() {
  return raw_assembler_->LoadFramePointer();
}

Node* CodeStubAssembler::LoadParentFramePointer() {
  return raw_assembler_->LoadParentFramePointer();
}

Node* CodeStubAssembler::LoadStackPointer() {
  return raw_assembler_->LoadStackPointer();
}

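// The total shift used to tag/untag a Smi: in the default 64-bit
// configuration kSmiShiftSize(31) + kSmiTagSize(1) = 32, so the payload lives
// in the upper half word; on 32-bit targets only the single tag bit is
// shifted out.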
Node* CodeStubAssembler::SmiShiftBitsConstant() {
  return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
}

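// Rounds {x} to the nearest integer with ties toward +Infinity (the
// Math.round rule): start from ceil(x) and step down by one when
// ceil(x) - 0.5 > x.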
Node* CodeStubAssembler::Float64Round(Node* x) {
  Node* one = Float64Constant(1.0);
  Node* one_half = Float64Constant(0.5);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this);

  // Round up {x} towards Infinity.
  var_x.Bind(Float64Ceil(x));

  GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
         &return_x);
  var_x.Bind(Float64Sub(var_x.value(), one));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

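// Float64Ceil/Float64Floor/Float64Trunc emulate rounding on machines without
// a dedicated instruction via the 2^52 trick: 2^52 is the smallest power of
// two whose double ulp is exactly 1.0, so (x + 2^52) - 2^52 snaps any
// |x| < 2^52 to an integer under round-to-nearest, after which a single
// compare-and-adjust step corrects the result in the desired direction.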
Node* CodeStubAssembler::Float64Ceil(Node* x) {
  if (raw_assembler_->machine()->Float64RoundUp().IsSupported()) {
    return raw_assembler_->Float64RoundUp(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this), return_minus_x(this);
  var_x.Bind(x);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoUnless(Float64LessThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[.
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoUnless(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_minus_x);
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

Node* CodeStubAssembler::Float64Floor(Node* x) {
  if (raw_assembler_->machine()->Float64RoundDown().IsSupported()) {
    return raw_assembler_->Float64RoundDown(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this), return_minus_x(this);
  var_x.Bind(x);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards -Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[.
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoUnless(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards -Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoUnless(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_minus_x);
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

Node* CodeStubAssembler::Float64Trunc(Node* x) {
  if (raw_assembler_->machine()->Float64RoundTruncate().IsSupported()) {
    return raw_assembler_->Float64RoundTruncate(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this), return_minus_x(this);
  var_x.Bind(x);

  // Check if {x} is greater than 0.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    if (raw_assembler_->machine()->Float64RoundDown().IsSupported()) {
      var_x.Bind(raw_assembler_->Float64RoundDown(x));
    } else {
      // Just return {x} unless it's in the range ]0,2^52[.
      GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

      // Round positive {x} towards -Infinity.
      var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
      GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
    }
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    if (raw_assembler_->machine()->Float64RoundUp().IsSupported()) {
      var_x.Bind(raw_assembler_->Float64RoundUp(x));
      Goto(&return_x);
    } else {
      // Just return {x} unless it's in the range ]-2^52,0[.
      GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
      GotoUnless(Float64LessThan(x, zero), &return_x);

      // Round negated {x} towards -Infinity and return result negated.
      Node* minus_x = Float64Neg(x);
      var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
      GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
      Goto(&return_minus_x);
    }
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

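// Tagging a Smi is a plain left shift (the tag bits are zero); untagging is
// an arithmetic right shift. SmiToWord32 additionally truncates to 32 bits on
// 64-bit targets, where the untagged result still lives in a 64-bit word.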
Node* CodeStubAssembler::SmiTag(Node* value) {
  return raw_assembler_->WordShl(value, SmiShiftBitsConstant());
}

Node* CodeStubAssembler::SmiUntag(Node* value) {
  return raw_assembler_->WordSar(value, SmiShiftBitsConstant());
}

Node* CodeStubAssembler::SmiToWord32(Node* value) {
  Node* result = raw_assembler_->WordSar(value, SmiShiftBitsConstant());
  if (raw_assembler_->machine()->Is64()) {
    result = raw_assembler_->TruncateInt64ToInt32(result);
  }
  return result;
}

Node* CodeStubAssembler::SmiToFloat64(Node* value) {
  return ChangeInt32ToFloat64(SmiUntag(value));
}

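// Because the Smi tag bits are all zero, word-level add, sub, and compare can
// be applied directly to tagged Smi values: the result is again a correctly
// tagged Smi (overflow aside, which the *WithOverflow variants detect).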
Node* CodeStubAssembler::SmiAdd(Node* a, Node* b) { return IntPtrAdd(a, b); }

Node* CodeStubAssembler::SmiAddWithOverflow(Node* a, Node* b) {
  return IntPtrAddWithOverflow(a, b);
}

Node* CodeStubAssembler::SmiSub(Node* a, Node* b) { return IntPtrSub(a, b); }

Node* CodeStubAssembler::SmiSubWithOverflow(Node* a, Node* b) {
  return IntPtrSubWithOverflow(a, b);
}

Node* CodeStubAssembler::SmiEqual(Node* a, Node* b) { return WordEqual(a, b); }

Node* CodeStubAssembler::SmiLessThan(Node* a, Node* b) {
  return IntPtrLessThan(a, b);
}

Node* CodeStubAssembler::SmiLessThanOrEqual(Node* a, Node* b) {
  return IntPtrLessThanOrEqual(a, b);
}

Node* CodeStubAssembler::SmiMin(Node* a, Node* b) {
  // TODO(bmeurer): Consider using Select once available.
  Variable min(this, MachineRepresentation::kTagged);
  Label if_a(this), if_b(this), join(this);
  BranchIfSmiLessThan(a, b, &if_a, &if_b);
  Bind(&if_a);
  min.Bind(a);
  Goto(&join);
  Bind(&if_b);
  min.Bind(b);
  Goto(&join);
  Bind(&join);
  return min.value();
}

#define DEFINE_CODE_STUB_ASSEMBLER_BINARY_OP(name)  \
  Node* CodeStubAssembler::name(Node* a, Node* b) { \
    return raw_assembler_->name(a, b);              \
  }
CODE_STUB_ASSEMBLER_BINARY_OP_LIST(DEFINE_CODE_STUB_ASSEMBLER_BINARY_OP)
#undef DEFINE_CODE_STUB_ASSEMBLER_BINARY_OP

Node* CodeStubAssembler::WordShl(Node* value, int shift) {
  return raw_assembler_->WordShl(value, IntPtrConstant(shift));
}

#define DEFINE_CODE_STUB_ASSEMBLER_UNARY_OP(name) \
  Node* CodeStubAssembler::name(Node* a) { return raw_assembler_->name(a); }
CODE_STUB_ASSEMBLER_UNARY_OP_LIST(DEFINE_CODE_STUB_ASSEMBLER_UNARY_OP)
#undef DEFINE_CODE_STUB_ASSEMBLER_UNARY_OP

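// A word holds a Smi iff its tag bit(s) are zero; the positive-Smi check
// additionally requires the sign bit to be clear.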
Node* CodeStubAssembler::WordIsSmi(Node* a) {
  return WordEqual(raw_assembler_->WordAnd(a, IntPtrConstant(kSmiTagMask)),
                   IntPtrConstant(0));
}

Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
  return WordEqual(
      raw_assembler_->WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
      IntPtrConstant(0));
}

Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
                                          MachineType rep) {
  return raw_assembler_->Load(rep, buffer, IntPtrConstant(offset));
}

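// Heap object fields are addressed relative to the tagged pointer, so field
// accesses subtract kHeapObjectTag from the field offset instead of untagging
// the pointer itself.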
Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
                                         MachineType rep) {
  return raw_assembler_->Load(rep, object,
                              IntPtrConstant(offset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
  return Load(MachineType::Float64(), object,
              IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag));
}

Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kFloat64, object,
      IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag), value);
}

Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
  Node* value = LoadHeapNumberValue(object);
  return raw_assembler_->TruncateFloat64ToInt32(TruncationMode::kJavaScript,
                                                value);
}

Node* CodeStubAssembler::LoadMapBitField(Node* map) {
  return Load(MachineType::Uint8(), map,
              IntPtrConstant(Map::kBitFieldOffset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadMapBitField2(Node* map) {
  return Load(MachineType::Uint8(), map,
              IntPtrConstant(Map::kBitField2Offset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadMapBitField3(Node* map) {
  return Load(MachineType::Uint32(), map,
              IntPtrConstant(Map::kBitField3Offset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadMapInstanceType(Node* map) {
  return Load(MachineType::Uint8(), map,
              IntPtrConstant(Map::kInstanceTypeOffset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadMapDescriptors(Node* map) {
  return LoadObjectField(map, Map::kDescriptorsOffset);
}

Node* CodeStubAssembler::LoadNameHash(Node* name) {
  return Load(MachineType::Uint32(), name,
              IntPtrConstant(Name::kHashFieldOffset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadFixedArrayElementInt32Index(
    Node* object, Node* int32_index, int additional_offset) {
  Node* header_size = IntPtrConstant(additional_offset +
                                     FixedArray::kHeaderSize - kHeapObjectTag);
  Node* scaled_index = WordShl(int32_index, IntPtrConstant(kPointerSizeLog2));
  Node* offset = IntPtrAdd(scaled_index, header_size);
  return Load(MachineType::AnyTagged(), object, offset);
}

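// A Smi index already carries a left shift of kSmiShiftBits, so rather than
// untagging and re-scaling it is shifted directly from tagged form to byte
// offset: right when the Smi shift exceeds kPointerSizeLog2 (64-bit), left
// otherwise (32-bit).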
Node* CodeStubAssembler::LoadFixedArrayElementSmiIndex(Node* object,
                                                       Node* smi_index,
                                                       int additional_offset) {
  int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
  Node* header_size = IntPtrConstant(additional_offset +
                                     FixedArray::kHeaderSize - kHeapObjectTag);
  Node* scaled_index =
      (kSmiShiftBits > kPointerSizeLog2)
          ? WordSar(smi_index, IntPtrConstant(kSmiShiftBits - kPointerSizeLog2))
          : WordShl(smi_index,
                    IntPtrConstant(kPointerSizeLog2 - kSmiShiftBits));
  Node* offset = IntPtrAdd(scaled_index, header_size);
  return Load(MachineType::AnyTagged(), object, offset);
}

Node* CodeStubAssembler::LoadFixedArrayElementConstantIndex(Node* object,
                                                            int index) {
  Node* offset = IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag +
                                index * kPointerSize);
  return raw_assembler_->Load(MachineType::AnyTagged(), object, offset);
}

Node* CodeStubAssembler::StoreFixedArrayElementNoWriteBarrier(Node* object,
                                                              Node* index,
                                                              Node* value) {
  Node* offset =
      IntPtrAdd(WordShl(index, IntPtrConstant(kPointerSizeLog2)),
                IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
  return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
                             value);
}

Node* CodeStubAssembler::LoadRoot(Heap::RootListIndex root_index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(root_index)) {
    Handle<Object> root = isolate()->heap()->root_handle(root_index);
    if (root->IsSmi()) {
      return SmiConstant(Smi::cast(*root));
    } else {
      return HeapConstant(Handle<HeapObject>::cast(root));
    }
  }

  compiler::Node* roots_array_start =
      ExternalConstant(ExternalReference::roots_array_start(isolate()));
  USE(roots_array_start);

  // TODO(danno): Implement the root-access case where the root is not
  // constant and must be loaded from the root array.
  UNIMPLEMENTED();
  return nullptr;
}

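// Bump-pointer allocation: each space exposes a top and a limit pointer
// behind external references. If limit - top is too small the deferred slow
// path calls into the runtime; otherwise the object starts at top, top is
// bumped by the requested size, and the result is tagged by adding
// kHeapObjectTag.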
Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
                                              AllocationFlags flags,
                                              Node* top_address,
                                              Node* limit_address) {
  Node* top = Load(MachineType::Pointer(), top_address);
  Node* limit = Load(MachineType::Pointer(), limit_address);

  // If there's not enough space, call the runtime.
  RawMachineLabel runtime_call(RawMachineLabel::kDeferred), no_runtime_call,
      merge_runtime;
  raw_assembler_->Branch(
      raw_assembler_->IntPtrLessThan(IntPtrSub(limit, top), size_in_bytes),
      &runtime_call, &no_runtime_call);

  raw_assembler_->Bind(&runtime_call);
  // AllocateInTargetSpace does not use the context.
  Node* context = IntPtrConstant(0);
  Node* runtime_flags = SmiTag(Int32Constant(
      AllocateDoubleAlignFlag::encode(false) |
      AllocateTargetSpace::encode(flags & kPretenured
                                      ? AllocationSpace::OLD_SPACE
                                      : AllocationSpace::NEW_SPACE)));
  Node* runtime_result = CallRuntime(Runtime::kAllocateInTargetSpace, context,
                                     SmiTag(size_in_bytes), runtime_flags);
  raw_assembler_->Goto(&merge_runtime);

  // When there is enough space, return `top' and bump it up.
  raw_assembler_->Bind(&no_runtime_call);
  Node* no_runtime_result = top;
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                      IntPtrAdd(top, size_in_bytes));
  no_runtime_result =
      IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag));
  raw_assembler_->Goto(&merge_runtime);

  raw_assembler_->Bind(&merge_runtime);
  return raw_assembler_->Phi(MachineType::PointerRepresentation(),
                             runtime_result, no_runtime_result);
}

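// Double-aligned allocation over-allocates by one pointer and, when the raw
// start address turned out misaligned, plants a one-pointer filler map at the
// start (keeping the heap iterable) and returns the address just past the
// filler.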
Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
                                            AllocationFlags flags,
                                            Node* top_address,
                                            Node* limit_address) {
  Node* top = Load(MachineType::Pointer(), top_address);
  Node* limit = Load(MachineType::Pointer(), limit_address);
  Node* adjusted_size = size_in_bytes;
  if (flags & kDoubleAlignment) {
    // TODO(epertoso): Simd128 alignment.
    RawMachineLabel aligned, not_aligned, merge;
    raw_assembler_->Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)),
                           &not_aligned, &aligned);

    raw_assembler_->Bind(&not_aligned);
    Node* not_aligned_size =
        IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
    raw_assembler_->Goto(&merge);

    raw_assembler_->Bind(&aligned);
    raw_assembler_->Goto(&merge);

    raw_assembler_->Bind(&merge);
    adjusted_size = raw_assembler_->Phi(MachineType::PointerRepresentation(),
                                        not_aligned_size, adjusted_size);
  }

  Node* address = AllocateRawUnaligned(adjusted_size, kNone, top, limit);

  RawMachineLabel needs_filler, doesnt_need_filler, merge_address;
  raw_assembler_->Branch(
      raw_assembler_->IntPtrEqual(adjusted_size, size_in_bytes),
      &doesnt_need_filler, &needs_filler);

  raw_assembler_->Bind(&needs_filler);
  // Store a filler and increase the address by kPointerSize.
  // TODO(epertoso): this code assumes that we only align to kDoubleSize.
  // Change it when Simd128 alignment is supported.
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
                      LoadRoot(Heap::kOnePointerFillerMapRootIndex));
  Node* address_with_filler = IntPtrAdd(address, IntPtrConstant(kPointerSize));
  raw_assembler_->Goto(&merge_address);

  raw_assembler_->Bind(&doesnt_need_filler);
  Node* address_without_filler = address;
  raw_assembler_->Goto(&merge_address);

  raw_assembler_->Bind(&merge_address);
  address = raw_assembler_->Phi(MachineType::PointerRepresentation(),
                                address_with_filler, address_without_filler);
  // Update the top.
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                      IntPtrAdd(top, adjusted_size));
  return address;
}

Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
  bool const new_space = !(flags & kPretenured);
  Node* top_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_top_address(isolate())
          : ExternalReference::old_space_allocation_top_address(isolate()));
  Node* limit_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_limit_address(isolate())
          : ExternalReference::old_space_allocation_limit_address(isolate()));

#ifdef V8_HOST_ARCH_32_BIT
  if (flags & kDoubleAlignment) {
    return AllocateRawAligned(IntPtrConstant(size_in_bytes), flags, top_address,
                              limit_address);
  }
#endif

  return AllocateRawUnaligned(IntPtrConstant(size_in_bytes), flags, top_address,
                              limit_address);
}

Node* CodeStubAssembler::AllocateHeapNumber() {
  Node* result = Allocate(HeapNumber::kSize, kNone);
  StoreMapNoWriteBarrier(result, HeapNumberMapConstant());
  return result;
}

Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value) {
  Node* result = AllocateHeapNumber();
  StoreHeapNumberValue(result, value);
  return result;
}

Node* CodeStubAssembler::Load(MachineType rep, Node* base) {
  return raw_assembler_->Load(rep, base);
}

Node* CodeStubAssembler::Load(MachineType rep, Node* base, Node* index) {
  return raw_assembler_->Load(rep, base, index);
}

Node* CodeStubAssembler::Store(MachineRepresentation rep, Node* base,
                               Node* value) {
  return raw_assembler_->Store(rep, base, value, kFullWriteBarrier);
}

Node* CodeStubAssembler::Store(MachineRepresentation rep, Node* base,
                               Node* index, Node* value) {
  return raw_assembler_->Store(rep, base, index, value, kFullWriteBarrier);
}

Node* CodeStubAssembler::StoreNoWriteBarrier(MachineRepresentation rep,
                                             Node* base, Node* value) {
  return raw_assembler_->Store(rep, base, value, kNoWriteBarrier);
}

Node* CodeStubAssembler::StoreNoWriteBarrier(MachineRepresentation rep,
                                             Node* base, Node* index,
                                             Node* value) {
  return raw_assembler_->Store(rep, base, index, value, kNoWriteBarrier);
}

Node* CodeStubAssembler::Projection(int index, Node* value) {
  return raw_assembler_->Projection(index, value);
}

Node* CodeStubAssembler::LoadMap(Node* object) {
  return LoadObjectField(object, HeapObject::kMapOffset);
}

Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kTagged, object,
      IntPtrConstant(HeapNumber::kMapOffset - kHeapObjectTag), map);
}

Node* CodeStubAssembler::LoadInstanceType(Node* object) {
  return LoadMapInstanceType(LoadMap(object));
}

Node* CodeStubAssembler::LoadElements(Node* object) {
  return LoadObjectField(object, JSObject::kElementsOffset);
}

Node* CodeStubAssembler::LoadFixedArrayBaseLength(Node* array) {
  return LoadObjectField(array, FixedArrayBase::kLengthOffset);
}

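// Extracts a bit field as (word32 & mask) >> shift, i.e. the decode step of
// V8's BitField encoding.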
Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
                                        uint32_t mask) {
  return raw_assembler_->Word32Shr(
      raw_assembler_->Word32And(word32, raw_assembler_->Int32Constant(mask)),
      raw_assembler_->Int32Constant(shift));
}

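// Tags a float64 as a Smi when it round-trips exactly through int32, taking
// care to box -0.0: it truncates to 0 but must stay a HeapNumber, so the sign
// is checked via the high word. Everything else becomes a fresh HeapNumber.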
Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
  Node* value32 = raw_assembler_->TruncateFloat64ToInt32(
      TruncationMode::kRoundToZero, value);
  Node* value64 = ChangeInt32ToFloat64(value32);

  Label if_valueisint32(this), if_valueisheapnumber(this), if_join(this);

  Label if_valueisequal(this), if_valueisnotequal(this);
  Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal);
  Bind(&if_valueisequal);
  {
    Label if_valueiszero(this), if_valueisnotzero(this);
    Branch(Float64Equal(value, Float64Constant(0.0)), &if_valueiszero,
           &if_valueisnotzero);

    Bind(&if_valueiszero);
    BranchIfInt32LessThan(raw_assembler_->Float64ExtractHighWord32(value),
                          Int32Constant(0), &if_valueisheapnumber,
                          &if_valueisint32);

    Bind(&if_valueisnotzero);
    Goto(&if_valueisint32);
  }
  Bind(&if_valueisnotequal);
  Goto(&if_valueisheapnumber);

  Variable var_result(this, MachineRepresentation::kTagged);
  Bind(&if_valueisint32);
  {
    if (raw_assembler_->machine()->Is64()) {
      Node* result = SmiTag(ChangeInt32ToInt64(value32));
      var_result.Bind(result);
      Goto(&if_join);
    } else {
      Node* pair = Int32AddWithOverflow(value32, value32);
      Node* overflow = Projection(1, pair);
      Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
      Branch(overflow, &if_overflow, &if_notoverflow);
      Bind(&if_overflow);
      Goto(&if_valueisheapnumber);
      Bind(&if_notoverflow);
      {
        Node* result = Projection(0, pair);
        var_result.Bind(result);
        Goto(&if_join);
      }
    }
  }
  Bind(&if_valueisheapnumber);
  {
    Node* result = AllocateHeapNumberWithValue(value);
    var_result.Bind(result);
    Goto(&if_join);
  }
  Bind(&if_join);
  return var_result.value();
}

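// On 32-bit targets `value + value` is exactly the Smi encoding (a left
// shift by the single tag bit), so an overflow-checked self-add both tags the
// value and detects when it does not fit in a Smi and needs a HeapNumber.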
Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) {
  if (raw_assembler_->machine()->Is64()) {
    return SmiTag(ChangeInt32ToInt64(value));
  }
  Variable var_result(this, MachineRepresentation::kTagged);
  Node* pair = Int32AddWithOverflow(value, value);
  Node* overflow = Projection(1, pair);
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
      if_join(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  Bind(&if_overflow);
  {
    Node* value64 = ChangeInt32ToFloat64(value);
    Node* result = AllocateHeapNumberWithValue(value64);
    var_result.Bind(result);
  }
  Goto(&if_join);
  Bind(&if_notoverflow);
  {
    Node* result = Projection(0, pair);
    var_result.Bind(result);
  }
  Goto(&if_join);
  Bind(&if_join);
  return var_result.value();
}

Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  Variable var_value(this, MachineRepresentation::kTagged),
      var_result(this, MachineRepresentation::kFloat64);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    Label if_valueissmi(this), if_valueisnotsmi(this);
    Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

    Bind(&if_valueissmi);
    {
      // Convert the Smi {value}.
      var_result.Bind(SmiToFloat64(value));
      Goto(&done_loop);
    }

    Bind(&if_valueisnotsmi);
    {
      // Check if {value} is a HeapNumber.
      Label if_valueisheapnumber(this),
          if_valueisnotheapnumber(this, Label::kDeferred);
      Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
             &if_valueisheapnumber, &if_valueisnotheapnumber);

      Bind(&if_valueisheapnumber);
      {
        // Load the floating point value.
        var_result.Bind(LoadHeapNumberValue(value));
        Goto(&done_loop);
      }

      Bind(&if_valueisnotheapnumber);
      {
        // Convert the {value} to a Number first.
        Callable callable = CodeFactory::NonNumberToNumber(isolate());
        var_value.Bind(CallStub(callable, context, value));
        Goto(&loop);
      }
    }
  }
  Bind(&done_loop);
  return var_result.value();
}

Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  Variable var_value(this, MachineRepresentation::kTagged),
      var_result(this, MachineRepresentation::kWord32);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    Label if_valueissmi(this), if_valueisnotsmi(this);
    Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

    Bind(&if_valueissmi);
    {
      // Convert the Smi {value}.
      var_result.Bind(SmiToWord32(value));
      Goto(&done_loop);
    }

    Bind(&if_valueisnotsmi);
    {
      // Check if {value} is a HeapNumber.
      Label if_valueisheapnumber(this),
          if_valueisnotheapnumber(this, Label::kDeferred);
      Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
             &if_valueisheapnumber, &if_valueisnotheapnumber);

      Bind(&if_valueisheapnumber);
      {
        // Truncate the floating point value.
        var_result.Bind(TruncateHeapNumberValueToWord32(value));
        Goto(&done_loop);
      }

      Bind(&if_valueisnotheapnumber);
      {
        // Convert the {value} to a Number first.
        Callable callable = CodeFactory::NonNumberToNumber(isolate());
        var_value.Bind(CallStub(callable, context, value));
        Goto(&loop);
      }
    }
  }
  Bind(&done_loop);
  return var_result.value();
}

void CodeStubAssembler::BranchIf(Node* condition, Label* if_true,
                                 Label* if_false) {
  Label if_condition_is_true(this), if_condition_is_false(this);
  Branch(condition, &if_condition_is_true, &if_condition_is_false);
  Bind(&if_condition_is_true);
  Goto(if_true);
  Bind(&if_condition_is_false);
  Goto(if_false);
}

Node* CodeStubAssembler::CallN(CallDescriptor* descriptor, Node* code_target,
                               Node** args) {
  CallPrologue();
  Node* return_value = raw_assembler_->CallN(descriptor, code_target, args);
  CallEpilogue();
  return return_value;
}


Node* CodeStubAssembler::TailCallN(CallDescriptor* descriptor,
                                   Node* code_target, Node** args) {
  return raw_assembler_->TailCallN(descriptor, code_target, args);
}

Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
                                     Node* context) {
  CallPrologue();
  Node* return_value = raw_assembler_->CallRuntime0(function_id, context);
  CallEpilogue();
  return return_value;
}

Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
                                     Node* context, Node* arg1) {
  CallPrologue();
  Node* return_value = raw_assembler_->CallRuntime1(function_id, arg1, context);
  CallEpilogue();
  return return_value;
}

Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
                                     Node* context, Node* arg1, Node* arg2) {
  CallPrologue();
  Node* return_value =
      raw_assembler_->CallRuntime2(function_id, arg1, arg2, context);
  CallEpilogue();
  return return_value;
}

Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
                                     Node* context, Node* arg1, Node* arg2,
                                     Node* arg3) {
  CallPrologue();
  Node* return_value =
      raw_assembler_->CallRuntime3(function_id, arg1, arg2, arg3, context);
  CallEpilogue();
  return return_value;
}

Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
                                     Node* context, Node* arg1, Node* arg2,
                                     Node* arg3, Node* arg4) {
  CallPrologue();
  Node* return_value = raw_assembler_->CallRuntime4(function_id, arg1, arg2,
                                                    arg3, arg4, context);
  CallEpilogue();
  return return_value;
}

Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                         Node* context) {
  return raw_assembler_->TailCallRuntime0(function_id, context);
}

Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                         Node* context, Node* arg1) {
  return raw_assembler_->TailCallRuntime1(function_id, arg1, context);
}

Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                         Node* context, Node* arg1,
                                         Node* arg2) {
  return raw_assembler_->TailCallRuntime2(function_id, arg1, arg2, context);
}

Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                         Node* context, Node* arg1, Node* arg2,
                                         Node* arg3) {
  return raw_assembler_->TailCallRuntime3(function_id, arg1, arg2, arg3,
                                          context);
}

Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
                                         Node* context, Node* arg1, Node* arg2,
                                         Node* arg3, Node* arg4) {
  return raw_assembler_->TailCallRuntime4(function_id, arg1, arg2, arg3, arg4,
                                          context);
}

Node* CodeStubAssembler::CallStub(Callable const& callable, Node* context,
                                  Node* arg1, size_t result_size) {
  Node* target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), target, context, arg1, result_size);
}

Node* CodeStubAssembler::CallStub(Callable const& callable, Node* context,
                                  Node* arg1, Node* arg2, size_t result_size) {
  Node* target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), target, context, arg1, arg2,
                  result_size);
}

Node* CodeStubAssembler::CallStub(Callable const& callable, Node* context,
                                  Node* arg1, Node* arg2, Node* arg3,
                                  size_t result_size) {
  Node* target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), target, context, arg1, arg2, arg3,
                  result_size);
}

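// In the descriptor-based CallStub helpers below, the stub's own arguments
// are placed first in the argument array and the context is always appended
// as the final element, matching the stub call descriptor layout.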
Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
                                  Node* target, Node* context, Node* arg1,
                                  size_t result_size) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kNoFlags, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);

  Node** args = zone()->NewArray<Node*>(2);
  args[0] = arg1;
  args[1] = context;

  return CallN(call_descriptor, target, args);
}

Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
                                  Node* target, Node* context, Node* arg1,
                                  Node* arg2, size_t result_size) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kNoFlags, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);

  Node** args = zone()->NewArray<Node*>(3);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = context;

  return CallN(call_descriptor, target, args);
}

Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
                                  Node* target, Node* context, Node* arg1,
                                  Node* arg2, Node* arg3, size_t result_size) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kNoFlags, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);

  Node** args = zone()->NewArray<Node*>(4);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = arg3;
  args[3] = context;

  return CallN(call_descriptor, target, args);
}

Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
                                  Node* target, Node* context, Node* arg1,
                                  Node* arg2, Node* arg3, Node* arg4,
                                  size_t result_size) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kNoFlags, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);

  Node** args = zone()->NewArray<Node*>(5);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = arg3;
  args[3] = arg4;
  args[4] = context;

  return CallN(call_descriptor, target, args);
}

Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
                                  Node* target, Node* context, Node* arg1,
                                  Node* arg2, Node* arg3, Node* arg4,
                                  Node* arg5, size_t result_size) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kNoFlags, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);

  Node** args = zone()->NewArray<Node*>(6);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = arg3;
  args[3] = arg4;
  args[4] = arg5;
  args[5] = context;

  return CallN(call_descriptor, target, args);
}

Node* CodeStubAssembler::TailCallStub(Callable const& callable, Node* context,
                                      Node* arg1, Node* arg2,
                                      size_t result_size) {
  Node* target = HeapConstant(callable.code());
  return TailCallStub(callable.descriptor(), target, context, arg1, arg2,
                      result_size);
}

Node* CodeStubAssembler::TailCallStub(const CallInterfaceDescriptor& descriptor,
                                      Node* target, Node* context, Node* arg1,
                                      Node* arg2, size_t result_size) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kSupportsTailCalls, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);

  Node** args = zone()->NewArray<Node*>(3);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = context;

  return raw_assembler_->TailCallN(call_descriptor, target, args);
}

Node* CodeStubAssembler::TailCall(
    const CallInterfaceDescriptor& interface_descriptor, Node* code_target,
    Node** args, size_t result_size) {
  CallDescriptor* descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), interface_descriptor,
      interface_descriptor.GetStackParameterCount(),
      CallDescriptor::kSupportsTailCalls, Operator::kNoProperties,
      MachineType::AnyTagged(), result_size);
  return raw_assembler_->TailCallN(descriptor, code_target, args);
}

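// Every control transfer into a CodeStubAssembler::Label first records the
// current variable bindings (MergeVariables) so that Label::Bind() can later
// build phis for any variable whose value differs across incoming edges.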
void CodeStubAssembler::Goto(CodeStubAssembler::Label* label) {
  label->MergeVariables();
  raw_assembler_->Goto(label->label_);
}

void CodeStubAssembler::GotoIf(Node* condition, Label* true_label) {
  Label false_label(this);
  Branch(condition, true_label, &false_label);
  Bind(&false_label);
}

void CodeStubAssembler::GotoUnless(Node* condition, Label* false_label) {
  Label true_label(this);
  Branch(condition, &true_label, false_label);
  Bind(&true_label);
}

void CodeStubAssembler::Branch(Node* condition,
                               CodeStubAssembler::Label* true_label,
                               CodeStubAssembler::Label* false_label) {
  true_label->MergeVariables();
  false_label->MergeVariables();
  return raw_assembler_->Branch(condition, true_label->label_,
                                false_label->label_);
}

void CodeStubAssembler::Switch(Node* index, Label* default_label,
                               int32_t* case_values, Label** case_labels,
                               size_t case_count) {
  RawMachineLabel** labels =
      new (zone()->New(sizeof(RawMachineLabel*) * case_count))
          RawMachineLabel*[case_count];
  for (size_t i = 0; i < case_count; ++i) {
    labels[i] = case_labels[i]->label_;
    case_labels[i]->MergeVariables();
    default_label->MergeVariables();
  }
  return raw_assembler_->Switch(index, default_label->label_, case_values,
                                labels, case_count);
}

// RawMachineAssembler delegate helpers:
Isolate* CodeStubAssembler::isolate() const {
  return raw_assembler_->isolate();
}

Factory* CodeStubAssembler::factory() const { return isolate()->factory(); }

Graph* CodeStubAssembler::graph() const { return raw_assembler_->graph(); }

Zone* CodeStubAssembler::zone() const { return raw_assembler_->zone(); }

// The core implementation of Variable is stored through an indirection so
// that it can outlive the often block-scoped Variable declarations. This is
// needed to ensure that variable binding and merging through phis can
// properly be verified.
class CodeStubAssembler::Variable::Impl : public ZoneObject {
 public:
  explicit Impl(MachineRepresentation rep) : value_(nullptr), rep_(rep) {}
  Node* value_;
  MachineRepresentation rep_;
};

CodeStubAssembler::Variable::Variable(CodeStubAssembler* assembler,
                                      MachineRepresentation rep)
    : impl_(new (assembler->zone()) Impl(rep)) {
  assembler->variables_.push_back(impl_);
}

void CodeStubAssembler::Variable::Bind(Node* value) { impl_->value_ = value; }

Node* CodeStubAssembler::Variable::value() const {
  DCHECK_NOT_NULL(impl_->value_);
  return impl_->value_;
}

MachineRepresentation CodeStubAssembler::Variable::rep() const {
  return impl_->rep_;
}

bool CodeStubAssembler::Variable::IsBound() const {
  return impl_->value_ != nullptr;
}

CodeStubAssembler::Label::Label(CodeStubAssembler* assembler,
                                int merged_value_count,
                                CodeStubAssembler::Variable** merged_variables,
                                CodeStubAssembler::Label::Type type)
    : bound_(false), merge_count_(0), assembler_(assembler), label_(nullptr) {
  void* buffer = assembler->zone()->New(sizeof(RawMachineLabel));
  label_ = new (buffer)
      RawMachineLabel(type == kDeferred ? RawMachineLabel::kDeferred
                                        : RawMachineLabel::kNonDeferred);
  for (int i = 0; i < merged_value_count; ++i) {
    variable_phis_[merged_variables[i]->impl_] = nullptr;
  }
}

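// Called once per control-flow edge entering the label: bumps merge_count_
// and appends each bound variable's current value to that variable's merge
// list for later phi construction.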
void CodeStubAssembler::Label::MergeVariables() {
  ++merge_count_;
  for (auto var : assembler_->variables_) {
    size_t count = 0;
    Node* node = var->value_;
    if (node != nullptr) {
      auto i = variable_merges_.find(var);
      if (i != variable_merges_.end()) {
        i->second.push_back(node);
        count = i->second.size();
      } else {
        count = 1;
        variable_merges_[var] = std::vector<Node*>(1, node);
      }
    }
    // If the following assert fires, then you've jumped to a label without a
    // bound variable along that path that expects to merge its value into a
    // phi.
    DCHECK(variable_phis_.find(var) == variable_phis_.end() ||
           count == merge_count_);
    USE(count);

    // If the label is already bound, we already know the set of variables to
    // merge and phi nodes have already been created.
    if (bound_) {
      auto phi = variable_phis_.find(var);
      if (phi != variable_phis_.end()) {
        DCHECK_NOT_NULL(phi->second);
        assembler_->raw_assembler_->AppendPhiInput(phi->second, node);
      } else {
        auto i = variable_merges_.find(var);
        if (i != variable_merges_.end()) {
          // If the following assert fires, then you've declared a variable
          // that has the same bound value along all paths up until the point
          // you bound this label, but then later merged a path with a new
          // value for the variable after the label bind (it's not possible to
          // add phis to the bound label after the fact, just make sure to
          // list the variable in the label's constructor's list of merged
          // variables).
          DCHECK(std::find_if(i->second.begin(), i->second.end(),
                              [node](Node* e) -> bool { return node != e; }) ==
                 i->second.end());
        }
      }
    }
  }
}

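// Binding a label works in two passes: first the recorded merges are scanned
// to find variables whose incoming values differ (marking them as needing a
// phi), then one phi per marked variable is materialized from all
// merge_count_ incoming values and the assembler's variables are rebound.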
void CodeStubAssembler::Label::Bind() {
  DCHECK(!bound_);
  assembler_->raw_assembler_->Bind(label_);

  // Make sure that all variables that have changed along any path up to this
  // point are marked as merge variables.
  for (auto var : assembler_->variables_) {
    Node* shared_value = nullptr;
    auto i = variable_merges_.find(var);
    if (i != variable_merges_.end()) {
      for (auto value : i->second) {
        DCHECK(value != nullptr);
        if (value != shared_value) {
          if (shared_value == nullptr) {
            shared_value = value;
          } else {
            variable_phis_[var] = nullptr;
          }
        }
      }
    }
  }

  for (auto var : variable_phis_) {
    CodeStubAssembler::Variable::Impl* var_impl = var.first;
    auto i = variable_merges_.find(var_impl);
    // If the following assert fires, then a variable that has been marked as
    // being merged at the label--either by explicitly marking it so in the
    // label constructor or by having seen different bound values at branches
    // into the label--doesn't have a bound value along all of the paths that
    // have been merged into the label up to this point.
    DCHECK(i != variable_merges_.end() && i->second.size() == merge_count_);
    Node* phi = assembler_->raw_assembler_->Phi(
        var.first->rep_, static_cast<int>(merge_count_), &(i->second[0]));
    variable_phis_[var_impl] = phi;
  }

  // Bind all variables to a merge phi, the common value along all paths or
  // null.
  for (auto var : assembler_->variables_) {
    auto i = variable_phis_.find(var);
    if (i != variable_phis_.end()) {
      var->value_ = i->second;
    } else {
      auto j = variable_merges_.find(var);
      if (j != variable_merges_.end() && j->second.size() == merge_count_) {
        var->value_ = j->second.back();
      } else {
        var->value_ = nullptr;
      }
    }
  }

  bound_ = true;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8