| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/code-stubs.h" | 5 #include "src/code-stubs.h" |
| 6 | 6 |
| 7 #include <memory> | 7 #include <memory> |
| 8 | 8 |
| 9 #include "src/bailout-reason.h" | 9 #include "src/bailout-reason.h" |
| 10 #include "src/crankshaft/hydrogen.h" | 10 #include "src/crankshaft/hydrogen.h" |
| (...skipping 326 matching lines...) | |
| 337 return BuildNumberToString(number, Type::Number()); | 337 return BuildNumberToString(number, Type::Number()); |
| 338 } | 338 } |
| 339 | 339 |
| 340 | 340 |
| 341 Handle<Code> NumberToStringStub::GenerateCode() { | 341 Handle<Code> NumberToStringStub::GenerateCode() { |
| 342 return DoGenerateCode(this); | 342 return DoGenerateCode(this); |
| 343 } | 343 } |
| 344 | 344 |
| 345 | 345 |
| 346 template <> | 346 template <> |
| 347 HValue* CodeStubGraphBuilder<FastCloneRegExpStub>::BuildCodeStub() { | |
| 348 HValue* closure = GetParameter(Descriptor::kClosure); | |
| 349 HValue* literal_index = GetParameter(Descriptor::kLiteralIndex); | |
| 350 | |
| 351 // This stub is very performance sensitive; the generated code must be tuned | |
| 352 // so that it doesn't build an eager frame. | |
| 353 info()->MarkMustNotHaveEagerFrame(); | |
| 354 | |
| 355 HValue* literals_array = Add<HLoadNamedField>( | |
| 356 closure, nullptr, HObjectAccess::ForLiteralsPointer()); | |
| 357 HInstruction* boilerplate = Add<HLoadKeyed>( | |
| 358 literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, | |
| 359 NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); | |
| 360 | |
| 361 IfBuilder if_notundefined(this); | |
| 362 if_notundefined.IfNot<HCompareObjectEqAndBranch>( | |
| 363 boilerplate, graph()->GetConstantUndefined()); | |
| 364 if_notundefined.Then(); | |
| 365 { | |
| 366 int result_size = | |
| 367 JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | |
| 368 HValue* result = | |
| 369 Add<HAllocate>(Add<HConstant>(result_size), HType::JSObject(), | |
| 370 NOT_TENURED, JS_REGEXP_TYPE, graph()->GetConstant0()); | |
| 371 Add<HStoreNamedField>( | |
| 372 result, HObjectAccess::ForMap(), | |
| 373 Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap())); | |
| 374 Add<HStoreNamedField>( | |
| 375 result, HObjectAccess::ForPropertiesPointer(), | |
| 376 Add<HLoadNamedField>(boilerplate, nullptr, | |
| 377 HObjectAccess::ForPropertiesPointer())); | |
| 378 Add<HStoreNamedField>( | |
| 379 result, HObjectAccess::ForElementsPointer(), | |
| 380 Add<HLoadNamedField>(boilerplate, nullptr, | |
| 381 HObjectAccess::ForElementsPointer())); | |
| 382 for (int offset = JSObject::kHeaderSize; offset < result_size; | |
| 383 offset += kPointerSize) { | |
| 384 HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(offset); | |
| 385 Add<HStoreNamedField>(result, access, | |
| 386 Add<HLoadNamedField>(boilerplate, nullptr, access)); | |
| 387 } | |
| 388 Push(result); | |
| 389 } | |
| 390 if_notundefined.ElseDeopt( | |
| 391 DeoptimizeReason::kUninitializedBoilerplateInFastClone); | |
| 392 if_notundefined.End(); | |
| 393 | |
| 394 return Pop(); | |
| 395 } | |
| 396 | |
| 397 | |
| 398 Handle<Code> FastCloneRegExpStub::GenerateCode() { | |
| 399 return DoGenerateCode(this); | |
| 400 } | |
| 401 | |
| 402 | |
| 403 template <> | |
| 404 HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() { | 347 HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() { |
| 405 Factory* factory = isolate()->factory(); | 348 Factory* factory = isolate()->factory(); |
| 406 HValue* undefined = graph()->GetConstantUndefined(); | 349 HValue* undefined = graph()->GetConstantUndefined(); |
| 407 AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode(); | 350 AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode(); |
| 408 HValue* closure = GetParameter(Descriptor::kClosure); | 351 HValue* closure = GetParameter(Descriptor::kClosure); |
| 409 HValue* literal_index = GetParameter(Descriptor::kLiteralIndex); | 352 HValue* literal_index = GetParameter(Descriptor::kLiteralIndex); |
| 410 | 353 |
| 411 // TODO(turbofan): This codestub has regressed to need a frame on ia32 at some | 354 // TODO(turbofan): This codestub has regressed to need a frame on ia32 at some |
| 412 // point and wasn't caught since it wasn't built in the snapshot. We should | 355 // point and wasn't caught since it wasn't built in the snapshot. We should |
| 413 // probably just replace with a TurboFan stub rather than fixing it. | 356 // probably just replace with a TurboFan stub rather than fixing it. |
| (...skipping 1625 matching lines...) | |
| 2039 return Pop(); | 1982 return Pop(); |
| 2040 } | 1983 } |
| 2041 | 1984 |
| 2042 | 1985 |
| 2043 Handle<Code> KeyedLoadGenericStub::GenerateCode() { | 1986 Handle<Code> KeyedLoadGenericStub::GenerateCode() { |
| 2044 return DoGenerateCode(this); | 1987 return DoGenerateCode(this); |
| 2045 } | 1988 } |
| 2046 | 1989 |
| 2047 } // namespace internal | 1990 } // namespace internal |
| 2048 } // namespace v8 | 1991 } // namespace v8 |
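For context, the Hydrogen stub removed above performs a field-by-field shallow clone of the regexp boilerplate: it copies the map, properties and elements pointers, then every remaining in-object word in kPointerSize steps, deoptimizing if the boilerplate slot is still undefined. Below is a minimal sketch of that copy pattern in plain C++; the layout constants and field count are illustrative assumptions, not V8's real JSRegExp object layout.

// A minimal sketch of the shallow-clone pattern the deleted stub implements.
// Plain, self-contained C++; the constants below are illustrative assumptions
// and do not match V8's real object layout.
#include <cstdint>
#include <cstring>
#include <iostream>

namespace sketch {

constexpr int kPointerSize = sizeof(void*);
// map, properties, elements -- the three header fields the stub copies
// explicitly before entering its offset loop.
constexpr int kHeaderSize = 3 * kPointerSize;
// Illustrative in-object field count (not JSRegExp::kInObjectFieldCount).
constexpr int kInObjectFieldCount = 4;
constexpr int kResultSize = kHeaderSize + kInObjectFieldCount * kPointerSize;

// Copy the boilerplate into a freshly allocated result of the same size:
// header fields first, then every remaining in-object word, mirroring the
// stub's loop from JSObject::kHeaderSize up to result_size.
void ShallowClone(const uint8_t* boilerplate, uint8_t* result) {
  std::memcpy(result, boilerplate, kHeaderSize);
  for (int offset = kHeaderSize; offset < kResultSize; offset += kPointerSize) {
    std::memcpy(result + offset, boilerplate + offset, kPointerSize);
  }
}

}  // namespace sketch

int main() {
  uint8_t boilerplate[sketch::kResultSize];
  uint8_t clone[sketch::kResultSize];
  for (int i = 0; i < sketch::kResultSize; ++i) {
    boilerplate[i] = static_cast<uint8_t>(i);
  }
  sketch::ShallowClone(boilerplate, clone);
  std::cout << (std::memcmp(boilerplate, clone, sketch::kResultSize) == 0
                    ? "clone matches boilerplate\n"
                    : "mismatch\n");
  return 0;
}

The real stub expresses each copy as a pair of HLoadNamedField/HStoreNamedField instructions and bails out (ElseDeopt) when the boilerplate is still uninitialized; the sketch omits both of those details.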