Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(716)

Side by Side Diff: src/builtins/builtins-constructor.cc

Issue 2605893002: [builtins] More stubs to the builtin-o-sphere. (Closed)
Patch Set: Fixed compile error. Created 3 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/builtins/builtins-constructor.h ('k') | src/code-factory.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2016 the V8 project authors. All rights reserved. 1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/builtins/builtins-constructor.h" 5 #include "src/builtins/builtins-constructor.h"
6 #include "src/ast/ast.h"
6 #include "src/builtins/builtins-utils.h" 7 #include "src/builtins/builtins-utils.h"
7 #include "src/builtins/builtins.h" 8 #include "src/builtins/builtins.h"
8 #include "src/code-factory.h" 9 #include "src/code-factory.h"
9 #include "src/code-stub-assembler.h" 10 #include "src/code-stub-assembler.h"
10 #include "src/interface-descriptors.h" 11 #include "src/interface-descriptors.h"
11 12
12 namespace v8 { 13 namespace v8 {
13 namespace internal { 14 namespace internal {
14 15
15 typedef compiler::Node Node; 16 typedef compiler::Node Node;
(...skipping 333 matching lines...) Expand 10 before | Expand all | Expand 10 after
349 case ScopeType::EVAL_SCOPE: 350 case ScopeType::EVAL_SCOPE:
350 return FastNewFunctionContextEval(); 351 return FastNewFunctionContextEval();
351 case ScopeType::FUNCTION_SCOPE: 352 case ScopeType::FUNCTION_SCOPE:
352 return FastNewFunctionContextFunction(); 353 return FastNewFunctionContextFunction();
353 default: 354 default:
354 UNREACHABLE(); 355 UNREACHABLE();
355 } 356 }
356 return Handle<Code>::null(); 357 return Handle<Code>::null();
357 } 358 }
358 359
// Emits code that clones the JSRegExp boilerplate stored in |closure|'s
// literals array at |literal_index|. If no boilerplate exists yet (the
// literal slot is undefined), falls back to Runtime::kCreateRegExpLiteral,
// which creates it. Returns the cloned (or runtime-created) regexp object.
Node* ConstructorBuiltinsAssembler::EmitFastCloneRegExp(Node* closure,
                                                        Node* literal_index,
                                                        Node* pattern,
                                                        Node* flags,
                                                        Node* context) {
  typedef CodeStubAssembler::Label Label;
  typedef CodeStubAssembler::Variable Variable;
  typedef compiler::Node Node;

  Label call_runtime(this, Label::kDeferred), end(this);

  Variable result(this, MachineRepresentation::kTagged);

  // The boilerplate (if any) lives in the closure's literals array;
  // |literal_index| is a Smi, hence SMI_PARAMETERS.
  Node* literals_array = LoadObjectField(closure, JSFunction::kLiteralsOffset);
  Node* boilerplate =
      LoadFixedArrayElement(literals_array, literal_index,
                            LiteralsArray::kFirstLiteralIndex * kPointerSize,
                            CodeStubAssembler::SMI_PARAMETERS);
  GotoIf(IsUndefined(boilerplate), &call_runtime);

  {
    // Fast path: shallow, field-by-field copy of the boilerplate regexp.
    int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
    Node* copy = Allocate(size);
    for (int offset = 0; offset < size; offset += kPointerSize) {
      // NOTE(review): write barriers are skipped here, presumably because
      // |copy| was just allocated (new space) — confirm against Allocate's
      // pretenuring behavior.
      Node* value = LoadObjectField(boilerplate, offset);
      StoreObjectFieldNoWriteBarrier(copy, offset, value);
    }
    result.Bind(copy);
    Goto(&end);
  }

  // Slow path: no boilerplate yet; let the runtime create the literal.
  Bind(&call_runtime);
  {
    result.Bind(CallRuntime(Runtime::kCreateRegExpLiteral, context, closure,
                            literal_index, pattern, flags));
    Goto(&end);
  }

  Bind(&end);
  return result.value();
}
401
402 TF_BUILTIN(FastCloneRegExp, ConstructorBuiltinsAssembler) {
403 Node* closure = Parameter(FastCloneRegExpDescriptor::kClosure);
404 Node* literal_index = Parameter(FastCloneRegExpDescriptor::kLiteralIndex);
405 Node* pattern = Parameter(FastCloneRegExpDescriptor::kPattern);
406 Node* flags = Parameter(FastCloneRegExpDescriptor::kFlags);
407 Node* context = Parameter(FastCloneRegExpDescriptor::kContext);
408
409 Return(EmitFastCloneRegExp(closure, literal_index, pattern, flags, context));
410 }
411
// Allocates a new JSArray with |capacity| elements of |kind| and shallowly
// copies the (non-empty) |boilerplate_elements| into it. |allocation_site|
// may be nullptr when allocation-site tracking is disabled. Returns the new
// array.
Node* ConstructorBuiltinsAssembler::NonEmptyShallowClone(
    Node* boilerplate, Node* boilerplate_map, Node* boilerplate_elements,
    Node* allocation_site, Node* capacity, ElementsKind kind) {
  typedef CodeStubAssembler::ParameterMode ParameterMode;

  ParameterMode param_mode = OptimalParameterMode();

  Node* length = LoadJSArrayLength(boilerplate);
  // |capacity| arrives as a tagged Smi; convert to the optimal machine
  // representation for the allocation helpers below.
  capacity = TaggedToParameter(capacity, param_mode);

  Node *array, *elements;
  std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
      kind, boilerplate_map, length, allocation_site, capacity, param_mode);

  Comment("copy elements header");
  // Header consists of map and length.
  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
  StoreMap(elements, LoadMap(boilerplate_elements));
  {
    int offset = FixedArrayBase::kLengthOffset;
    StoreObjectFieldNoWriteBarrier(
        elements, offset, LoadObjectField(boilerplate_elements, offset));
  }

  // Convert |length| too before handing it to the element-copy helper.
  length = TaggedToParameter(length, param_mode);

  Comment("copy boilerplate elements");
  // NOTE(review): SKIP_WRITE_BARRIER is presumably safe because |elements|
  // was just allocated above — confirm new-space guarantees.
  CopyFixedArrayElements(kind, boilerplate_elements, elements, length,
                         SKIP_WRITE_BARRIER, param_mode);
  IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1);

  return array;
}
445
446 Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowArray(
447 Node* closure, Node* literal_index, Node* context,
448 CodeAssemblerLabel* call_runtime, AllocationSiteMode allocation_site_mode) {
449 typedef CodeStubAssembler::Label Label;
450 typedef CodeStubAssembler::Variable Variable;
451 typedef compiler::Node Node;
452
453 Label zero_capacity(this), cow_elements(this), fast_elements(this),
454 return_result(this);
455 Variable result(this, MachineRepresentation::kTagged);
456
457 Node* literals_array = LoadObjectField(closure, JSFunction::kLiteralsOffset);
458 Node* allocation_site =
459 LoadFixedArrayElement(literals_array, literal_index,
460 LiteralsArray::kFirstLiteralIndex * kPointerSize,
461 CodeStubAssembler::SMI_PARAMETERS);
462
463 GotoIf(IsUndefined(allocation_site), call_runtime);
464 allocation_site =
465 LoadFixedArrayElement(literals_array, literal_index,
466 LiteralsArray::kFirstLiteralIndex * kPointerSize,
467 CodeStubAssembler::SMI_PARAMETERS);
468
469 Node* boilerplate =
470 LoadObjectField(allocation_site, AllocationSite::kTransitionInfoOffset);
471 Node* boilerplate_map = LoadMap(boilerplate);
472 Node* boilerplate_elements = LoadElements(boilerplate);
473 Node* capacity = LoadFixedArrayBaseLength(boilerplate_elements);
474 allocation_site =
475 allocation_site_mode == TRACK_ALLOCATION_SITE ? allocation_site : nullptr;
476
477 Node* zero = SmiConstant(Smi::kZero);
478 GotoIf(SmiEqual(capacity, zero), &zero_capacity);
479
480 Node* elements_map = LoadMap(boilerplate_elements);
481 GotoIf(IsFixedCOWArrayMap(elements_map), &cow_elements);
482
483 GotoIf(IsFixedArrayMap(elements_map), &fast_elements);
484 {
485 Comment("fast double elements path");
486 if (FLAG_debug_code) {
487 Label correct_elements_map(this), abort(this, Label::kDeferred);
488 Branch(IsFixedDoubleArrayMap(elements_map), &correct_elements_map,
489 &abort);
490
491 Bind(&abort);
492 {
493 Node* abort_id = SmiConstant(
494 Smi::FromInt(BailoutReason::kExpectedFixedDoubleArrayMap));
495 CallRuntime(Runtime::kAbort, context, abort_id);
496 result.Bind(UndefinedConstant());
497 Goto(&return_result);
498 }
499 Bind(&correct_elements_map);
500 }
501
502 Node* array =
503 NonEmptyShallowClone(boilerplate, boilerplate_map, boilerplate_elements,
504 allocation_site, capacity, FAST_DOUBLE_ELEMENTS);
505 result.Bind(array);
506 Goto(&return_result);
507 }
508
509 Bind(&fast_elements);
510 {
511 Comment("fast elements path");
512 Node* array =
513 NonEmptyShallowClone(boilerplate, boilerplate_map, boilerplate_elements,
514 allocation_site, capacity, FAST_ELEMENTS);
515 result.Bind(array);
516 Goto(&return_result);
517 }
518
519 Variable length(this, MachineRepresentation::kTagged),
520 elements(this, MachineRepresentation::kTagged);
521 Label allocate_without_elements(this);
522
523 Bind(&cow_elements);
524 {
525 Comment("fixed cow path");
526 length.Bind(LoadJSArrayLength(boilerplate));
527 elements.Bind(boilerplate_elements);
528
529 Goto(&allocate_without_elements);
530 }
531
532 Bind(&zero_capacity);
533 {
534 Comment("zero capacity path");
535 length.Bind(zero);
536 elements.Bind(LoadRoot(Heap::kEmptyFixedArrayRootIndex));
537
538 Goto(&allocate_without_elements);
539 }
540
541 Bind(&allocate_without_elements);
542 {
543 Node* array = AllocateUninitializedJSArrayWithoutElements(
544 FAST_ELEMENTS, boilerplate_map, length.value(), allocation_site);
545 StoreObjectField(array, JSObject::kElementsOffset, elements.value());
546 result.Bind(array);
547 Goto(&return_result);
548 }
549
550 Bind(&return_result);
551 return result.value();
552 }
553
554 void ConstructorBuiltinsAssembler::CreateFastCloneShallowArrayBuiltin(
555 AllocationSiteMode allocation_site_mode) {
556 typedef compiler::Node Node;
557 typedef CodeStubAssembler::Label Label;
558
559 Node* closure = Parameter(FastCloneShallowArrayDescriptor::kClosure);
560 Node* literal_index =
561 Parameter(FastCloneShallowArrayDescriptor::kLiteralIndex);
562 Node* constant_elements =
563 Parameter(FastCloneShallowArrayDescriptor::kConstantElements);
564 Node* context = Parameter(FastCloneShallowArrayDescriptor::kContext);
565 Label call_runtime(this, Label::kDeferred);
566 Return(EmitFastCloneShallowArray(closure, literal_index, context,
567 &call_runtime, allocation_site_mode));
568
569 Bind(&call_runtime);
570 {
571 Comment("call runtime");
572 Node* flags =
573 SmiConstant(Smi::FromInt(ArrayLiteral::kShallowElements |
574 (allocation_site_mode == TRACK_ALLOCATION_SITE
575 ? 0
576 : ArrayLiteral::kDisableMementos)));
577 Return(CallRuntime(Runtime::kCreateArrayLiteral, context, closure,
578 literal_index, constant_elements, flags));
579 }
580 }
581
// FastCloneShallowArray variant with allocation-site (memento) tracking on.
TF_BUILTIN(FastCloneShallowArrayTrack, ConstructorBuiltinsAssembler) {
  CreateFastCloneShallowArrayBuiltin(TRACK_ALLOCATION_SITE);
}
585
// FastCloneShallowArray variant with allocation-site (memento) tracking off.
TF_BUILTIN(FastCloneShallowArrayDontTrack, ConstructorBuiltinsAssembler) {
  CreateFastCloneShallowArrayBuiltin(DONT_TRACK_ALLOCATION_SITE);
}
589
590 Handle<Code> Builtins::NewCloneShallowArray(
591 AllocationSiteMode allocation_mode) {
592 switch (allocation_mode) {
593 case TRACK_ALLOCATION_SITE:
594 return FastCloneShallowArrayTrack();
595 case DONT_TRACK_ALLOCATION_SITE:
596 return FastCloneShallowArrayDontTrack();
597 default:
598 UNREACHABLE();
599 }
600 return Handle<Code>::null();
601 }
602
603 // static
604 int ConstructorBuiltinsAssembler::FastCloneShallowObjectPropertiesCount(
605 int literal_length) {
606 // This heuristic of setting empty literals to have
607 // kInitialGlobalObjectUnusedPropertiesCount must remain in-sync with the
608 // runtime.
609 // TODO(verwaest): Unify this with the heuristic in the runtime.
610 return literal_length == 0
611 ? JSObject::kInitialGlobalObjectUnusedPropertiesCount
612 : literal_length;
613 }
614
// Emits code that shallowly clones the object boilerplate referenced by
// |closure|'s literals array at |literals_index|, sized for
// |properties_count| in-object properties. Jumps to |call_runtime| when no
// allocation site exists yet or the boilerplate's instance size does not
// match the expected size. Returns the raw (word-by-word) copy.
Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowObject(
    CodeAssemblerLabel* call_runtime, Node* closure, Node* literals_index,
    Node* properties_count) {
  Node* literals_array = LoadObjectField(closure, JSFunction::kLiteralsOffset);
  Node* allocation_site =
      LoadFixedArrayElement(literals_array, literals_index,
                            LiteralsArray::kFirstLiteralIndex * kPointerSize,
                            CodeStubAssembler::SMI_PARAMETERS);
  GotoIf(IsUndefined(allocation_site), call_runtime);

  // Calculate the object and allocation size based on the properties count.
  Node* object_size = IntPtrAdd(WordShl(properties_count, kPointerSizeLog2),
                                IntPtrConstant(JSObject::kHeaderSize));
  Node* allocation_size = object_size;
  if (FLAG_allocation_site_pretenuring) {
    // Reserve extra room for the AllocationMemento placed after the object.
    allocation_size =
        IntPtrAdd(object_size, IntPtrConstant(AllocationMemento::kSize));
  }
  Node* boilerplate =
      LoadObjectField(allocation_site, AllocationSite::kTransitionInfoOffset);
  Node* boilerplate_map = LoadMap(boilerplate);
  Node* instance_size = LoadMapInstanceSize(boilerplate_map);
  Node* size_in_words = WordShr(object_size, kPointerSizeLog2);
  // Bail out if the boilerplate's actual size differs from what this stub
  // variant was specialized for.
  GotoUnless(WordEqual(instance_size, size_in_words), call_runtime);

  Node* copy = Allocate(allocation_size);

  // Copy boilerplate elements.
  Variable offset(this, MachineType::PointerRepresentation());
  // Start at -kHeapObjectTag so that tagged-pointer-relative loads/stores
  // address the object's true first word.
  offset.Bind(IntPtrConstant(-kHeapObjectTag));
  Node* end_offset = IntPtrAdd(object_size, offset.value());
  Label loop_body(this, &offset), loop_check(this, &offset);
  // We should always have an object size greater than zero.
  Goto(&loop_body);
  Bind(&loop_body);
  {
    // The Allocate above guarantees that the copy lies in new space. This
    // allows us to skip write barriers. This is necessary since we may also be
    // copying unboxed doubles.
    Node* field = Load(MachineType::IntPtr(), boilerplate, offset.value());
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), copy,
                        offset.value(), field);
    Goto(&loop_check);
  }
  Bind(&loop_check);
  {
    // Advance one word at a time until the whole object has been copied.
    offset.Bind(IntPtrAdd(offset.value(), IntPtrConstant(kPointerSize)));
    GotoUnless(IntPtrGreaterThanOrEqual(offset.value(), end_offset),
               &loop_body);
  }

  if (FLAG_allocation_site_pretenuring) {
    // Initialize the memento right behind the object and bump the allocation
    // site's creation count for the pretenuring decision.
    Node* memento = InnerAllocate(copy, object_size);
    StoreMapNoWriteBarrier(memento, Heap::kAllocationMementoMapRootIndex);
    StoreObjectFieldNoWriteBarrier(
        memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
    Node* memento_create_count = LoadObjectField(
        allocation_site, AllocationSite::kPretenureCreateCountOffset);
    memento_create_count =
        SmiAdd(memento_create_count, SmiConstant(Smi::FromInt(1)));
    StoreObjectFieldNoWriteBarrier(allocation_site,
                                   AllocationSite::kPretenureCreateCountOffset,
                                   memento_create_count);
  }

  // TODO(verwaest): Allocate and fill in double boxes.
  return copy;
}
683
684 void ConstructorBuiltinsAssembler::CreateFastCloneShallowObjectBuiltin(
685 int properties_count) {
686 DCHECK_GE(properties_count, 0);
687 DCHECK_LE(properties_count, kMaximumClonedShallowObjectProperties);
688 Label call_runtime(this);
689 Node* closure = Parameter(0);
690 Node* literals_index = Parameter(1);
691
692 Node* properties_count_node =
693 IntPtrConstant(FastCloneShallowObjectPropertiesCount(properties_count));
694 Node* copy = EmitFastCloneShallowObject(
695 &call_runtime, closure, literals_index, properties_count_node);
696 Return(copy);
697
698 Bind(&call_runtime);
699 Node* constant_properties = Parameter(2);
700 Node* flags = Parameter(3);
701 Node* context = Parameter(4);
702 TailCallRuntime(Runtime::kCreateObjectLiteral, context, closure,
703 literals_index, constant_properties, flags);
704 }
705
// Stamps out one FastCloneShallowObject builtin per supported property
// count; NewCloneShallowObject below dispatches over the same 0..6 range.
#define SHALLOW_OBJECT_BUILTIN(props)                                       \
  TF_BUILTIN(FastCloneShallowObject##props, ConstructorBuiltinsAssembler) { \
    CreateFastCloneShallowObjectBuiltin(props);                             \
  }

SHALLOW_OBJECT_BUILTIN(0);
SHALLOW_OBJECT_BUILTIN(1);
SHALLOW_OBJECT_BUILTIN(2);
SHALLOW_OBJECT_BUILTIN(3);
SHALLOW_OBJECT_BUILTIN(4);
SHALLOW_OBJECT_BUILTIN(5);
SHALLOW_OBJECT_BUILTIN(6);
718
// Returns the FastCloneShallowObject builtin specialized for |length|
// literal properties. |length| outside [0, 6] is a caller bug.
Handle<Code> Builtins::NewCloneShallowObject(int length) {
  switch (length) {
    case 0:
      return FastCloneShallowObject0();
    case 1:
      return FastCloneShallowObject1();
    case 2:
      return FastCloneShallowObject2();
    case 3:
      return FastCloneShallowObject3();
    case 4:
      return FastCloneShallowObject4();
    case 5:
      return FastCloneShallowObject5();
    case 6:
      return FastCloneShallowObject6();
    default:
      UNREACHABLE();
  }
  // Not reached; placates compilers that require a return on all paths.
  return Handle<Code>::null();
}
740
359 } // namespace internal 741 } // namespace internal
360 } // namespace v8 742 } // namespace v8
OLDNEW
« no previous file with comments | « src/builtins/builtins-constructor.h ('k') | src/code-factory.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698