Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(554)

Side by Side Diff: src/code-stub-assembler.cc

Issue 2380953002: [stubs] Generalize loop handling in CodeStubAssembler and improve common loop performance (Closed)
Patch Set: y Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/code-stub-assembler.h ('k') | src/code-stubs.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2016 the V8 project authors. All rights reserved. 1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/code-stub-assembler.h" 5 #include "src/code-stub-assembler.h"
6 #include "src/code-factory.h" 6 #include "src/code-factory.h"
7 #include "src/frames-inl.h" 7 #include "src/frames-inl.h"
8 #include "src/frames.h" 8 #include "src/frames.h"
9 #include "src/ic/handler-configuration.h" 9 #include "src/ic/handler-configuration.h"
10 #include "src/ic/stub-cache.h" 10 #include "src/ic/stub-cache.h"
(...skipping 1420 matching lines...) Expand 10 before | Expand all | Expand 10 after
1431 Heap::RootListIndex value_root_index, ParameterMode mode) { 1431 Heap::RootListIndex value_root_index, ParameterMode mode) {
1432 bool is_double = IsFastDoubleElementsKind(kind); 1432 bool is_double = IsFastDoubleElementsKind(kind);
1433 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex || 1433 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
1434 value_root_index == Heap::kUndefinedValueRootIndex); 1434 value_root_index == Heap::kUndefinedValueRootIndex);
1435 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex); 1435 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex);
1436 STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32); 1436 STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32);
1437 Node* double_hole = 1437 Node* double_hole =
1438 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32); 1438 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);
1439 Node* value = LoadRoot(value_root_index); 1439 Node* value = LoadRoot(value_root_index);
1440 1440
1441 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag; 1441 BuildFastFixedArrayForEach(
1442 int32_t to; 1442 array, kind, from_node, to_node,
1443 bool constant_to = ToInt32Constant(to_node, to); 1443 [value, is_double, double_hole](CodeStubAssembler* assembler, Node* array,
1444 int32_t from; 1444 Node* offset) {
1445 bool constant_from = ToInt32Constant(from_node, from); 1445 if (is_double) {
1446 if (constant_to && constant_from && 1446 // Don't use doubles to store the hole double, since manipulating the
1447 (to - from) <= kElementLoopUnrollThreshold) { 1447 // signaling NaN used for the hole in C++, e.g. with bit_cast, will
1448 for (int i = from; i < to; ++i) { 1448 // change its value on ia32 (the x87 stack is used to return values
1449 Node* index = IntPtrConstant(i); 1449 // and stores to the stack silently clear the signalling bit).
1450 if (is_double) { 1450 //
1451 Node* offset = ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS, 1451 // TODO(danno): When we have a Float32/Float64 wrapper class that
1452 first_element_offset); 1452 // preserves double bits during manipulation, remove this code/change
1453 // Don't use doubles to store the hole double, since manipulating the 1453 // this to an indexed Float64 store.
1454 // signaling NaN used for the hole in C++, e.g. with bit_cast, will 1454 if (assembler->Is64()) {
1455 // change its value on ia32 (the x87 stack is used to return values 1455 assembler->StoreNoWriteBarrier(MachineRepresentation::kWord64,
1456 // and stores to the stack silently clear the signalling bit). 1456 array, offset, double_hole);
1457 // 1457 } else {
1458 // TODO(danno): When we have a Float32/Float64 wrapper class that 1458 assembler->StoreNoWriteBarrier(MachineRepresentation::kWord32,
1459 // preserves double bits during manipulation, remove this code/change 1459 array, offset, double_hole);
1460 // this to an indexed Float64 store. 1460 assembler->StoreNoWriteBarrier(
1461 if (Is64()) { 1461 MachineRepresentation::kWord32, array,
1462 StoreNoWriteBarrier(MachineRepresentation::kWord64, array, offset, 1462 assembler->IntPtrAdd(offset,
1463 double_hole); 1463 assembler->IntPtrConstant(kPointerSize)),
1464 double_hole);
1465 }
1464 } else { 1466 } else {
1465 StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset, 1467 assembler->StoreNoWriteBarrier(MachineRepresentation::kTagged, array,
1466 double_hole); 1468 offset, value);
1467 offset = ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
1468 first_element_offset + kPointerSize);
1469 StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
1470 double_hole);
1471 } 1469 }
1472 } else { 1470 },
1473 StoreFixedArrayElement(array, index, value, SKIP_WRITE_BARRIER, 1471 mode);
1474 INTPTR_PARAMETERS);
1475 }
1476 }
1477 } else {
1478 Variable current(this, MachineRepresentation::kTagged);
1479 Label test(this);
1480 Label decrement(this, &current);
1481 Label done(this);
1482 Node* limit =
1483 IntPtrAdd(array, ElementOffsetFromIndex(from_node, kind, mode));
1484 current.Bind(IntPtrAdd(array, ElementOffsetFromIndex(to_node, kind, mode)));
1485
1486 Branch(WordEqual(current.value(), limit), &done, &decrement);
1487
1488 Bind(&decrement);
1489 current.Bind(IntPtrSub(
1490 current.value(),
1491 IntPtrConstant(IsFastDoubleElementsKind(kind) ? kDoubleSize
1492 : kPointerSize)));
1493 if (is_double) {
1494 // Don't use doubles to store the hole double, since manipulating the
1495 // signaling NaN used for the hole in C++, e.g. with bit_cast, will
1496 // change its value on ia32 (the x87 stack is used to return values
1497 // and stores to the stack silently clear the signalling bit).
1498 //
1499 // TODO(danno): When we have a Float32/Float64 wrapper class that
1500 // preserves double bits during manipulation, remove this code/change
1501 // this to an indexed Float64 store.
1502 if (Is64()) {
1503 StoreNoWriteBarrier(MachineRepresentation::kWord64, current.value(),
1504 Int64Constant(first_element_offset), double_hole);
1505 } else {
1506 StoreNoWriteBarrier(MachineRepresentation::kWord32, current.value(),
1507 Int32Constant(first_element_offset), double_hole);
1508 StoreNoWriteBarrier(MachineRepresentation::kWord32, current.value(),
1509 Int32Constant(kPointerSize + first_element_offset),
1510 double_hole);
1511 }
1512 } else {
1513 StoreNoWriteBarrier(MachineType::PointerRepresentation(), current.value(),
1514 IntPtrConstant(first_element_offset), value);
1515 }
1516 Node* compare = WordNotEqual(current.value(), limit);
1517 Branch(compare, &decrement, &done);
1518
1519 Bind(&done);
1520 }
1521 } 1472 }
1522 1473
1523 void CodeStubAssembler::CopyFixedArrayElements( 1474 void CodeStubAssembler::CopyFixedArrayElements(
1524 ElementsKind from_kind, Node* from_array, ElementsKind to_kind, 1475 ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
1525 Node* to_array, Node* element_count, Node* capacity, 1476 Node* to_array, Node* element_count, Node* capacity,
1526 WriteBarrierMode barrier_mode, ParameterMode mode) { 1477 WriteBarrierMode barrier_mode, ParameterMode mode) {
1527 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); 1478 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
1528 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag; 1479 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
1529 Comment("[ CopyFixedArrayElements"); 1480 Comment("[ CopyFixedArrayElements");
1530 1481
(...skipping 1761 matching lines...) Expand 10 before | Expand all | Expand 10 after
3292 var_entry->Bind(entry); 3243 var_entry->Bind(entry);
3293 Goto(&loop); 3244 Goto(&loop);
3294 } 3245 }
3295 } 3246 }
3296 3247
3297 void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name, 3248 void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name,
3298 Node* descriptors, Node* nof, 3249 Node* descriptors, Node* nof,
3299 Label* if_found, 3250 Label* if_found,
3300 Variable* var_name_index, 3251 Variable* var_name_index,
3301 Label* if_not_found) { 3252 Label* if_not_found) {
3302 Variable var_descriptor(this, MachineType::PointerRepresentation()); 3253 Node* first_inclusive = IntPtrConstant(DescriptorArray::ToKeyIndex(0));
3303 Label loop(this, &var_descriptor); 3254 Node* factor = IntPtrConstant(DescriptorArray::kDescriptorSize);
3304 var_descriptor.Bind(IntPtrConstant(0)); 3255 Node* last_exclusive = IntPtrAdd(first_inclusive, IntPtrMul(nof, factor));
3305 Goto(&loop);
3306 3256
3307 Bind(&loop); 3257 BuildFastLoop(
3308 { 3258 MachineType::PointerRepresentation(), last_exclusive, first_inclusive,
3309 Node* index = var_descriptor.value(); 3259 [descriptors, unique_name, if_found, var_name_index](
3310 Node* name_offset = IntPtrConstant(DescriptorArray::ToKeyIndex(0)); 3260 CodeStubAssembler* assembler, Node* name_index) {
3311 Node* factor = IntPtrConstant(DescriptorArray::kDescriptorSize); 3261 Node* candidate_name = assembler->LoadFixedArrayElement(
3312 GotoIf(WordEqual(index, nof), if_not_found); 3262 descriptors, name_index, 0, INTPTR_PARAMETERS);
3313 Node* name_index = IntPtrAdd(name_offset, IntPtrMul(index, factor)); 3263 var_name_index->Bind(name_index);
3314 Node* candidate_name = 3264 assembler->GotoIf(assembler->WordEqual(candidate_name, unique_name),
3315 LoadFixedArrayElement(descriptors, name_index, 0, INTPTR_PARAMETERS); 3265 if_found);
3316 var_name_index->Bind(name_index); 3266 },
3317 GotoIf(WordEqual(candidate_name, unique_name), if_found); 3267 -DescriptorArray::kDescriptorSize, IndexAdvanceMode::kPre);
3318 var_descriptor.Bind(IntPtrAdd(index, IntPtrConstant(1))); 3268 Goto(if_not_found);
3319 Goto(&loop);
3320 }
3321 } 3269 }
3322 3270
3323 void CodeStubAssembler::TryLookupProperty( 3271 void CodeStubAssembler::TryLookupProperty(
3324 Node* object, Node* map, Node* instance_type, Node* unique_name, 3272 Node* object, Node* map, Node* instance_type, Node* unique_name,
3325 Label* if_found_fast, Label* if_found_dict, Label* if_found_global, 3273 Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
3326 Variable* var_meta_storage, Variable* var_name_index, Label* if_not_found, 3274 Variable* var_meta_storage, Variable* var_name_index, Label* if_not_found,
3327 Label* if_bailout) { 3275 Label* if_bailout) {
3328 DCHECK_EQ(MachineRepresentation::kTagged, var_meta_storage->rep()); 3276 DCHECK_EQ(MachineRepresentation::kTagged, var_meta_storage->rep());
3329 DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep()); 3277 DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
3330 3278
(...skipping 2362 matching lines...) Expand 10 before | Expand all | Expand 10 after
5693 StoreObjectField(cell, WeakCell::kValueOffset, value); 5641 StoreObjectField(cell, WeakCell::kValueOffset, value);
5694 StoreObjectFieldRoot(cell, WeakCell::kNextOffset, 5642 StoreObjectFieldRoot(cell, WeakCell::kNextOffset,
5695 Heap::kTheHoleValueRootIndex); 5643 Heap::kTheHoleValueRootIndex);
5696 5644
5697 // Store the WeakCell in the feedback vector. 5645 // Store the WeakCell in the feedback vector.
5698 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, 5646 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER,
5699 CodeStubAssembler::SMI_PARAMETERS); 5647 CodeStubAssembler::SMI_PARAMETERS);
5700 return cell; 5648 return cell;
5701 } 5649 }
5702 5650
5651 void CodeStubAssembler::BuildFastLoop(
5652 MachineRepresentation index_rep, Node* start_index, Node* end_index,
5653 std::function<void(CodeStubAssembler* assembler, Node* index)> body,
5654 int increment, IndexAdvanceMode mode) {
5655 Variable var(this, index_rep);
5656 var.Bind(start_index);
5657 Label loop(this, &var);
5658 Label after_loop(this);
5659 BranchIf(WordEqual(var.value(), end_index), &after_loop, &loop);
5660 Bind(&loop);
5661 {
Igor Sheludko 2016/10/04 09:32:25 Why not just put GotoIf(WordEqual(var.value(), end_index), &after_loop); at the top of the loop body?
danno 2016/10/04 16:10:23 As discussed offline, for efficiency. Comment added.
5662 if (mode == IndexAdvanceMode::kPre) {
5663 var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment)));
5664 }
5665 body(this, var.value());
5666 if (mode == IndexAdvanceMode::kPost) {
5667 var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment)));
5668 }
5669 BranchIf(WordNotEqual(var.value(), end_index), &loop, &after_loop);
5670 }
5671 Bind(&after_loop);
5672 }
5673
5674 void CodeStubAssembler::BuildFastFixedArrayForEach(
5675 compiler::Node* fixed_array, ElementsKind kind,
5676 compiler::Node* first_element_inclusive,
5677 compiler::Node* last_element_exclusive,
5678 std::function<void(CodeStubAssembler* assembler,
5679 compiler::Node* fixed_array, compiler::Node* offset)>
5680 body,
5681 ParameterMode mode, ForEachDirection direction) {
Igor Sheludko 2016/10/04 09:32:25 I think we should put this assert somewhere in this function.
danno 2016/10/04 16:10:23 Done.
5682 int32_t first_val;
5683 bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
5684 int32_t last_val;
5685 bool constent_last = ToInt32Constant(last_element_exclusive, last_val);
5686 if (constant_first && constent_last) {
5687 int delta = last_val - first_val;
5688 DCHECK(delta >= 0);
5689 if (delta <= kElementLoopUnrollThreshold) {
5690 if (direction == ForEachDirection::kForward) {
5691 for (int i = first_val; i < last_val; ++i) {
5692 Node* index = IntPtrConstant(i);
5693 Node* offset =
5694 ElementOffsetFromIndex(index, FAST_ELEMENTS, mode,
Igor Sheludko 2016/10/04 09:32:25 I think you intended to use |kind| instead of |FAST_ELEMENTS| here.
danno 2016/10/04 16:10:23 Done.
5695 FixedArray::kHeaderSize - kHeapObjectTag);
5696 body(this, fixed_array, offset);
5697 }
5698 } else {
5699 for (int i = last_val - 1; i >= first_val; --i) {
5700 Node* index = IntPtrConstant(i);
5701 Node* offset =
5702 ElementOffsetFromIndex(index, FAST_ELEMENTS, mode,
Igor Sheludko 2016/10/04 09:32:25 Same here
danno 2016/10/04 16:10:23 Done.
5703 FixedArray::kHeaderSize - kHeapObjectTag);
5704 body(this, fixed_array, offset);
5705 }
5706 }
5707 return;
5708 }
5709 }
5710
5711 Node* start =
5712 ElementOffsetFromIndex(first_element_inclusive, kind, mode,
5713 FixedArray::kHeaderSize - kHeapObjectTag);
5714 Node* limit =
5715 ElementOffsetFromIndex(last_element_exclusive, kind, mode,
5716 FixedArray::kHeaderSize - kHeapObjectTag);
5717 if (direction == ForEachDirection::kReverse) std::swap(start, limit);
5718
5719 int increment = IsFastDoubleElementsKind(kind) ? kDoubleSize : kPointerSize;
5720 BuildFastLoop(
5721 MachineType::PointerRepresentation(), start, limit,
5722 [fixed_array, body](CodeStubAssembler* assembler, Node* index) {
Igor Sheludko 2016/10/04 09:32:25 s/index/offset/
danno 2016/10/04 16:10:23 Done.
5723 body(assembler, fixed_array, index);
5724 },
5725 direction == ForEachDirection::kReverse ? -increment : increment,
5726 direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
5727 : IndexAdvanceMode::kPost);
5728 }
5729
5703 } // namespace internal 5730 } // namespace internal
5704 } // namespace v8 5731 } // namespace v8
OLDNEW
« no previous file with comments | « src/code-stub-assembler.h ('k') | src/code-stubs.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698