Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(232)

Side by Side Diff: src/code-stub-assembler.cc

Issue 2497243002: [stubs] Port builtin for Array.push fast-case from Crankshaft to TF (Closed)
Patch Set: Cleanup Created 4 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2016 the V8 project authors. All rights reserved. 1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 #include "src/code-stub-assembler.h" 4 #include "src/code-stub-assembler.h"
5 #include "src/code-factory.h" 5 #include "src/code-factory.h"
6 #include "src/frames-inl.h" 6 #include "src/frames-inl.h"
7 #include "src/frames.h" 7 #include "src/frames.h"
8 #include "src/ic/handler-configuration.h" 8 #include "src/ic/handler-configuration.h"
9 #include "src/ic/stub-cache.h" 9 #include "src/ic/stub-cache.h"
10 10
(...skipping 509 matching lines...) Expand 10 before | Expand all | Expand 10 after
520 520
521 Bind(&return_result); 521 Bind(&return_result);
522 return var_result.value(); 522 return var_result.value();
523 } 523 }
524 524
525 Node* CodeStubAssembler::TaggedIsSmi(Node* a) { 525 Node* CodeStubAssembler::TaggedIsSmi(Node* a) {
526 return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)), 526 return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
527 IntPtrConstant(0)); 527 IntPtrConstant(0));
528 } 528 }
529 529
530 Node* CodeStubAssembler::TaggedIsNotSmi(Node* a) {
531 return WordNotEqual(
532 WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
533 IntPtrConstant(0));
534 }
535
530 Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) { 536 Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
531 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)), 537 return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
532 IntPtrConstant(0)); 538 IntPtrConstant(0));
533 } 539 }
534 540
535 Node* CodeStubAssembler::WordIsWordAligned(Node* word) { 541 Node* CodeStubAssembler::WordIsWordAligned(Node* word) {
536 return WordEqual(IntPtrConstant(0), 542 return WordEqual(IntPtrConstant(0),
537 WordAnd(word, IntPtrConstant((1 << kPointerSizeLog2) - 1))); 543 WordAnd(word, IntPtrConstant((1 << kPointerSizeLog2) - 1)));
538 } 544 }
539 545
(...skipping 108 matching lines...) Expand 10 before | Expand all | Expand 10 after
648 654
649 void CodeStubAssembler::BranchIfJSObject(Node* object, Label* if_true, 655 void CodeStubAssembler::BranchIfJSObject(Node* object, Label* if_true,
650 Label* if_false) { 656 Label* if_false) {
651 GotoIf(TaggedIsSmi(object), if_false); 657 GotoIf(TaggedIsSmi(object), if_false);
652 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE); 658 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
653 Branch(Int32GreaterThanOrEqual(LoadInstanceType(object), 659 Branch(Int32GreaterThanOrEqual(LoadInstanceType(object),
654 Int32Constant(FIRST_JS_OBJECT_TYPE)), 660 Int32Constant(FIRST_JS_OBJECT_TYPE)),
655 if_true, if_false); 661 if_true, if_false);
656 } 662 }
657 663
658 void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context, 664 void CodeStubAssembler::BranchIfFastJSArray(
659 Label* if_true, Label* if_false) { 665 Node* object, Node* context, CodeStubAssembler::FastJSArrayAccessMode mode,
666 Label* if_true, Label* if_false) {
660 // Bailout if receiver is a Smi. 667 // Bailout if receiver is a Smi.
661 GotoIf(TaggedIsSmi(object), if_false); 668 GotoIf(TaggedIsSmi(object), if_false);
662 669
663 Node* map = LoadMap(object); 670 Node* map = LoadMap(object);
664 671
665 // Bailout if instance type is not JS_ARRAY_TYPE. 672 // Bailout if instance type is not JS_ARRAY_TYPE.
666 GotoIf(WordNotEqual(LoadMapInstanceType(map), Int32Constant(JS_ARRAY_TYPE)), 673 GotoIf(WordNotEqual(LoadMapInstanceType(map), Int32Constant(JS_ARRAY_TYPE)),
667 if_false); 674 if_false);
668 675
669 Node* elements_kind = LoadMapElementsKind(map); 676 Node* elements_kind = LoadMapElementsKind(map);
670 677
671 // Bailout if receiver has slow elements. 678 // Bailout if receiver has slow elements.
672 GotoUnless(IsFastElementsKind(elements_kind), if_false); 679 GotoUnless(IsFastElementsKind(elements_kind), if_false);
673 680
674 // Check prototype chain if receiver does not have packed elements. 681 // Check prototype chain if receiver does not have packed elements.
675 GotoUnless(IsHoleyFastElementsKind(elements_kind), if_true); 682 if (mode == FastJSArrayAccessMode::INBOUNDS_READ) {
676 683 GotoUnless(IsHoleyFastElementsKind(elements_kind), if_true);
684 }
677 BranchIfPrototypesHaveNoElements(map, if_true, if_false); 685 BranchIfPrototypesHaveNoElements(map, if_true, if_false);
678 } 686 }
679 687
680 Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes, 688 Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
681 AllocationFlags flags, 689 AllocationFlags flags,
682 Node* top_address, 690 Node* top_address,
683 Node* limit_address) { 691 Node* limit_address) {
684 Node* top = Load(MachineType::Pointer(), top_address); 692 Node* top = Load(MachineType::Pointer(), top_address);
685 Node* limit = Load(MachineType::Pointer(), limit_address); 693 Node* limit = Load(MachineType::Pointer(), limit_address);
686 694
(...skipping 668 matching lines...) Expand 10 before | Expand all | Expand 10 after
1355 Node* CodeStubAssembler::StoreFixedDoubleArrayElement( 1363 Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
1356 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) { 1364 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) {
1357 CSA_ASSERT(this, IsFixedDoubleArray(object)); 1365 CSA_ASSERT(this, IsFixedDoubleArray(object));
1358 Node* offset = 1366 Node* offset =
1359 ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode, 1367 ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode,
1360 FixedArray::kHeaderSize - kHeapObjectTag); 1368 FixedArray::kHeaderSize - kHeapObjectTag);
1361 MachineRepresentation rep = MachineRepresentation::kFloat64; 1369 MachineRepresentation rep = MachineRepresentation::kFloat64;
1362 return StoreNoWriteBarrier(rep, object, offset, value); 1370 return StoreNoWriteBarrier(rep, object, offset, value);
1363 } 1371 }
1364 1372
1373 Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* context,
1374 Node* array,
1375 CodeStubArguments& args,
1376 Variable& arg_index,
1377 Label* bailout) {
1378 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
1379 Label pre_bailout(this);
1380 Label success(this);
1381 Variable elements(this, MachineRepresentation::kTagged);
1382 ParameterMode mode = OptimalParameterMode();
1383 Variable capacity(this, OptimalParameterRepresentation());
1384 Variable length(this, OptimalParameterRepresentation());
1385 length.Bind(UntagParameter(LoadJSArrayLength(array), mode));
1386 elements.Bind(LoadElements(array));
1387 capacity.Bind(
Jakob Kummerow 2016/11/23 17:17:06 I think |capacity| could simply be a Node*, I see no reason for it to be a Variable here.
danno 2016/11/29 14:39:59 Done.
1388 UntagParameter(LoadFixedArrayBaseLength(elements.value()), mode));
1389
1390 // Resize the capacity of the fixed array if it doesn't fit.
1391 CodeStubAssembler::VariableList grow_vars({&capacity, &elements}, zone());
1392 Label fits(this, grow_vars);
1393 Node* first = arg_index.value();
1394 Node* growth = IntPtrSubFoldConstants(args.GetLength(), first);
1395 Node* new_length = IntPtrAdd(
1396 mode == INTPTR_PARAMETERS ? growth : SmiTag(growth), length.value());
1397 GotoUnless(IntPtrGreaterThanOrEqual(new_length, capacity.value()), &fits);
1398 Node* new_capacity = CalculateNewElementsCapacity(
1399 IntPtrAdd(new_length, IntPtrOrSmiConstant(1, mode)), mode);
1400 elements.Bind(GrowElementsCapacity(array, elements.value(), kind, kind,
1401 capacity.value(), new_capacity, mode,
1402 &pre_bailout));
1403 Goto(&fits);
1404 Bind(&fits);
1405
1406 // Push each argument onto the end of the array now that there is enough
1407 // capacity.
1408 CodeStubAssembler::VariableList push_vars({&length, &elements}, zone());
1409 args.ForEach(
1410 push_vars,
1411 [kind, array, context, mode, &length, &elements, &pre_bailout](
Jakob Kummerow 2016/11/23 17:17:06 nit: |array| and |context| appear to be unused, drop them from the capture list?
danno 2016/11/29 14:39:59 Done.
1412 CodeStubAssembler* assembler, Node* arg) {
1413 if (IsFastSmiElementsKind(kind)) {
1414 assembler->GotoUnless(assembler->TaggedIsSmi(arg), &pre_bailout);
Jakob Kummerow 2016/11/23 17:17:05 nit: now that you have TaggedIsNotSmi, you could use it here.
danno 2016/11/29 14:39:59 Done.
1415 } else if (IsFastDoubleElementsKind(kind)) {
1416 assembler->GotoIfNotNumber(arg, &pre_bailout);
1417 }
1418 if (IsFastDoubleElementsKind(kind)) {
1419 assembler->StoreFixedDoubleArrayElement(
1420 elements.value(), length.value(),
1421 assembler->ChangeNumberToFloat64(arg), mode);
1422 } else {
1423 WriteBarrierMode barrier_mode = IsFastSmiElementsKind(kind)
1424 ? SKIP_WRITE_BARRIER
1425 : UPDATE_WRITE_BARRIER;
1426 assembler->StoreFixedArrayElement(elements.value(), length.value(),
1427 arg, barrier_mode, mode);
1428 }
1429 assembler->Increment(length, 1, mode);
1430 },
1431 first, nullptr);
1432 length.Bind(TagParameter(length.value(), mode));
1433 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length.value());
1434 Goto(&success);
1435
1436 Bind(&pre_bailout);
1437 length.Bind(TagParameter(length.value(), mode));
1438 Node* diff = SmiSub(length.value(), LoadJSArrayLength(array));
1439 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length.value());
1440 arg_index.Bind(IntPtrAdd(arg_index.value(), SmiUntag(diff)));
1441 Goto(bailout);
1442
1443 Bind(&success);
1444 return length.value();
1445 }
1446
1365 Node* CodeStubAssembler::AllocateHeapNumber(MutableMode mode) { 1447 Node* CodeStubAssembler::AllocateHeapNumber(MutableMode mode) {
1366 Node* result = Allocate(HeapNumber::kSize, kNone); 1448 Node* result = Allocate(HeapNumber::kSize, kNone);
1367 Heap::RootListIndex heap_map_index = 1449 Heap::RootListIndex heap_map_index =
1368 mode == IMMUTABLE ? Heap::kHeapNumberMapRootIndex 1450 mode == IMMUTABLE ? Heap::kHeapNumberMapRootIndex
1369 : Heap::kMutableHeapNumberMapRootIndex; 1451 : Heap::kMutableHeapNumberMapRootIndex;
1370 Node* map = LoadRoot(heap_map_index); 1452 Node* map = LoadRoot(heap_map_index);
1371 StoreMapNoWriteBarrier(result, map); 1453 StoreMapNoWriteBarrier(result, map);
1372 return result; 1454 return result;
1373 } 1455 }
1374 1456
(...skipping 748 matching lines...) Expand 10 before | Expand all | Expand 10 after
2123 to_index_smi == from_index_smi)); 2205 to_index_smi == from_index_smi));
2124 BuildFastLoop(vars, MachineType::PointerRepresentation(), from_offset, 2206 BuildFastLoop(vars, MachineType::PointerRepresentation(), from_offset,
2125 limit_offset, 2207 limit_offset,
2126 [from_string, to_string, &current_to_offset, to_increment, type, 2208 [from_string, to_string, &current_to_offset, to_increment, type,
2127 rep, index_same](CodeStubAssembler* assembler, Node* offset) { 2209 rep, index_same](CodeStubAssembler* assembler, Node* offset) {
2128 Node* value = assembler->Load(type, from_string, offset); 2210 Node* value = assembler->Load(type, from_string, offset);
2129 assembler->StoreNoWriteBarrier( 2211 assembler->StoreNoWriteBarrier(
2130 rep, to_string, 2212 rep, to_string,
2131 index_same ? offset : current_to_offset.value(), value); 2213 index_same ? offset : current_to_offset.value(), value);
2132 if (!index_same) { 2214 if (!index_same) {
2133 current_to_offset.Bind(assembler->IntPtrAdd( 2215 assembler->Increment(current_to_offset, to_increment,
2134 current_to_offset.value(), 2216 INTPTR_PARAMETERS);
Jakob Kummerow 2016/11/23 17:17:05 nit: INTPTR_PARAMETERS is the default, no need to pass it explicitly.
danno 2016/11/29 14:39:59 Done.
2135 assembler->IntPtrConstant(to_increment)));
2136 } 2217 }
2137 }, 2218 },
2138 from_increment, IndexAdvanceMode::kPost); 2219 from_increment, IndexAdvanceMode::kPost);
2139 } 2220 }
2140 2221
2141 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array, 2222 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
2142 Node* offset, 2223 Node* offset,
2143 ElementsKind from_kind, 2224 ElementsKind from_kind,
2144 ElementsKind to_kind, 2225 ElementsKind to_kind,
2145 Label* if_hole) { 2226 Label* if_hole) {
(...skipping 401 matching lines...) Expand 10 before | Expand all | Expand 10 after
2547 { 2628 {
2548 // The {value} is a Smi, convert it to a String. 2629 // The {value} is a Smi, convert it to a String.
2549 Callable callable = CodeFactory::NumberToString(isolate()); 2630 Callable callable = CodeFactory::NumberToString(isolate());
2550 var_value.Bind(CallStub(callable, context, value)); 2631 var_value.Bind(CallStub(callable, context, value));
2551 Goto(&if_valueisstring); 2632 Goto(&if_valueisstring);
2552 } 2633 }
2553 Bind(&if_valueisstring); 2634 Bind(&if_valueisstring);
2554 return var_value.value(); 2635 return var_value.value();
2555 } 2636 }
2556 2637
2638 Node* CodeStubAssembler::ChangeNumberToFloat64(compiler::Node* value) {
2639 Variable result(this, MachineRepresentation::kFloat64);
2640 Label smi(this);
2641 Label done(this, &result);
2642 GotoIf(TaggedIsSmi(value), &smi);
2643 result.Bind(
2644 LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64()));
Jakob Kummerow 2016/11/23 17:17:05 This doesn't canonicalize the hole NaN. Use a Float64 load that canonicalizes (comment truncated in page capture — original suggestion presumably named the specific helper).
danno 2016/11/29 14:39:59 Done.
2645 Goto(&done);
2646
2647 Bind(&smi);
2648 {
2649 result.Bind(ChangeInt32ToFloat64(SmiUntag(value)));
2650 Goto(&done);
2651 }
2652
2653 Bind(&done);
2654 return result.value();
2655 }
2656
2557 Node* CodeStubAssembler::ToThisValue(Node* context, Node* value, 2657 Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
2558 PrimitiveType primitive_type, 2658 PrimitiveType primitive_type,
2559 char const* method_name) { 2659 char const* method_name) {
2560 // We might need to loop once due to JSValue unboxing. 2660 // We might need to loop once due to JSValue unboxing.
2561 Variable var_value(this, MachineRepresentation::kTagged); 2661 Variable var_value(this, MachineRepresentation::kTagged);
2562 Label loop(this, &var_value), done_loop(this), 2662 Label loop(this, &var_value), done_loop(this),
2563 done_throw(this, Label::kDeferred); 2663 done_throw(this, Label::kDeferred);
2564 var_value.Bind(value); 2664 var_value.Bind(value);
2565 Goto(&loop); 2665 Goto(&loop);
2566 Bind(&loop); 2666 Bind(&loop);
(...skipping 1396 matching lines...) Expand 10 before | Expand all | Expand 10 after
3963 void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) { 4063 void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
3964 DCHECK(delta > 0); 4064 DCHECK(delta > 0);
3965 if (FLAG_native_code_counters && counter->Enabled()) { 4065 if (FLAG_native_code_counters && counter->Enabled()) {
3966 Node* counter_address = ExternalConstant(ExternalReference(counter)); 4066 Node* counter_address = ExternalConstant(ExternalReference(counter));
3967 Node* value = Load(MachineType::Int32(), counter_address); 4067 Node* value = Load(MachineType::Int32(), counter_address);
3968 value = Int32Sub(value, Int32Constant(delta)); 4068 value = Int32Sub(value, Int32Constant(delta));
3969 StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value); 4069 StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
3970 } 4070 }
3971 } 4071 }
3972 4072
4073 void CodeStubAssembler::Increment(Variable& variable, int value,
4074 ParameterMode mode) {
4075 DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
4076 variable.rep() == MachineType::PointerRepresentation());
4077 DCHECK_IMPLIES(mode == SMI_PARAMETERS,
4078 variable.rep() == MachineRepresentation::kTagged ||
4079 variable.rep() == MachineRepresentation::kTaggedSigned);
4080 variable.Bind(IntPtrAdd(variable.value(), IntPtrOrSmiConstant(value, mode)));
4081 }
4082
3973 void CodeStubAssembler::Use(Label* label) { 4083 void CodeStubAssembler::Use(Label* label) {
3974 GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label); 4084 GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
3975 } 4085 }
3976 4086
3977 void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex, 4087 void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
3978 Variable* var_index, Label* if_keyisunique, 4088 Variable* var_index, Label* if_keyisunique,
3979 Label* if_bailout) { 4089 Label* if_bailout) {
3980 DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep()); 4090 DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
3981 Comment("TryToName"); 4091 Comment("TryToName");
3982 4092
(...skipping 90 matching lines...) Expand 10 before | Expand all | Expand 10 after
4073 4183
4074 Variable var_count(this, MachineType::PointerRepresentation()); 4184 Variable var_count(this, MachineType::PointerRepresentation());
4075 Variable var_entry(this, MachineType::PointerRepresentation()); 4185 Variable var_entry(this, MachineType::PointerRepresentation());
4076 Variable* loop_vars[] = {&var_count, &var_entry, var_name_index}; 4186 Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
4077 Label loop(this, 3, loop_vars); 4187 Label loop(this, 3, loop_vars);
4078 var_count.Bind(count); 4188 var_count.Bind(count);
4079 var_entry.Bind(entry); 4189 var_entry.Bind(entry);
4080 Goto(&loop); 4190 Goto(&loop);
4081 Bind(&loop); 4191 Bind(&loop);
4082 { 4192 {
4083 Node* count = var_count.value();
4084 Node* entry = var_entry.value(); 4193 Node* entry = var_entry.value();
4085 4194
4086 Node* index = EntryToIndex<Dictionary>(entry); 4195 Node* index = EntryToIndex<Dictionary>(entry);
4087 var_name_index->Bind(index); 4196 var_name_index->Bind(index);
4088 4197
4089 Node* current = 4198 Node* current =
4090 LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS); 4199 LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS);
4091 GotoIf(WordEqual(current, undefined), if_not_found); 4200 GotoIf(WordEqual(current, undefined), if_not_found);
4092 GotoIf(WordEqual(current, unique_name), if_found); 4201 GotoIf(WordEqual(current, unique_name), if_found);
4093 4202
4094 // See Dictionary::NextProbe(). 4203 // See Dictionary::NextProbe().
4095 count = IntPtrAdd(count, IntPtrConstant(1)); 4204 Increment(var_count);
4096 entry = WordAnd(IntPtrAdd(entry, count), mask); 4205 entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);
4097 4206
4098 var_count.Bind(count);
4099 var_entry.Bind(entry); 4207 var_entry.Bind(entry);
4100 Goto(&loop); 4208 Goto(&loop);
4101 } 4209 }
4102 } 4210 }
4103 4211
4104 // Instantiate template methods to workaround GCC compilation issue. 4212 // Instantiate template methods to workaround GCC compilation issue.
4105 template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>( 4213 template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(
4106 Node*, Node*, Label*, Variable*, Label*, int); 4214 Node*, Node*, Label*, Variable*, Label*, int);
4107 template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>( 4215 template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>(
4108 Node*, Node*, Label*, Variable*, Label*, int); 4216 Node*, Node*, Label*, Variable*, Label*, int);
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after
4153 Node* the_hole = TheHoleConstant(); 4261 Node* the_hole = TheHoleConstant();
4154 4262
4155 Variable var_count(this, MachineType::PointerRepresentation()); 4263 Variable var_count(this, MachineType::PointerRepresentation());
4156 Variable* loop_vars[] = {&var_count, var_entry}; 4264 Variable* loop_vars[] = {&var_count, var_entry};
4157 Label loop(this, 2, loop_vars); 4265 Label loop(this, 2, loop_vars);
4158 var_count.Bind(count); 4266 var_count.Bind(count);
4159 var_entry->Bind(entry); 4267 var_entry->Bind(entry);
4160 Goto(&loop); 4268 Goto(&loop);
4161 Bind(&loop); 4269 Bind(&loop);
4162 { 4270 {
4163 Node* count = var_count.value();
4164 Node* entry = var_entry->value(); 4271 Node* entry = var_entry->value();
4165 4272
4166 Node* index = EntryToIndex<Dictionary>(entry); 4273 Node* index = EntryToIndex<Dictionary>(entry);
4167 Node* current = 4274 Node* current =
4168 LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS); 4275 LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS);
4169 GotoIf(WordEqual(current, undefined), if_not_found); 4276 GotoIf(WordEqual(current, undefined), if_not_found);
4170 Label next_probe(this); 4277 Label next_probe(this);
4171 { 4278 {
4172 Label if_currentissmi(this), if_currentisnotsmi(this); 4279 Label if_currentissmi(this), if_currentisnotsmi(this);
4173 Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi); 4280 Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
4174 Bind(&if_currentissmi); 4281 Bind(&if_currentissmi);
4175 { 4282 {
4176 Node* current_value = SmiUntag(current); 4283 Node* current_value = SmiUntag(current);
4177 Branch(WordEqual(current_value, intptr_index), if_found, &next_probe); 4284 Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
4178 } 4285 }
4179 Bind(&if_currentisnotsmi); 4286 Bind(&if_currentisnotsmi);
4180 { 4287 {
4181 GotoIf(WordEqual(current, the_hole), &next_probe); 4288 GotoIf(WordEqual(current, the_hole), &next_probe);
4182 // Current must be the Number. 4289 // Current must be the Number.
4183 Node* current_value = LoadHeapNumberValue(current); 4290 Node* current_value = LoadHeapNumberValue(current);
4184 Branch(Float64Equal(current_value, key_as_float64), if_found, 4291 Branch(Float64Equal(current_value, key_as_float64), if_found,
4185 &next_probe); 4292 &next_probe);
4186 } 4293 }
4187 } 4294 }
4188 4295
4189 Bind(&next_probe); 4296 Bind(&next_probe);
4190 // See Dictionary::NextProbe(). 4297 // See Dictionary::NextProbe().
4191 count = IntPtrAdd(count, IntPtrConstant(1)); 4298 Increment(var_count);
4192 entry = WordAnd(IntPtrAdd(entry, count), mask); 4299 entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);
4193 4300
4194 var_count.Bind(count);
4195 var_entry->Bind(entry); 4301 var_entry->Bind(entry);
4196 Goto(&loop); 4302 Goto(&loop);
4197 } 4303 }
4198 } 4304 }
4199 4305
4200 void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name, 4306 void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name,
4201 Node* descriptors, Node* nof, 4307 Node* descriptors, Node* nof,
4202 Label* if_found, 4308 Label* if_found,
4203 Variable* var_name_index, 4309 Variable* var_name_index,
4204 Label* if_not_found) { 4310 Label* if_not_found) {
(...skipping 3035 matching lines...) Expand 10 before | Expand all | Expand 10 after
7240 // loop that helps turbofan generate better code. If there's only a single 7346 // loop that helps turbofan generate better code. If there's only a single
7241 // check, then the CodeStubAssembler forces it to be at the beginning of the 7347 // check, then the CodeStubAssembler forces it to be at the beginning of the
7242 // loop requiring a backwards branch at the end of the loop (it's not possible 7348 // loop requiring a backwards branch at the end of the loop (it's not possible
7243 // to force the loop header check at the end of the loop and branch forward to 7349 // to force the loop header check at the end of the loop and branch forward to
7244 // it from the pre-header). The extra branch is slower in the case that the 7350 // it from the pre-header). The extra branch is slower in the case that the
7245 // loop actually iterates. 7351 // loop actually iterates.
7246 Branch(WordEqual(var.value(), end_index), &after_loop, &loop); 7352 Branch(WordEqual(var.value(), end_index), &after_loop, &loop);
7247 Bind(&loop); 7353 Bind(&loop);
7248 { 7354 {
7249 if (mode == IndexAdvanceMode::kPre) { 7355 if (mode == IndexAdvanceMode::kPre) {
7250 var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment))); 7356 Increment(var, increment);
7251 } 7357 }
7252 body(this, var.value()); 7358 body(this, var.value());
7253 if (mode == IndexAdvanceMode::kPost) { 7359 if (mode == IndexAdvanceMode::kPost) {
7254 var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment))); 7360 Increment(var, increment);
7255 } 7361 }
7256 Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop); 7362 Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
7257 } 7363 }
7258 Bind(&after_loop); 7364 Bind(&after_loop);
7259 } 7365 }
7260 7366
7261 void CodeStubAssembler::BuildFastFixedArrayForEach( 7367 void CodeStubAssembler::BuildFastFixedArrayForEach(
7262 compiler::Node* fixed_array, ElementsKind kind, 7368 compiler::Node* fixed_array, ElementsKind kind,
7263 compiler::Node* first_element_inclusive, 7369 compiler::Node* first_element_inclusive,
7264 compiler::Node* last_element_exclusive, 7370 compiler::Node* last_element_exclusive,
(...skipping 1641 matching lines...) Expand 10 before | Expand all | Expand 10 after
8906 Node* one = Float64Constant(1.0); 9012 Node* one = Float64Constant(1.0);
8907 Node* finc_result = Float64Add(finc_value, one); 9013 Node* finc_result = Float64Add(finc_value, one);
8908 var_result.Bind(AllocateHeapNumberWithValue(finc_result)); 9014 var_result.Bind(AllocateHeapNumberWithValue(finc_result));
8909 Goto(&end); 9015 Goto(&end);
8910 } 9016 }
8911 9017
8912 Bind(&end); 9018 Bind(&end);
8913 return var_result.value(); 9019 return var_result.value();
8914 } 9020 }
8915 9021
9022 void CodeStubAssembler::GotoIfNotNumber(compiler::Node* input,
9023 Label* is_not_number) {
9024 Label is_number(this);
9025 GotoIf(TaggedIsSmi(input), &is_number);
9026 Node* input_map = LoadMap(input);
9027 Branch(Word32NotEqual(input_map, HeapNumberMapConstant()), is_not_number,
Jakob Kummerow 2016/11/23 17:17:06 Maps are words, not word32s! Even better: Branch on WordNotEqual/WordEqual of the map against HeapNumberMapConstant() (suggestion truncated in page capture).
danno 2016/11/29 14:39:59 Done.
9028 &is_number);
9029 Bind(&is_number);
9030 }
9031
9032 void CodeStubAssembler::GotoIfNumber(compiler::Node* input, Label* is_number) {
9033 GotoIf(TaggedIsSmi(input), is_number);
9034 Node* input_map = LoadMap(input);
9035 GotoIf(Word32Equal(input_map, HeapNumberMapConstant()), is_number);
Jakob Kummerow 2016/11/23 17:17:06 Same here
danno 2016/11/29 14:39:59 Done.
9036 }
9037
8916 compiler::Node* CodeStubAssembler::CreateArrayIterator( 9038 compiler::Node* CodeStubAssembler::CreateArrayIterator(
8917 compiler::Node* array, compiler::Node* array_map, 9039 compiler::Node* array, compiler::Node* array_map,
8918 compiler::Node* array_type, compiler::Node* context, IterationKind mode) { 9040 compiler::Node* array_type, compiler::Node* context, IterationKind mode) {
8919 int kBaseMapIndex = 0; 9041 int kBaseMapIndex = 0;
8920 switch (mode) { 9042 switch (mode) {
8921 case IterationKind::kKeys: 9043 case IterationKind::kKeys:
8922 kBaseMapIndex = Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX; 9044 kBaseMapIndex = Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX;
8923 break; 9045 break;
8924 case IterationKind::kValues: 9046 case IterationKind::kValues:
8925 kBaseMapIndex = Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX; 9047 kBaseMapIndex = Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
8962 // the receiver is a TypedArray or not. 9084 // the receiver is a TypedArray or not.
8963 9085
8964 Label if_istypedarray(this), if_isgeneric(this); 9086 Label if_istypedarray(this), if_isgeneric(this);
8965 9087
8966 Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)), 9088 Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
8967 &if_istypedarray, &if_isgeneric); 9089 &if_istypedarray, &if_isgeneric);
8968 9090
8969 Bind(&if_isgeneric); 9091 Bind(&if_isgeneric);
8970 { 9092 {
8971 Label if_isfast(this), if_isslow(this); 9093 Label if_isfast(this), if_isslow(this);
8972 BranchIfFastJSArray(array, context, &if_isfast, &if_isslow); 9094 BranchIfFastJSArray(array, context, FastJSArrayAccessMode::INBOUNDS_READ,
9095 &if_isfast, &if_isslow);
8973 9096
8974 Bind(&if_isfast); 9097 Bind(&if_isfast);
8975 { 9098 {
8976 var_map_index.Bind( 9099 var_map_index.Bind(
8977 IntPtrConstant(Context::FAST_ARRAY_KEY_ITERATOR_MAP_INDEX)); 9100 IntPtrConstant(Context::FAST_ARRAY_KEY_ITERATOR_MAP_INDEX));
8978 var_array_map.Bind(array_map); 9101 var_array_map.Bind(array_map);
8979 Goto(&allocate_iterator); 9102 Goto(&allocate_iterator);
8980 } 9103 }
8981 9104
8982 Bind(&if_isslow); 9105 Bind(&if_isslow);
(...skipping 13 matching lines...) Expand all
8996 Goto(&allocate_iterator); 9119 Goto(&allocate_iterator);
8997 } 9120 }
8998 } else { 9121 } else {
8999 Label if_istypedarray(this), if_isgeneric(this); 9122 Label if_istypedarray(this), if_isgeneric(this);
9000 Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)), 9123 Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
9001 &if_istypedarray, &if_isgeneric); 9124 &if_istypedarray, &if_isgeneric);
9002 9125
9003 Bind(&if_isgeneric); 9126 Bind(&if_isgeneric);
9004 { 9127 {
9005 Label if_isfast(this), if_isslow(this); 9128 Label if_isfast(this), if_isslow(this);
9006 BranchIfFastJSArray(array, context, &if_isfast, &if_isslow); 9129 BranchIfFastJSArray(
9130 array, context,
9131 CodeStubAssembler::FastJSArrayAccessMode::INBOUNDS_READ, &if_isfast,
Jakob Kummerow 2016/11/23 17:17:06 nit: s/CodeStubAssembler:://
danno 2016/11/29 14:39:59 Done.
9132 &if_isslow);
9007 9133
9008 Bind(&if_isfast); 9134 Bind(&if_isfast);
9009 { 9135 {
9010 Label if_ispacked(this), if_isholey(this); 9136 Label if_ispacked(this), if_isholey(this);
9011 Node* elements_kind = LoadMapElementsKind(array_map); 9137 Node* elements_kind = LoadMapElementsKind(array_map);
9012 Branch(IsHoleyFastElementsKind(elements_kind), &if_isholey, 9138 Branch(IsHoleyFastElementsKind(elements_kind), &if_isholey,
9013 &if_ispacked); 9139 &if_ispacked);
9014 9140
9015 Bind(&if_isholey); 9141 Bind(&if_isholey);
9016 { 9142 {
(...skipping 123 matching lines...) Expand 10 before | Expand all | Expand 10 after
9140 argc_, FAST_ELEMENTS, mode, 9266 argc_, FAST_ELEMENTS, mode,
9141 (StandardFrameConstants::kFixedSlotCountAboveFp - 1) * kPointerSize); 9267 (StandardFrameConstants::kFixedSlotCountAboveFp - 1) * kPointerSize);
9142 arguments_ = assembler_->IntPtrAddFoldConstants(fp_, offset); 9268 arguments_ = assembler_->IntPtrAddFoldConstants(fp_, offset);
9143 if (mode == CodeStubAssembler::INTEGER_PARAMETERS) { 9269 if (mode == CodeStubAssembler::INTEGER_PARAMETERS) {
9144 argc_ = assembler->ChangeInt32ToIntPtr(argc_); 9270 argc_ = assembler->ChangeInt32ToIntPtr(argc_);
9145 } else if (mode == CodeStubAssembler::SMI_PARAMETERS) { 9271 } else if (mode == CodeStubAssembler::SMI_PARAMETERS) {
9146 argc_ = assembler->SmiUntag(argc_); 9272 argc_ = assembler->SmiUntag(argc_);
9147 } 9273 }
9148 } 9274 }
9149 9275
9150 compiler::Node* CodeStubArguments::GetReceiver() { 9276 compiler::Node* CodeStubArguments::GetReceiver() const {
9151 return assembler_->Load(MachineType::AnyTagged(), arguments_, 9277 return assembler_->Load(MachineType::AnyTagged(), arguments_,
9152 assembler_->IntPtrConstant(kPointerSize)); 9278 assembler_->IntPtrConstant(kPointerSize));
9153 } 9279 }
9154 9280
9155 compiler::Node* CodeStubArguments::AtIndex( 9281 compiler::Node* CodeStubArguments::AtIndex(
9156 compiler::Node* index, CodeStubAssembler::ParameterMode mode) { 9282 compiler::Node* index, CodeStubAssembler::ParameterMode mode) const {
9157 typedef compiler::Node Node; 9283 typedef compiler::Node Node;
9158 Node* negated_index = assembler_->IntPtrSubFoldConstants( 9284 Node* negated_index = assembler_->IntPtrSubFoldConstants(
9159 assembler_->IntPtrOrSmiConstant(0, mode), index); 9285 assembler_->IntPtrOrSmiConstant(0, mode), index);
9160 Node* offset = 9286 Node* offset =
9161 assembler_->ElementOffsetFromIndex(negated_index, FAST_ELEMENTS, mode, 0); 9287 assembler_->ElementOffsetFromIndex(negated_index, FAST_ELEMENTS, mode, 0);
9162 return assembler_->Load(MachineType::AnyTagged(), arguments_, offset); 9288 return assembler_->Load(MachineType::AnyTagged(), arguments_, offset);
9163 } 9289 }
9164 9290
9165 compiler::Node* CodeStubArguments::AtIndex(int index) { 9291 compiler::Node* CodeStubArguments::AtIndex(int index) const {
9166 return AtIndex(assembler_->IntPtrConstant(index)); 9292 return AtIndex(assembler_->IntPtrConstant(index));
9167 } 9293 }
9168 9294
9169 void CodeStubArguments::ForEach(const CodeStubAssembler::VariableList& vars, 9295 void CodeStubArguments::ForEach(const CodeStubAssembler::VariableList& vars,
9170 CodeStubArguments::ForEachBodyFunction body, 9296 CodeStubArguments::ForEachBodyFunction body,
9171 compiler::Node* first, compiler::Node* last, 9297 compiler::Node* first, compiler::Node* last,
9172 CodeStubAssembler::ParameterMode mode) { 9298 CodeStubAssembler::ParameterMode mode) {
9173 assembler_->Comment("CodeStubArguments::ForEach"); 9299 assembler_->Comment("CodeStubArguments::ForEach");
9174 DCHECK_IMPLIES(first == nullptr || last == nullptr, 9300 DCHECK_IMPLIES(first == nullptr || last == nullptr,
9175 mode == CodeStubAssembler::INTPTR_PARAMETERS); 9301 mode == CodeStubAssembler::INTPTR_PARAMETERS);
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
9214 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == (FAST_ELEMENTS | 1)); 9340 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == (FAST_ELEMENTS | 1));
9215 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == (FAST_DOUBLE_ELEMENTS | 1)); 9341 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == (FAST_DOUBLE_ELEMENTS | 1));
9216 9342
9217 // Check prototype chain if receiver does not have packed elements. 9343 // Check prototype chain if receiver does not have packed elements.
9218 Node* holey_elements = Word32And(elements_kind, Int32Constant(1)); 9344 Node* holey_elements = Word32And(elements_kind, Int32Constant(1));
9219 return Word32Equal(holey_elements, Int32Constant(1)); 9345 return Word32Equal(holey_elements, Int32Constant(1));
9220 } 9346 }
9221 9347
9222 } // namespace internal 9348 } // namespace internal
9223 } // namespace v8 9349 } // namespace v8
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698