OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stubs.h" | 5 #include "src/code-stubs.h" |
6 | 6 |
7 #include "src/bailout-reason.h" | 7 #include "src/bailout-reason.h" |
8 #include "src/crankshaft/hydrogen.h" | 8 #include "src/crankshaft/hydrogen.h" |
9 #include "src/crankshaft/lithium.h" | 9 #include "src/crankshaft/lithium.h" |
10 #include "src/field-index.h" | 10 #include "src/field-index.h" |
(...skipping 426 matching lines...) |
437 HValue* closure = GetParameter(0); | 437 HValue* closure = GetParameter(0); |
438 HValue* literal_index = GetParameter(1); | 438 HValue* literal_index = GetParameter(1); |
439 | 439 |
440 // This stub is very performance sensitive, the generated code must be tuned | 440 // This stub is very performance sensitive, the generated code must be tuned |
441 // so that it doesn't build an eager frame. | 441 // so that it doesn't build an eager frame. |
442 info()->MarkMustNotHaveEagerFrame(); | 442 info()->MarkMustNotHaveEagerFrame(); |
443 | 443 |
444 HValue* literals_array = Add<HLoadNamedField>( | 444 HValue* literals_array = Add<HLoadNamedField>( |
445 closure, nullptr, HObjectAccess::ForLiteralsPointer()); | 445 closure, nullptr, HObjectAccess::ForLiteralsPointer()); |
446 HInstruction* boilerplate = Add<HLoadKeyed>( | 446 HInstruction* boilerplate = Add<HLoadKeyed>( |
447 literals_array, literal_index, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, | 447 literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, |
448 LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); | 448 NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); |
449 | 449 |
450 IfBuilder if_notundefined(this); | 450 IfBuilder if_notundefined(this); |
451 if_notundefined.IfNot<HCompareObjectEqAndBranch>( | 451 if_notundefined.IfNot<HCompareObjectEqAndBranch>( |
452 boilerplate, graph()->GetConstantUndefined()); | 452 boilerplate, graph()->GetConstantUndefined()); |
453 if_notundefined.Then(); | 453 if_notundefined.Then(); |
454 { | 454 { |
455 int result_size = | 455 int result_size = |
456 JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 456 JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
457 HValue* result = | 457 HValue* result = |
458 Add<HAllocate>(Add<HConstant>(result_size), HType::JSObject(), | 458 Add<HAllocate>(Add<HConstant>(result_size), HType::JSObject(), |
(...skipping 38 matching lines...) |
497 HValue* literal_index = GetParameter(1); | 497 HValue* literal_index = GetParameter(1); |
498 | 498 |
499 // This stub is very performance sensitive, the generated code must be tuned | 499 // This stub is very performance sensitive, the generated code must be tuned |
500 // so that it doesn't build an eager frame. | 500 // so that it doesn't build an eager frame. |
501 info()->MarkMustNotHaveEagerFrame(); | 501 info()->MarkMustNotHaveEagerFrame(); |
502 | 502 |
503 HValue* literals_array = Add<HLoadNamedField>( | 503 HValue* literals_array = Add<HLoadNamedField>( |
504 closure, nullptr, HObjectAccess::ForLiteralsPointer()); | 504 closure, nullptr, HObjectAccess::ForLiteralsPointer()); |
505 | 505 |
506 HInstruction* allocation_site = Add<HLoadKeyed>( | 506 HInstruction* allocation_site = Add<HLoadKeyed>( |
507 literals_array, literal_index, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, | 507 literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, |
508 LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); | 508 NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); |
509 IfBuilder checker(this); | 509 IfBuilder checker(this); |
510 checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site, | 510 checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site, |
511 undefined); | 511 undefined); |
512 checker.Then(); | 512 checker.Then(); |
513 | 513 |
514 HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( | 514 HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( |
515 AllocationSite::kTransitionInfoOffset); | 515 AllocationSite::kTransitionInfoOffset); |
516 HInstruction* boilerplate = | 516 HInstruction* boilerplate = |
517 Add<HLoadNamedField>(allocation_site, nullptr, access); | 517 Add<HLoadNamedField>(allocation_site, nullptr, access); |
518 HValue* elements = AddLoadElements(boilerplate); | 518 HValue* elements = AddLoadElements(boilerplate); |
(...skipping 46 matching lines...) |
565 template <> | 565 template <> |
566 HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() { | 566 HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() { |
567 HValue* undefined = graph()->GetConstantUndefined(); | 567 HValue* undefined = graph()->GetConstantUndefined(); |
568 HValue* closure = GetParameter(0); | 568 HValue* closure = GetParameter(0); |
569 HValue* literal_index = GetParameter(1); | 569 HValue* literal_index = GetParameter(1); |
570 | 570 |
571 HValue* literals_array = Add<HLoadNamedField>( | 571 HValue* literals_array = Add<HLoadNamedField>( |
572 closure, nullptr, HObjectAccess::ForLiteralsPointer()); | 572 closure, nullptr, HObjectAccess::ForLiteralsPointer()); |
573 | 573 |
574 HInstruction* allocation_site = Add<HLoadKeyed>( | 574 HInstruction* allocation_site = Add<HLoadKeyed>( |
575 literals_array, literal_index, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, | 575 literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, |
576 LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); | 576 NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); |
577 | 577 |
578 IfBuilder checker(this); | 578 IfBuilder checker(this); |
579 checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site, | 579 checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site, |
580 undefined); | 580 undefined); |
581 checker.And(); | 581 checker.And(); |
582 | 582 |
583 HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( | 583 HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( |
584 AllocationSite::kTransitionInfoOffset); | 584 AllocationSite::kTransitionInfoOffset); |
585 HInstruction* boilerplate = | 585 HInstruction* boilerplate = |
586 Add<HLoadNamedField>(allocation_site, nullptr, access); | 586 Add<HLoadNamedField>(allocation_site, nullptr, access); |
(...skipping 108 matching lines...) |
695 // next GC, and allocation sites are designed to survive several GCs anyway. | 695 // next GC, and allocation sites are designed to survive several GCs anyway. |
696 Add<HStoreNamedField>( | 696 Add<HStoreNamedField>( |
697 object, | 697 object, |
698 HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset), | 698 HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset), |
699 site); | 699 site); |
700 Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(), | 700 Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(), |
701 object); | 701 object); |
702 | 702 |
703 HInstruction* feedback_vector = GetParameter(0); | 703 HInstruction* feedback_vector = GetParameter(0); |
704 HInstruction* slot = GetParameter(1); | 704 HInstruction* slot = GetParameter(1); |
705 Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS, | 705 Add<HStoreKeyed>(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, |
706 INITIALIZING_STORE); | 706 INITIALIZING_STORE); |
707 return feedback_vector; | 707 return feedback_vector; |
708 } | 708 } |
709 | 709 |
710 | 710 |
711 Handle<Code> CreateAllocationSiteStub::GenerateCode() { | 711 Handle<Code> CreateAllocationSiteStub::GenerateCode() { |
712 return DoGenerateCode(this); | 712 return DoGenerateCode(this); |
713 } | 713 } |
714 | 714 |
715 | 715 |
(...skipping 11 matching lines...) |
727 AddStoreMapConstant(object, weak_cell_map); | 727 AddStoreMapConstant(object, weak_cell_map); |
728 | 728 |
729 HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex); | 729 HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex); |
730 Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value); | 730 Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellValue(), value); |
731 Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(), | 731 Add<HStoreNamedField>(object, HObjectAccess::ForWeakCellNext(), |
732 graph()->GetConstantHole()); | 732 graph()->GetConstantHole()); |
733 | 733 |
734 HInstruction* feedback_vector = | 734 HInstruction* feedback_vector = |
735 GetParameter(CreateWeakCellDescriptor::kVectorIndex); | 735 GetParameter(CreateWeakCellDescriptor::kVectorIndex); |
736 HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex); | 736 HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex); |
737 Add<HStoreKeyed>(feedback_vector, slot, object, FAST_ELEMENTS, | 737 Add<HStoreKeyed>(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, |
738 INITIALIZING_STORE); | 738 INITIALIZING_STORE); |
739 return graph()->GetConstant0(); | 739 return graph()->GetConstant0(); |
740 } | 740 } |
741 | 741 |
742 | 742 |
743 Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); } | 743 Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); } |
744 | 744 |
745 | 745 |
746 template <> | 746 template <> |
747 HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() { | 747 HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() { |
(...skipping 135 matching lines...) |
883 } | 883 } |
884 | 884 |
885 | 885 |
886 Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } | 886 Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } |
887 | 887 |
888 | 888 |
889 HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, | 889 HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, |
890 HValue* value) { | 890 HValue* value) { |
891 HValue* result = NULL; | 891 HValue* result = NULL; |
892 HInstruction* backing_store = | 892 HInstruction* backing_store = |
893 Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, FAST_ELEMENTS, | 893 Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, nullptr, |
894 ALLOW_RETURN_HOLE); | 894 FAST_ELEMENTS, ALLOW_RETURN_HOLE); |
895 Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map()); | 895 Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map()); |
896 HValue* backing_store_length = Add<HLoadNamedField>( | 896 HValue* backing_store_length = Add<HLoadNamedField>( |
897 backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); | 897 backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); |
898 IfBuilder in_unmapped_range(this); | 898 IfBuilder in_unmapped_range(this); |
899 in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length, | 899 in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length, |
900 Token::LT); | 900 Token::LT); |
901 in_unmapped_range.Then(); | 901 in_unmapped_range.Then(); |
902 { | 902 { |
903 if (value == NULL) { | 903 if (value == NULL) { |
904 result = Add<HLoadKeyed>(backing_store, key, nullptr, FAST_HOLEY_ELEMENTS, | 904 result = Add<HLoadKeyed>(backing_store, key, nullptr, nullptr, |
905 NEVER_RETURN_HOLE); | 905 FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE); |
906 } else { | 906 } else { |
907 Add<HStoreKeyed>(backing_store, key, value, FAST_HOLEY_ELEMENTS); | 907 Add<HStoreKeyed>(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS); |
908 } | 908 } |
909 } | 909 } |
910 in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); | 910 in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); |
911 in_unmapped_range.End(); | 911 in_unmapped_range.End(); |
912 return result; | 912 return result; |
913 } | 913 } |
914 | 914 |
915 | 915 |
916 HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, | 916 HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, |
917 HValue* key, | 917 HValue* key, |
(...skipping 36 matching lines...) |
954 HValue* constant_two = Add<HConstant>(2); | 954 HValue* constant_two = Add<HConstant>(2); |
955 HValue* elements = AddLoadElements(receiver, nullptr); | 955 HValue* elements = AddLoadElements(receiver, nullptr); |
956 HValue* elements_length = Add<HLoadNamedField>( | 956 HValue* elements_length = Add<HLoadNamedField>( |
957 elements, nullptr, HObjectAccess::ForFixedArrayLength()); | 957 elements, nullptr, HObjectAccess::ForFixedArrayLength()); |
958 HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two); | 958 HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two); |
959 IfBuilder in_range(this); | 959 IfBuilder in_range(this); |
960 in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT); | 960 in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT); |
961 in_range.Then(); | 961 in_range.Then(); |
962 { | 962 { |
963 HValue* index = AddUncasted<HAdd>(key, constant_two); | 963 HValue* index = AddUncasted<HAdd>(key, constant_two); |
964 HInstruction* mapped_index = Add<HLoadKeyed>( | 964 HInstruction* mapped_index = |
965 elements, index, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE); | 965 Add<HLoadKeyed>(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, |
| 966 ALLOW_RETURN_HOLE); |
966 | 967 |
967 IfBuilder is_valid(this); | 968 IfBuilder is_valid(this); |
968 is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index, | 969 is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index, |
969 graph()->GetConstantHole()); | 970 graph()->GetConstantHole()); |
970 is_valid.Then(); | 971 is_valid.Then(); |
971 { | 972 { |
972 // TODO(mvstanton): I'd like to assert from this point that if the | 973 // TODO(mvstanton): I'd like to assert from this point that if the |
973 // mapped_index is not the hole, it is indeed a smi. An unnecessary | 974 // mapped_index is not the hole, it is indeed a smi. An unnecessary |
974 // smi check is being emitted. | 975 // smi check is being emitted. |
975 HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(), | 976 HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(), |
976 nullptr, FAST_ELEMENTS); | 977 nullptr, nullptr, FAST_ELEMENTS); |
977 STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); | 978 STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); |
978 if (is_load) { | 979 if (is_load) { |
979 HValue* result = Add<HLoadKeyed>(the_context, mapped_index, nullptr, | 980 HValue* result = |
980 FAST_ELEMENTS, ALLOW_RETURN_HOLE); | 981 Add<HLoadKeyed>(the_context, mapped_index, nullptr, nullptr, |
| 982 FAST_ELEMENTS, ALLOW_RETURN_HOLE); |
981 environment()->Push(result); | 983 environment()->Push(result); |
982 } else { | 984 } else { |
983 DCHECK(value != NULL); | 985 DCHECK(value != NULL); |
984 Add<HStoreKeyed>(the_context, mapped_index, value, FAST_ELEMENTS); | 986 Add<HStoreKeyed>(the_context, mapped_index, value, nullptr, |
| 987 FAST_ELEMENTS); |
985 environment()->Push(value); | 988 environment()->Push(value); |
986 } | 989 } |
987 } | 990 } |
988 is_valid.Else(); | 991 is_valid.Else(); |
989 { | 992 { |
990 HValue* result = UnmappedCase(elements, key, value); | 993 HValue* result = UnmappedCase(elements, key, value); |
991 environment()->Push(is_load ? result : value); | 994 environment()->Push(is_load ? result : value); |
992 } | 995 } |
993 is_valid.End(); | 996 is_valid.End(); |
994 } | 997 } |
(...skipping 324 matching lines...) |
1319 // Now populate the elements correctly. | 1322 // Now populate the elements correctly. |
1320 LoopBuilder builder(this, | 1323 LoopBuilder builder(this, |
1321 context(), | 1324 context(), |
1322 LoopBuilder::kPostIncrement); | 1325 LoopBuilder::kPostIncrement); |
1323 HValue* start = graph()->GetConstant0(); | 1326 HValue* start = graph()->GetConstant0(); |
1324 HValue* key = builder.BeginBody(start, checked_length, Token::LT); | 1327 HValue* key = builder.BeginBody(start, checked_length, Token::LT); |
1325 HInstruction* argument_elements = Add<HArgumentsElements>(false); | 1328 HInstruction* argument_elements = Add<HArgumentsElements>(false); |
1326 HInstruction* argument = Add<HAccessArgumentsAt>( | 1329 HInstruction* argument = Add<HAccessArgumentsAt>( |
1327 argument_elements, checked_length, key); | 1330 argument_elements, checked_length, key); |
1328 | 1331 |
1329 Add<HStoreKeyed>(elements, key, argument, kind); | 1332 Add<HStoreKeyed>(elements, key, argument, nullptr, kind); |
1330 builder.EndBody(); | 1333 builder.EndBody(); |
1331 return new_object; | 1334 return new_object; |
1332 } | 1335 } |
1333 | 1336 |
1334 | 1337 |
1335 template <> | 1338 template <> |
1336 HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() { | 1339 HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() { |
1337 ElementsKind kind = casted_stub()->elements_kind(); | 1340 ElementsKind kind = casted_stub()->elements_kind(); |
1338 AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); | 1341 AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); |
1339 return BuildArrayConstructor(kind, override_mode, NONE); | 1342 return BuildArrayConstructor(kind, override_mode, NONE); |
(...skipping 592 matching lines...) |
1932 HValue* iterator, | 1935 HValue* iterator, |
1933 int field_offset) { | 1936 int field_offset) { |
1934 // By making sure to express these loads in the form [<hvalue> + constant] | 1937 // By making sure to express these loads in the form [<hvalue> + constant] |
1935 // the keyed load can be hoisted. | 1938 // the keyed load can be hoisted. |
1936 DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength); | 1939 DCHECK(field_offset >= 0 && field_offset < SharedFunctionInfo::kEntryLength); |
1937 HValue* field_slot = iterator; | 1940 HValue* field_slot = iterator; |
1938 if (field_offset > 0) { | 1941 if (field_offset > 0) { |
1939 HValue* field_offset_value = Add<HConstant>(field_offset); | 1942 HValue* field_offset_value = Add<HConstant>(field_offset); |
1940 field_slot = AddUncasted<HAdd>(iterator, field_offset_value); | 1943 field_slot = AddUncasted<HAdd>(iterator, field_offset_value); |
1941 } | 1944 } |
1942 HInstruction* field_entry = | 1945 HInstruction* field_entry = Add<HLoadKeyed>(optimized_map, field_slot, |
1943 Add<HLoadKeyed>(optimized_map, field_slot, nullptr, FAST_ELEMENTS); | 1946 nullptr, nullptr, FAST_ELEMENTS); |
1944 return field_entry; | 1947 return field_entry; |
1945 } | 1948 } |
1946 | 1949 |
1947 | 1950 |
1948 void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap( | 1951 void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap( |
1949 HValue* js_function, | 1952 HValue* js_function, |
1950 HValue* shared_info, | 1953 HValue* shared_info, |
1951 HValue* native_context) { | 1954 HValue* native_context) { |
1952 Counters* counters = isolate()->counters(); | 1955 Counters* counters = isolate()->counters(); |
1953 Factory* factory = isolate()->factory(); | 1956 Factory* factory = isolate()->factory(); |
(...skipping 398 matching lines...) |
2352 int probe_base = probe * KeyedLookupCache::kEntryLength; | 2355 int probe_base = probe * KeyedLookupCache::kEntryLength; |
2353 HValue* map_index = AddUncasted<HAdd>( | 2356 HValue* map_index = AddUncasted<HAdd>( |
2354 base_index, | 2357 base_index, |
2355 Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex)); | 2358 Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex)); |
2356 map_index->ClearFlag(HValue::kCanOverflow); | 2359 map_index->ClearFlag(HValue::kCanOverflow); |
2357 HValue* key_index = AddUncasted<HAdd>( | 2360 HValue* key_index = AddUncasted<HAdd>( |
2358 base_index, | 2361 base_index, |
2359 Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex)); | 2362 Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex)); |
2360 key_index->ClearFlag(HValue::kCanOverflow); | 2363 key_index->ClearFlag(HValue::kCanOverflow); |
2361 HValue* map_to_check = | 2364 HValue* map_to_check = |
2362 Add<HLoadKeyed>(cache_keys, map_index, nullptr, FAST_ELEMENTS, | 2365 Add<HLoadKeyed>(cache_keys, map_index, nullptr, nullptr, |
2363 NEVER_RETURN_HOLE, 0); | 2366 FAST_ELEMENTS, NEVER_RETURN_HOLE, 0); |
2364 lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map); | 2367 lookup_if->If<HCompareObjectEqAndBranch>(map_to_check, map); |
2365 lookup_if->And(); | 2368 lookup_if->And(); |
2366 HValue* key_to_check = | 2369 HValue* key_to_check = |
2367 Add<HLoadKeyed>(cache_keys, key_index, nullptr, FAST_ELEMENTS, | 2370 Add<HLoadKeyed>(cache_keys, key_index, nullptr, nullptr, |
2368 NEVER_RETURN_HOLE, 0); | 2371 FAST_ELEMENTS, NEVER_RETURN_HOLE, 0); |
2369 lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key); | 2372 lookup_if->If<HCompareObjectEqAndBranch>(key_to_check, key); |
2370 lookup_if->Then(); | 2373 lookup_if->Then(); |
2371 { | 2374 { |
2372 ExternalReference cache_field_offsets_ref = | 2375 ExternalReference cache_field_offsets_ref = |
2373 ExternalReference::keyed_lookup_cache_field_offsets(isolate()); | 2376 ExternalReference::keyed_lookup_cache_field_offsets(isolate()); |
2374 HValue* cache_field_offsets = | 2377 HValue* cache_field_offsets = |
2375 Add<HConstant>(cache_field_offsets_ref); | 2378 Add<HConstant>(cache_field_offsets_ref); |
2376 HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe)); | 2379 HValue* index = AddUncasted<HAdd>(hash, Add<HConstant>(probe)); |
2377 index->ClearFlag(HValue::kCanOverflow); | 2380 index->ClearFlag(HValue::kCanOverflow); |
2378 HValue* property_index = | 2381 HValue* property_index = |
2379 Add<HLoadKeyed>(cache_field_offsets, index, nullptr, | 2382 Add<HLoadKeyed>(cache_field_offsets, index, nullptr, cache_keys, |
2380 INT32_ELEMENTS, NEVER_RETURN_HOLE, 0); | 2383 INT32_ELEMENTS, NEVER_RETURN_HOLE, 0); |
2381 Push(property_index); | 2384 Push(property_index); |
2382 } | 2385 } |
2383 lookup_if->Else(); | 2386 lookup_if->Else(); |
2384 } | 2387 } |
2385 for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) { | 2388 for (int i = 0; i < KeyedLookupCache::kEntriesPerBucket; ++i) { |
2386 lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation); | 2389 lookup_ifs[i].JoinContinuation(&inline_or_runtime_continuation); |
2387 } | 2390 } |
2388 } | 2391 } |
2389 | 2392 |
(...skipping 22 matching lines...) |
2412 return Pop(); | 2415 return Pop(); |
2413 } | 2416 } |
2414 | 2417 |
2415 | 2418 |
2416 Handle<Code> KeyedLoadGenericStub::GenerateCode() { | 2419 Handle<Code> KeyedLoadGenericStub::GenerateCode() { |
2417 return DoGenerateCode(this); | 2420 return DoGenerateCode(this); |
2418 } | 2421 } |
2419 | 2422 |
2420 } // namespace internal | 2423 } // namespace internal |
2421 } // namespace v8 | 2424 } // namespace v8 |