| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 351 matching lines...) |
| 362 __ JumpIfSmi(object, slow_case); | 362 __ JumpIfSmi(object, slow_case); |
| 363 // Check that the object is some kind of JSObject. | 363 // Check that the object is some kind of JSObject. |
| 364 __ JumpIfObjectType(object, map, scratch1, FIRST_JS_RECEIVER_TYPE, | 364 __ JumpIfObjectType(object, map, scratch1, FIRST_JS_RECEIVER_TYPE, |
| 365 slow_case, lt); | 365 slow_case, lt); |
| 366 | 366 |
| 367 // Check that the key is a positive smi. | 367 // Check that the key is a positive smi. |
| 368 __ JumpIfNotSmi(key, slow_case); | 368 __ JumpIfNotSmi(key, slow_case); |
| 369 __ Tbnz(key, kXSignBit, slow_case); | 369 __ Tbnz(key, kXSignBit, slow_case); |
| 370 | 370 |
| 371 // Load the elements object and check its map. | 371 // Load the elements object and check its map. |
| 372 Handle<Map> arguments_map(heap->non_strict_arguments_elements_map()); | 372 Handle<Map> arguments_map(heap->sloppy_arguments_elements_map()); |
| 373 __ Ldr(map, FieldMemOperand(object, JSObject::kElementsOffset)); | 373 __ Ldr(map, FieldMemOperand(object, JSObject::kElementsOffset)); |
| 374 __ CheckMap(map, scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK); | 374 __ CheckMap(map, scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK); |
| 375 | 375 |
| 376 // Check if element is in the range of mapped arguments. If not, jump | 376 // Check if element is in the range of mapped arguments. If not, jump |
| 377 // to the unmapped lookup. | 377 // to the unmapped lookup. |
| 378 __ Ldr(scratch1, FieldMemOperand(map, FixedArray::kLengthOffset)); | 378 __ Ldr(scratch1, FieldMemOperand(map, FixedArray::kLengthOffset)); |
| 379 __ Sub(scratch1, scratch1, Operand(Smi::FromInt(2))); | 379 __ Sub(scratch1, scratch1, Operand(Smi::FromInt(2))); |
| 380 __ Cmp(key, scratch1); | 380 __ Cmp(key, scratch1); |
| 381 __ B(hs, unmapped_case); | 381 __ B(hs, unmapped_case); |
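Context for the range check above (illustrative note): in a sloppy-arguments elements array the first two slots hold the context and the unmapped backing store, so the number of mapped parameters is the elements length minus 2, and any key at or beyond that falls through to the unmapped lookup. A standalone sketch of the check on untagged values follows; the generated code compares the tagged smis directly, which gives the same result for non-negative keys.

    // Illustrative only, not V8 code: the mapped-arguments bounds check on
    // untagged values. Slots 0 and 1 of the elements array are the context
    // and the backing-store arguments array, hence "length - 2".
    #include <cstdint>

    bool KeyIsMapped(uint64_t key, uint64_t elements_length) {
      uint64_t mapped_count = elements_length - 2;
      return key < mapped_count;  // otherwise: B(hs, unmapped_case)
    }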
| 382 | 382 |
| (...skipping 84 matching lines...) |
| 467 // ----------- S t a t e ------------- | 467 // ----------- S t a t e ------------- |
| 468 // -- x2 : name | 468 // -- x2 : name |
| 469 // -- lr : return address | 469 // -- lr : return address |
| 470 // -- x0 : receiver | 470 // -- x0 : receiver |
| 471 // ----------------------------------- | 471 // ----------------------------------- |
| 472 Isolate* isolate = masm->isolate(); | 472 Isolate* isolate = masm->isolate(); |
| 473 ASM_LOCATION("LoadIC::GenerateMiss"); | 473 ASM_LOCATION("LoadIC::GenerateMiss"); |
| 474 | 474 |
| 475 __ IncrementCounter(isolate->counters()->load_miss(), 1, x3, x4); | 475 __ IncrementCounter(isolate->counters()->load_miss(), 1, x3, x4); |
| 476 | 476 |
| 477 // TODO(jbramley): Does the target actually expect an argument in x3, or is | |
| 478 // this inherited from ARM's push semantics? | |
| 479 __ Mov(x3, x0); | |
| 480 __ Push(x3, x2); | |
| 481 | |
| 482 // Perform tail call to the entry. | 477 // Perform tail call to the entry. |
| | 478 __ Push(x0, x2); |
| 483 ExternalReference ref = | 479 ExternalReference ref = |
| 484 ExternalReference(IC_Utility(kLoadIC_Miss), isolate); | 480 ExternalReference(IC_Utility(kLoadIC_Miss), isolate); |
| 485 __ TailCallExternalReference(ref, 2, 1); | 481 __ TailCallExternalReference(ref, 2, 1); |
| 486 } | 482 } |
| 487 | 483 |
| 488 | 484 |
| 489 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 485 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { |
| 490 // ---------- S t a t e -------------- | 486 // ---------- S t a t e -------------- |
| 491 // -- x2 : name | 487 // -- x2 : name |
| 492 // -- lr : return address | 488 // -- lr : return address |
| 493 // -- x0 : receiver | 489 // -- x0 : receiver |
| 494 // ----------------------------------- | 490 // ----------------------------------- |
| 495 | 491 |
| 496 // TODO(jbramley): Does the target actually expect an argument in x3, or is | 492 __ Push(x0, x2); |
| 497 // this inherited from ARM's push semantics? | |
| 498 __ Mov(x3, x0); | |
| 499 __ Push(x3, x2); | |
| 500 | |
| 501 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); | 493 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); |
| 502 } | 494 } |
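Note on the simplification in GenerateMiss and GenerateRuntimeGetProperty above: the A64 Push accepts several registers in one call, so the receiver and name can be pushed as Push(x0, x2) directly, and the old copy into x3 (carried over from the ARM push sequence, as the removed TODOs suspected) is dropped. A small standalone stack model follows; it assumes, as elsewhere in this port, that Push stores its first operand at the higher address.

    // Illustrative stack model only, not V8 code. Both sequences leave the
    // same layout, with the name on top of the receiver:
    //   old:  Mov(x3, x0); Push(x3, x2);
    //   new:  Push(x0, x2);
    #include <cstdint>
    #include <vector>

    void PushReceiverAndName(std::vector<uint64_t>* stack,
                             uint64_t receiver, uint64_t name) {
      stack->push_back(receiver);  // first operand, deeper slot
      stack->push_back(name);      // second operand, top of stack
    }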
| 503 | 495 |
| 504 | 496 |
| 505 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) { | 497 void KeyedLoadIC::GenerateSloppyArguments(MacroAssembler* masm) { |
| 506 // ---------- S t a t e -------------- | 498 // ---------- S t a t e -------------- |
| 507 // -- lr : return address | 499 // -- lr : return address |
| 508 // -- x0 : key | 500 // -- x0 : key |
| 509 // -- x1 : receiver | 501 // -- x1 : receiver |
| 510 // ----------------------------------- | 502 // ----------------------------------- |
| 511 Register result = x0; | 503 Register result = x0; |
| 512 Register key = x0; | 504 Register key = x0; |
| 513 Register receiver = x1; | 505 Register receiver = x1; |
| 514 Label miss, unmapped; | 506 Label miss, unmapped; |
| 515 | 507 |
| (...skipping 11 matching lines...) |
| 527 __ JumpIfRoot(x2, Heap::kTheHoleValueRootIndex, &miss); | 519 __ JumpIfRoot(x2, Heap::kTheHoleValueRootIndex, &miss); |
| 528 // Move the result in x0. x0 must be preserved on miss. | 520 // Move the result in x0. x0 must be preserved on miss. |
| 529 __ Mov(result, x2); | 521 __ Mov(result, x2); |
| 530 __ Ret(); | 522 __ Ret(); |
| 531 | 523 |
| 532 __ Bind(&miss); | 524 __ Bind(&miss); |
| 533 GenerateMiss(masm); | 525 GenerateMiss(masm); |
| 534 } | 526 } |
| 535 | 527 |
| 536 | 528 |
| 537 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) { | 529 void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) { |
| 538 ASM_LOCATION("KeyedStoreIC::GenerateNonStrictArguments"); | 530 ASM_LOCATION("KeyedStoreIC::GenerateSloppyArguments"); |
| 539 // ---------- S t a t e -------------- | 531 // ---------- S t a t e -------------- |
| 540 // -- lr : return address | 532 // -- lr : return address |
| 541 // -- x0 : value | 533 // -- x0 : value |
| 542 // -- x1 : key | 534 // -- x1 : key |
| 543 // -- x2 : receiver | 535 // -- x2 : receiver |
| 544 // ----------------------------------- | 536 // ----------------------------------- |
| 545 | 537 |
| 546 Label slow, notin; | 538 Label slow, notin; |
| 547 | 539 |
| 548 Register value = x0; | 540 Register value = x0; |
| (...skipping 374 matching lines...) |
| 923 | 915 |
| 924 // The slow case calls into the runtime to complete the store without causing | 916 // The slow case calls into the runtime to complete the store without causing |
| 925 // an IC miss that would otherwise cause a transition to the generic stub. | 917 // an IC miss that would otherwise cause a transition to the generic stub. |
| 926 ExternalReference ref = | 918 ExternalReference ref = |
| 927 ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate()); | 919 ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate()); |
| 928 __ TailCallExternalReference(ref, 3, 1); | 920 __ TailCallExternalReference(ref, 3, 1); |
| 929 } | 921 } |
| 930 | 922 |
| 931 | 923 |
| 932 void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm, | 924 void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm, |
| 933 StrictModeFlag strict_mode) { | 925 StrictMode strict_mode) { |
| 934 ASM_LOCATION("KeyedStoreIC::GenerateRuntimeSetProperty"); | 926 ASM_LOCATION("KeyedStoreIC::GenerateRuntimeSetProperty"); |
| 935 // ---------- S t a t e -------------- | 927 // ---------- S t a t e -------------- |
| 936 // -- x0 : value | 928 // -- x0 : value |
| 937 // -- x1 : key | 929 // -- x1 : key |
| 938 // -- x2 : receiver | 930 // -- x2 : receiver |
| 939 // -- lr : return address | 931 // -- lr : return address |
| 940 // ----------------------------------- | 932 // ----------------------------------- |
| 941 | 933 |
| 942 // Push receiver, key and value for runtime call. | 934 // Push receiver, key and value for runtime call. |
| 943 __ Push(x2, x1, x0); | 935 __ Push(x2, x1, x0); |
| (...skipping 34 matching lines...) |
| 978 __ Ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset)); | 970 __ Ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset)); |
| 979 __ Cmp(elements_map, | 971 __ Cmp(elements_map, |
| 980 Operand(masm->isolate()->factory()->fixed_array_map())); | 972 Operand(masm->isolate()->factory()->fixed_array_map())); |
| 981 __ B(ne, fast_double); | 973 __ B(ne, fast_double); |
| 982 } | 974 } |
| 983 | 975 |
| 984 // HOLECHECK: guards "A[i] = V" | 976 // HOLECHECK: guards "A[i] = V" |
| 985 // We have to go to the runtime if the current value is the hole because there | 977 // We have to go to the runtime if the current value is the hole because there |
| 986 // may be a callback on the element. | 978 // may be a callback on the element. |
| 987 Label holecheck_passed; | 979 Label holecheck_passed; |
| 988 // TODO(all): This address calculation is repeated later (for the store | |
| 989 // itself). We should keep the result to avoid doing the work twice. | |
| 990 __ Add(x10, elements, FixedArray::kHeaderSize - kHeapObjectTag); | 980 __ Add(x10, elements, FixedArray::kHeaderSize - kHeapObjectTag); |
| 991 __ Add(x10, x10, Operand::UntagSmiAndScale(key, kPointerSizeLog2)); | 981 __ Add(x10, x10, Operand::UntagSmiAndScale(key, kPointerSizeLog2)); |
| 992 __ Ldr(x11, MemOperand(x10)); | 982 __ Ldr(x11, MemOperand(x10)); |
| 993 __ JumpIfNotRoot(x11, Heap::kTheHoleValueRootIndex, &holecheck_passed); | 983 __ JumpIfNotRoot(x11, Heap::kTheHoleValueRootIndex, &holecheck_passed); |
| 994 __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, x10, slow); | 984 __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, x10, slow); |
| 995 __ bind(&holecheck_passed); | 985 __ bind(&holecheck_passed); |
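The two Add instructions above form the address of elements[key] for the hole check (the removed TODO noted that the same calculation is repeated later for the store itself). A standalone sketch of the arithmetic follows; the constants are written out as assumed stand-ins for FixedArray::kHeaderSize, kHeapObjectTag, kSmiShift and kPointerSizeLog2 on this 64-bit port.

    // Illustrative address arithmetic only, not V8 code.
    #include <cstdint>

    uint64_t ElementAddress(uint64_t tagged_elements, uint64_t smi_key) {
      const uint64_t kHeaderSize = 16;      // map + length words
      const uint64_t kHeapObjectTag = 1;    // heap pointers carry tag bit 0
      const uint64_t kSmiShift = 32;        // smi payload in the upper half
      const uint64_t kPointerSizeLog2 = 3;  // 8-byte element slots

      uint64_t base = tagged_elements + kHeaderSize - kHeapObjectTag;
      uint64_t index = smi_key >> kSmiShift;       // UntagSmi...
      return base + (index << kPointerSizeLog2);   // ...AndScale
    }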
| 996 | 986 |
| 997 // Smi stores don't require further checks. | 987 // Smi stores don't require further checks. |
| 998 __ JumpIfSmi(value, &finish_store); | 988 __ JumpIfSmi(value, &finish_store); |
| 999 | 989 |
| (...skipping 32 matching lines...) |
| 1032 __ Bind(fast_double); | 1022 __ Bind(fast_double); |
| 1033 if (check_map == kCheckMap) { | 1023 if (check_map == kCheckMap) { |
| 1034 // Check for fast double array case. If this fails, call through to the | 1024 // Check for fast double array case. If this fails, call through to the |
| 1035 // runtime. | 1025 // runtime. |
| 1036 __ JumpIfNotRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex, slow); | 1026 __ JumpIfNotRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex, slow); |
| 1037 } | 1027 } |
| 1038 | 1028 |
| 1039 // HOLECHECK: guards "A[i] double hole?" | 1029 // HOLECHECK: guards "A[i] double hole?" |
| 1040 // We have to see if the double version of the hole is present. If so go to | 1030 // We have to see if the double version of the hole is present. If so go to |
| 1041 // the runtime. | 1031 // the runtime. |
| 1042 // TODO(all): This address calculation was done earlier. We should keep the | |
| 1043 // result to avoid doing the work twice. | |
| 1044 __ Add(x10, elements, FixedDoubleArray::kHeaderSize - kHeapObjectTag); | 1032 __ Add(x10, elements, FixedDoubleArray::kHeaderSize - kHeapObjectTag); |
| 1045 __ Add(x10, x10, Operand::UntagSmiAndScale(key, kPointerSizeLog2)); | 1033 __ Add(x10, x10, Operand::UntagSmiAndScale(key, kPointerSizeLog2)); |
| 1046 __ Ldr(x11, MemOperand(x10)); | 1034 __ Ldr(x11, MemOperand(x10)); |
| 1047 __ CompareAndBranch(x11, kHoleNanInt64, ne, &fast_double_without_map_check); | 1035 __ CompareAndBranch(x11, kHoleNanInt64, ne, &fast_double_without_map_check); |
| 1048 __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, x10, slow); | 1036 __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, x10, slow); |
| 1049 | 1037 |
| 1050 __ Bind(&fast_double_without_map_check); | 1038 __ Bind(&fast_double_without_map_check); |
| 1051 __ StoreNumberToDoubleElements(value, | 1039 __ StoreNumberToDoubleElements(value, |
| 1052 key, | 1040 key, |
| 1053 elements, | 1041 elements, |
| (...skipping 13 matching lines...) |
| 1067 // Transition the array appropriately depending on the value type. | 1055 // Transition the array appropriately depending on the value type. |
| 1068 __ Ldr(x10, FieldMemOperand(value, HeapObject::kMapOffset)); | 1056 __ Ldr(x10, FieldMemOperand(value, HeapObject::kMapOffset)); |
| 1069 __ JumpIfNotRoot(x10, Heap::kHeapNumberMapRootIndex, &non_double_value); | 1057 __ JumpIfNotRoot(x10, Heap::kHeapNumberMapRootIndex, &non_double_value); |
| 1070 | 1058 |
| 1071 // Value is a double. Transition FAST_SMI_ELEMENTS -> | 1059 // Value is a double. Transition FAST_SMI_ELEMENTS -> |
| 1072 // FAST_DOUBLE_ELEMENTS and complete the store. | 1060 // FAST_DOUBLE_ELEMENTS and complete the store. |
| 1073 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, | 1061 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, |
| 1074 FAST_DOUBLE_ELEMENTS, | 1062 FAST_DOUBLE_ELEMENTS, |
| 1075 receiver_map, | 1063 receiver_map, |
| 1076 x10, | 1064 x10, |
| | 1065 x11, |
| 1077 slow); | 1066 slow); |
| 1078 ASSERT(receiver_map.Is(x3)); // Transition code expects map in x3. | 1067 ASSERT(receiver_map.Is(x3)); // Transition code expects map in x3. |
| 1079 AllocationSiteMode mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, | 1068 AllocationSiteMode mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, |
| 1080 FAST_DOUBLE_ELEMENTS); | 1069 FAST_DOUBLE_ELEMENTS); |
| 1081 ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, slow); | 1070 ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, slow); |
| 1082 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 1071 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
| 1083 __ B(&fast_double_without_map_check); | 1072 __ B(&fast_double_without_map_check); |
| 1084 | 1073 |
| 1085 __ Bind(&non_double_value); | 1074 __ Bind(&non_double_value); |
| 1086 // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS. | 1075 // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS. |
| 1087 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, | 1076 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, |
| 1088 FAST_ELEMENTS, | 1077 FAST_ELEMENTS, |
| 1089 receiver_map, | 1078 receiver_map, |
| 1090 x10, | 1079 x10, |
| | 1080 x11, |
| 1091 slow); | 1081 slow); |
| 1092 ASSERT(receiver_map.Is(x3)); // Transition code expects map in x3. | 1082 ASSERT(receiver_map.Is(x3)); // Transition code expects map in x3. |
| 1093 mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS); | 1083 mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS); |
| 1094 ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm, mode, | 1084 ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm, mode, |
| 1095 slow); | 1085 slow); |
| 1096 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 1086 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
| 1097 __ B(&finish_store); | 1087 __ B(&finish_store); |
| 1098 | 1088 |
| 1099 __ Bind(&transition_double_elements); | 1089 __ Bind(&transition_double_elements); |
| 1100 // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a | 1090 // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a |
| 1101 // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and | 1091 // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and |
| 1102 // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS | 1092 // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS |
| 1103 __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, | 1093 __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, |
| 1104 FAST_ELEMENTS, | 1094 FAST_ELEMENTS, |
| 1105 receiver_map, | 1095 receiver_map, |
| 1106 x10, | 1096 x10, |
| | 1097 x11, |
| 1107 slow); | 1098 slow); |
| 1108 ASSERT(receiver_map.Is(x3)); // Transition code expects map in x3. | 1099 ASSERT(receiver_map.Is(x3)); // Transition code expects map in x3. |
| 1109 mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS); | 1100 mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS); |
| 1110 ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, slow); | 1101 ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, slow); |
| 1111 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 1102 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
| 1112 __ B(&finish_store); | 1103 __ B(&finish_store); |
| 1113 } | 1104 } |
| 1114 | 1105 |
| 1115 | 1106 |
| 1116 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, | 1107 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, |
| 1117 StrictModeFlag strict_mode) { | 1108 StrictMode strict_mode) { |
| 1118 ASM_LOCATION("KeyedStoreIC::GenerateGeneric"); | 1109 ASM_LOCATION("KeyedStoreIC::GenerateGeneric"); |
| 1119 // ---------- S t a t e -------------- | 1110 // ---------- S t a t e -------------- |
| 1120 // -- x0 : value | 1111 // -- x0 : value |
| 1121 // -- x1 : key | 1112 // -- x1 : key |
| 1122 // -- x2 : receiver | 1113 // -- x2 : receiver |
| 1123 // -- lr : return address | 1114 // -- lr : return address |
| 1124 // ----------------------------------- | 1115 // ----------------------------------- |
| 1125 Label slow; | 1116 Label slow; |
| 1126 Label array; | 1117 Label array; |
| 1127 Label fast_object; | 1118 Label fast_object; |
| (...skipping 145 matching lines...) |
| 1273 __ Ret(); | 1264 __ Ret(); |
| 1274 | 1265 |
| 1275 // Cache miss: Jump to runtime. | 1266 // Cache miss: Jump to runtime. |
| 1276 __ Bind(&miss); | 1267 __ Bind(&miss); |
| 1277 __ IncrementCounter(counters->store_normal_miss(), 1, x4, x5); | 1268 __ IncrementCounter(counters->store_normal_miss(), 1, x4, x5); |
| 1278 GenerateMiss(masm); | 1269 GenerateMiss(masm); |
| 1279 } | 1270 } |
| 1280 | 1271 |
| 1281 | 1272 |
| 1282 void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm, | 1273 void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm, |
| 1283 StrictModeFlag strict_mode) { | 1274 StrictMode strict_mode) { |
| 1284 ASM_LOCATION("StoreIC::GenerateRuntimeSetProperty"); | 1275 ASM_LOCATION("StoreIC::GenerateRuntimeSetProperty"); |
| 1285 // ----------- S t a t e ------------- | 1276 // ----------- S t a t e ------------- |
| 1286 // -- x0 : value | 1277 // -- x0 : value |
| 1287 // -- x1 : receiver | 1278 // -- x1 : receiver |
| 1288 // -- x2 : name | 1279 // -- x2 : name |
| 1289 // -- lr : return address | 1280 // -- lr : return address |
| 1290 // ----------------------------------- | 1281 // ----------------------------------- |
| 1291 | 1282 |
| 1292 __ Push(x1, x2, x0); | 1283 __ Push(x1, x2, x0); |
| 1293 | 1284 |
| (...skipping 110 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1404 ASSERT(to_patch->Mask(TestBranchMask) == TBNZ); | 1395 ASSERT(to_patch->Mask(TestBranchMask) == TBNZ); |
| 1405 // This is JumpIfSmi(smi_reg, branch_imm). | 1396 // This is JumpIfSmi(smi_reg, branch_imm). |
| 1406 patcher.tbz(smi_reg, 0, branch_imm); | 1397 patcher.tbz(smi_reg, 0, branch_imm); |
| 1407 } | 1398 } |
| 1408 } | 1399 } |
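For reference on the patched instruction above: smis have tag bit 0 clear, so tbz reg, #0, target branches exactly when the register holds a smi, and tbnz branches when it does not; rewriting TBNZ as TBZ therefore turns an inlined JumpIfNotSmi into a JumpIfSmi. A minimal standalone model of the condition being tested follows (not the real Instruction or Patcher API).

    // Assumes V8's pointer tagging: heap objects have bit 0 set, smis clear.
    #include <cstdint>

    inline bool IsSmi(uint64_t tagged_value) {
      return (tagged_value & 1) == 0;  // tbz reg, #0, target fires here
    }
    // JumpIfSmi(reg, target)    == tbz  reg, #0, target
    // JumpIfNotSmi(reg, target) == tbnz reg, #0, target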
| 1409 | 1400 |
| 1410 | 1401 |
| 1411 } } // namespace v8::internal | 1402 } } // namespace v8::internal |
| 1412 | 1403 |
| 1413 #endif // V8_TARGET_ARCH_A64 | 1404 #endif // V8_TARGET_ARCH_A64 |