| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 456 matching lines...) | |
| 467 // ----------- S t a t e ------------- | 467 // ----------- S t a t e ------------- |
| 468 // -- x2 : name | 468 // -- x2 : name |
| 469 // -- lr : return address | 469 // -- lr : return address |
| 470 // -- x0 : receiver | 470 // -- x0 : receiver |
| 471 // ----------------------------------- | 471 // ----------------------------------- |
| 472 Isolate* isolate = masm->isolate(); | 472 Isolate* isolate = masm->isolate(); |
| 473 ASM_LOCATION("LoadIC::GenerateMiss"); | 473 ASM_LOCATION("LoadIC::GenerateMiss"); |
| 474 | 474 |
| 475 __ IncrementCounter(isolate->counters()->load_miss(), 1, x3, x4); | 475 __ IncrementCounter(isolate->counters()->load_miss(), 1, x3, x4); |
| 476 | 476 |
| 477 // TODO(jbramley): Does the target actually expect an argument in x3, or is | |
| 478 // this inherited from ARM's push semantics? | |
| 479 __ Mov(x3, x0); | |
| 480 __ Push(x3, x2); | |
| 481 | |
| 482 // Perform tail call to the entry. | 477 // Perform tail call to the entry. |
| | 478 __ Push(x0, x2); |
| 483 ExternalReference ref = | 479 ExternalReference ref = |
| 484 ExternalReference(IC_Utility(kLoadIC_Miss), isolate); | 480 ExternalReference(IC_Utility(kLoadIC_Miss), isolate); |
| 485 __ TailCallExternalReference(ref, 2, 1); | 481 __ TailCallExternalReference(ref, 2, 1); |
| 486 } | 482 } |
| 487 | 483 |
| 488 | 484 |
| 489 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 485 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { |
| 490 // ---------- S t a t e -------------- | 486 // ---------- S t a t e -------------- |
| 491 // -- x2 : name | 487 // -- x2 : name |
| 492 // -- lr : return address | 488 // -- lr : return address |
| 493 // -- x0 : receiver | 489 // -- x0 : receiver |
| 494 // ----------------------------------- | 490 // ----------------------------------- |
| 495 | 491 |
| 496 // TODO(jbramley): Does the target actually expect an argument in x3, or is | 492 __ Push(x0, x2); |
| 497 // this inherited from ARM's push semantics? | |
| 498 __ Mov(x3, x0); | |
| 499 __ Push(x3, x2); | |
| 500 | |
| 501 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); | 493 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); |
| 502 } | 494 } |
| 503 | 495 |
| 504 | 496 |
| 505 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) { | 497 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) { |
| 506 // ---------- S t a t e -------------- | 498 // ---------- S t a t e -------------- |
| 507 // -- lr : return address | 499 // -- lr : return address |
| 508 // -- x0 : key | 500 // -- x0 : key |
| 509 // -- x1 : receiver | 501 // -- x1 : receiver |
| 510 // ----------------------------------- | 502 // ----------------------------------- |
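
Both functions above resolve the deleted TODO the same way: the miss handler and the runtime getter take their arguments from the stack, not from x3, so staging the receiver in x3 before the push was leftover from the ARM port, and `__ Push(x0, x2)` does the job directly. Below is a minimal sketch of what the two-register push amounts to, with the stack modelled as a plain array (assumptions, not V8's MacroAssembler: `Push(x0, x2)` leaves x0, the receiver, in the higher slot, i.e. it is "pushed first", and the A64 stack pointer stays 16-byte aligned because the pair can be written with a single `stp`):

```cpp
#include <cstdint>

// Sketch only: a two-register push as one pre-indexed store pair.
struct StackModel {
  uint64_t* sp;  // modelled stack pointer (grows downward)
  void PushPair(uint64_t first, uint64_t second) {
    sp -= 2;         // stp ..., [sp, #-16]!  (one instruction, 16 bytes)
    sp[1] = first;   // receiver (x0) in the higher slot: pushed "first"
    sp[0] = second;  // name (x2) in the lower slot: top of stack
  }
};
```

The `TailCallExternalReference(ref, 2, 1)` call then declares exactly those two stack arguments (and one result slot), which is consistent with the target expecting nothing in x3.
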
| (...skipping 467 matching lines...) | |
| 978 __ Ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset)); | 970 __ Ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset)); |
| 979 __ Cmp(elements_map, | 971 __ Cmp(elements_map, |
| 980 Operand(masm->isolate()->factory()->fixed_array_map())); | 972 Operand(masm->isolate()->factory()->fixed_array_map())); |
| 981 __ B(ne, fast_double); | 973 __ B(ne, fast_double); |
| 982 } | 974 } |
| 983 | 975 |
| 984 // HOLECHECK: guards "A[i] = V" | 976 // HOLECHECK: guards "A[i] = V" |
| 985 // We have to go to the runtime if the current value is the hole because there | 977 // We have to go to the runtime if the current value is the hole because there |
| 986 // may be a callback on the element. | 978 // may be a callback on the element. |
| 987 Label holecheck_passed; | 979 Label holecheck_passed; |
| 988 // TODO(all): This address calculation is repeated later (for the store | |
| 989 // itself). We should keep the result to avoid doing the work twice. | |
| 990 __ Add(x10, elements, FixedArray::kHeaderSize - kHeapObjectTag); | 980 __ Add(x10, elements, FixedArray::kHeaderSize - kHeapObjectTag); |
| 991 __ Add(x10, x10, Operand::UntagSmiAndScale(key, kPointerSizeLog2)); | 981 __ Add(x10, x10, Operand::UntagSmiAndScale(key, kPointerSizeLog2)); |
| 992 __ Ldr(x11, MemOperand(x10)); | 982 __ Ldr(x11, MemOperand(x10)); |
| 993 __ JumpIfNotRoot(x11, Heap::kTheHoleValueRootIndex, &holecheck_passed); | 983 __ JumpIfNotRoot(x11, Heap::kTheHoleValueRootIndex, &holecheck_passed); |
| 994 __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, x10, slow); | 984 __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, x10, slow); |
| 995 __ bind(&holecheck_passed); | 985 __ bind(&holecheck_passed); |
| 996 | 986 |
| 997 // Smi stores don't require further checks. | 987 // Smi stores don't require further checks. |
| 998 __ JumpIfSmi(value, &finish_store); | 988 __ JumpIfSmi(value, &finish_store); |
| 999 | 989 |
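
In this hunk the change only drops a stale TODO; the element-address computation itself is unchanged (and, as the old comment observed, is still repeated later for the store itself). What the two `Add` instructions compute, as a hedged C++ sketch (assumptions: this port keeps the smi payload in the upper 32 bits, so untagging is a 32-bit arithmetic shift, and `kPointerSizeLog2 == 3`; the header and tag constants below are stand-ins for the real V8 values):

```cpp
#include <cstdint>

const uintptr_t kHeaderSize = 16;    // stand-in for FixedArray::kHeaderSize
const uintptr_t kHeapObjectTag = 1;  // tagged heap pointers have bit 0 set

// Address of element `smi_key` (a tagged smi) in a FixedArray backing store.
uintptr_t ElementAddress(uintptr_t elements, int64_t smi_key) {
  uintptr_t base = elements + kHeaderSize - kHeapObjectTag;
  int64_t index = smi_key >> 32;          // UntagSmi
  return base + (uintptr_t(index) << 3);  // ...AndScale(kPointerSizeLog2)
}
```

The hole check itself exists because a hole at this index may be shadowed further up the prototype chain, e.g. by an accessor on a dictionary-mode prototype; in that case the store must take the runtime path, which is what the `JumpIfDictionaryInPrototypeChain` fallback guards.
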
| (...skipping 32 matching lines...) | |
| 1032 __ Bind(fast_double); | 1022 __ Bind(fast_double); |
| 1033 if (check_map == kCheckMap) { | 1023 if (check_map == kCheckMap) { |
| 1034 // Check for fast double array case. If this fails, call through to the | 1024 // Check for fast double array case. If this fails, call through to the |
| 1035 // runtime. | 1025 // runtime. |
| 1036 __ JumpIfNotRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex, slow); | 1026 __ JumpIfNotRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex, slow); |
| 1037 } | 1027 } |
| 1038 | 1028 |
| 1039 // HOLECHECK: guards "A[i] double hole?" | 1029 // HOLECHECK: guards "A[i] double hole?" |
| 1040 // We have to see if the double version of the hole is present. If so go to | 1030 // We have to see if the double version of the hole is present. If so go to |
| 1041 // the runtime. | 1031 // the runtime. |
| 1042 // TODO(all): This address calculation was done earlier. We should keep the | |
| 1043 // result to avoid doing the work twice. | |
| 1044 __ Add(x10, elements, FixedDoubleArray::kHeaderSize - kHeapObjectTag); | 1032 __ Add(x10, elements, FixedDoubleArray::kHeaderSize - kHeapObjectTag); |
| 1045 __ Add(x10, x10, Operand::UntagSmiAndScale(key, kPointerSizeLog2)); | 1033 __ Add(x10, x10, Operand::UntagSmiAndScale(key, kPointerSizeLog2)); |
| 1046 __ Ldr(x11, MemOperand(x10)); | 1034 __ Ldr(x11, MemOperand(x10)); |
| 1047 __ CompareAndBranch(x11, kHoleNanInt64, ne, &fast_double_without_map_check); | 1035 __ CompareAndBranch(x11, kHoleNanInt64, ne, &fast_double_without_map_check); |
| 1048 __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, x10, slow); | 1036 __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, x10, slow); |
| 1049 | 1037 |
| 1050 __ Bind(&fast_double_without_map_check); | 1038 __ Bind(&fast_double_without_map_check); |
| 1051 __ StoreNumberToDoubleElements(value, | 1039 __ StoreNumberToDoubleElements(value, |
| 1052 key, | 1040 key, |
| 1053 elements, | 1041 elements, |
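
For the double-array case the hole is a particular NaN bit pattern, so the check above is a raw 64-bit integer compare (`CompareAndBranch` against `kHoleNanInt64`) rather than a floating-point compare, which could never match because NaN compares unequal to everything, including itself. A sketch of the test, with the constant as a hypothetical stand-in for V8's real encoding:

```cpp
#include <cstdint>
#include <cstring>

const uint64_t kHoleNanInt64 = 0x7FF7FFFFFFFFFFFFull;  // hypothetical value

bool IsDoubleHole(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);  // bit-exact view of the double
  // Compare bits, not values: the hole is a NaN, so `d == hole` is
  // always false under IEEE-754.
  return bits == kHoleNanInt64;
}
```
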
| (...skipping 350 matching lines...) | |
| 1404 ASSERT(to_patch->Mask(TestBranchMask) == TBNZ); | 1392 ASSERT(to_patch->Mask(TestBranchMask) == TBNZ); |
| 1405 // This is JumpIfSmi(smi_reg, branch_imm). | 1393 // This is JumpIfSmi(smi_reg, branch_imm). |
| 1406 patcher.tbz(smi_reg, 0, branch_imm); | 1394 patcher.tbz(smi_reg, 0, branch_imm); |
| 1407 } | 1395 } |
| 1408 } | 1396 } |
| 1409 | 1397 |
| 1410 | 1398 |
| 1411 } } // namespace v8::internal | 1399 } } // namespace v8::internal |
| 1412 | 1400 |
| 1413 #endif // V8_TARGET_ARCH_A64 | 1401 #endif // V8_TARGET_ARCH_A64 |
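
The final hunk is the inline smi-check patcher. On this port a smi check is a single test-bit-and-branch on bit 0 of the value register (under V8's tagging scheme, a clear bit 0 marks a smi), so flipping between `tbz` and `tbnz` in place inverts the check without relocating any code. In the branch shown, the `ASSERT` confirms the instruction is currently the `TBNZ` form (a `JumpIfNotSmi`) and the patcher re-emits it as `tbz` with the same register and branch offset, turning it into `JumpIfSmi(smi_reg, branch_imm)`. A sketch of the two branch conditions:

```cpp
#include <cstdint>

// Condition tested by `tbz smi_reg, #0, target` (JumpIfSmi): branch taken
// when tag bit 0 is clear, i.e. the value is a smi.
bool JumpIfSmiTaken(uint64_t tagged) { return (tagged & 1) == 0; }

// Condition tested by `tbnz smi_reg, #0, target` (JumpIfNotSmi).
bool JumpIfNotSmiTaken(uint64_t tagged) { return (tagged & 1) != 0; }
```
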