| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
| 8 | 8 |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 5480 matching lines...) |
| 5491 | 5491 |
| 5492 __ Bind(&fast_elements_case); | 5492 __ Bind(&fast_elements_case); |
| 5493 GenerateCase(masm, FAST_ELEMENTS); | 5493 GenerateCase(masm, FAST_ELEMENTS); |
| 5494 } | 5494 } |
| 5495 | 5495 |
| 5496 | 5496 |
| 5497 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { | 5497 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { |
| 5498 Register context = cp; | 5498 Register context = cp; |
| 5499 Register result = x0; | 5499 Register result = x0; |
| 5500 Register slot = x2; | 5500 Register slot = x2; |
| 5501 Register name = x3; | |
| 5502 Label slow_case; | 5501 Label slow_case; |
| 5503 | 5502 |
| 5504 // Go up the context chain to the script context. | 5503 // Go up the context chain to the script context. |
| 5505 for (int i = 0; i < depth(); ++i) { | 5504 for (int i = 0; i < depth(); ++i) { |
| 5506 __ Ldr(result, ContextMemOperand(context, Context::PREVIOUS_INDEX)); | 5505 __ Ldr(result, ContextMemOperand(context, Context::PREVIOUS_INDEX)); |
| 5507 context = result; | 5506 context = result; |
| 5508 } | 5507 } |
| 5509 | 5508 |
| 5510 // Load the PropertyCell value at the specified slot. | 5509 // Load the PropertyCell value at the specified slot. |
| 5511 __ Add(result, context, Operand(slot, LSL, kPointerSizeLog2)); | 5510 __ Add(result, context, Operand(slot, LSL, kPointerSizeLog2)); |
| 5512 __ Ldr(result, ContextMemOperand(result)); | 5511 __ Ldr(result, ContextMemOperand(result)); |
| 5513 __ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset)); | 5512 __ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset)); |
| 5514 | 5513 |
| 5515 // If the result is not the_hole, return. Otherwise, handle in the runtime. | 5514 // If the result is not the_hole, return. Otherwise, handle in the runtime. |
| 5516 __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &slow_case); | 5515 __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &slow_case); |
| 5517 __ Ret(); | 5516 __ Ret(); |
| 5518 | 5517 |
| 5519 // Fall back to the runtime. | 5518 // Fall back to the runtime. |
| 5520 __ Bind(&slow_case); | 5519 __ Bind(&slow_case); |
| 5521 __ SmiTag(slot); | 5520 __ SmiTag(slot); |
| 5522 __ Push(slot, name); | 5521 __ Push(slot); |
| 5523 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1); | 5522 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1); |
| 5524 } | 5523 } |
| 5525 | 5524 |
| 5526 | 5525 |
| 5527 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { | 5526 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { |
| 5528 Register context = cp; | 5527 Register context = cp; |
| 5529 Register value = x0; | 5528 Register value = x0; |
| 5530 Register slot = x2; | 5529 Register slot = x2; |
| 5531 Register name = x3; | |
| 5532 Register context_temp = x10; | 5530 Register context_temp = x10; |
| 5533 Register cell = x10; | 5531 Register cell = x10; |
| 5534 Register cell_details = x11; | 5532 Register cell_details = x11; |
| 5535 Register cell_value = x12; | 5533 Register cell_value = x12; |
| 5536 Register cell_value_map = x13; | 5534 Register cell_value_map = x13; |
| 5537 Register value_map = x14; | 5535 Register value_map = x14; |
| 5538 Label fast_heapobject_case, fast_smi_case, slow_case; | 5536 Label fast_heapobject_case, fast_smi_case, slow_case; |
| 5539 | 5537 |
| 5540 if (FLAG_debug_code) { | 5538 if (FLAG_debug_code) { |
| 5541 __ CompareRoot(value, Heap::kTheHoleValueRootIndex); | 5539 __ CompareRoot(value, Heap::kTheHoleValueRootIndex); |
| 5542 __ Check(ne, kUnexpectedValue); | 5540 __ Check(ne, kUnexpectedValue); |
| 5543 __ AssertName(name); | |
| 5544 } | 5541 } |
| 5545 | 5542 |
| 5546 // Go up the context chain to the script context. | 5543 // Go up the context chain to the script context. |
| 5547 for (int i = 0; i < depth(); i++) { | 5544 for (int i = 0; i < depth(); i++) { |
| 5548 __ Ldr(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX)); | 5545 __ Ldr(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX)); |
| 5549 context = context_temp; | 5546 context = context_temp; |
| 5550 } | 5547 } |
| 5551 | 5548 |
| 5552 // Load the PropertyCell at the specified slot. | 5549 // Load the PropertyCell at the specified slot. |
| 5553 __ Add(cell, context, Operand(slot, LSL, kPointerSizeLog2)); | 5550 __ Add(cell, context, Operand(slot, LSL, kPointerSizeLog2)); |
| (...skipping 78 matching lines...) |
| 5632 __ JumpIfSmi(cell_value, &slow_case); | 5629 __ JumpIfSmi(cell_value, &slow_case); |
| 5633 | 5630 |
| 5634 __ Ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset)); | 5631 __ Ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset)); |
| 5635 __ Ldr(value_map, FieldMemOperand(value, HeapObject::kMapOffset)); | 5632 __ Ldr(value_map, FieldMemOperand(value, HeapObject::kMapOffset)); |
| 5636 __ Cmp(cell_value_map, value_map); | 5633 __ Cmp(cell_value_map, value_map); |
| 5637 __ B(eq, &fast_heapobject_case); | 5634 __ B(eq, &fast_heapobject_case); |
| 5638 | 5635 |
| 5639 // Fall back to the runtime. | 5636 // Fall back to the runtime. |
| 5640 __ Bind(&slow_case); | 5637 __ Bind(&slow_case); |
| 5641 __ SmiTag(slot); | 5638 __ SmiTag(slot); |
| 5642 __ Push(slot, name, value); | 5639 __ Push(slot, value); |
| 5643 __ TailCallRuntime(is_strict(language_mode()) | 5640 __ TailCallRuntime(is_strict(language_mode()) |
| 5644 ? Runtime::kStoreGlobalViaContext_Strict | 5641 ? Runtime::kStoreGlobalViaContext_Strict |
| 5645 : Runtime::kStoreGlobalViaContext_Sloppy, | 5642 : Runtime::kStoreGlobalViaContext_Sloppy, |
| 5646 3, 1); | 5643 2, 1); |
| 5647 } | 5644 } |
| 5648 | 5645 |
| 5649 | 5646 |
| 5650 // The number of registers that CallApiFunctionAndReturn will need to save on | 5647 // The number of registers that CallApiFunctionAndReturn will need to save on |
| 5651 // the stack. The space for these registers needs to be allocated in the | 5648 // the stack. The space for these registers needs to be allocated in the |
| 5652 // ExitFrame before calling CallApiFunctionAndReturn. | 5649 // ExitFrame before calling CallApiFunctionAndReturn. |
| 5653 static const int kCallApiFunctionSpillSpace = 4; | 5650 static const int kCallApiFunctionSpillSpace = 4; |
| 5654 | 5651 |
| 5655 | 5652 |
| 5656 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 5653 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
| (...skipping 333 matching lines...) |
| 5990 MemOperand(fp, 6 * kPointerSize), NULL); | 5987 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5991 } | 5988 } |
| 5992 | 5989 |
| 5993 | 5990 |
| 5994 #undef __ | 5991 #undef __ |
| 5995 | 5992 |
| 5996 } // namespace internal | 5993 } // namespace internal |
| 5997 } // namespace v8 | 5994 } // namespace v8 |
| 5998 | 5995 |
| 5999 #endif // V8_TARGET_ARCH_ARM64 | 5996 #endif // V8_TARGET_ARCH_ARM64 |