OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 5472 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5483 | 5483 |
5484 Label fast_elements_case; | 5484 Label fast_elements_case; |
5485 __ CompareAndBranch(kind, FAST_ELEMENTS, eq, &fast_elements_case); | 5485 __ CompareAndBranch(kind, FAST_ELEMENTS, eq, &fast_elements_case); |
5486 GenerateCase(masm, FAST_HOLEY_ELEMENTS); | 5486 GenerateCase(masm, FAST_HOLEY_ELEMENTS); |
5487 | 5487 |
5488 __ Bind(&fast_elements_case); | 5488 __ Bind(&fast_elements_case); |
5489 GenerateCase(masm, FAST_ELEMENTS); | 5489 GenerateCase(masm, FAST_ELEMENTS); |
5490 } | 5490 } |
5491 | 5491 |
5492 | 5492 |
| 5493 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { |
| 5494 Register context = cp; |
| 5495 Register result = x0; |
| 5496 Register slot = x2; |
| 5497 Register name = x3; |
| 5498 Label slow_case; |
| 5499 |
| 5500 // Go up the context chain to the script context. |
| 5501 for (int i = 0; i < depth(); ++i) { |
| 5502 __ Ldr(result, ContextMemOperand(context, Context::PREVIOUS_INDEX)); |
| 5503 context = result; |
| 5504 } |
| 5505 |
| 5506 // Load the PropertyCell value at the specified slot. |
| 5507 __ Add(result, context, Operand(slot, LSL, kPointerSizeLog2)); |
| 5508 __ Ldr(result, ContextMemOperand(result)); |
| 5509 __ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset)); |
| 5510 |
| 5511 // If the result is not the_hole, return. Otherwise, handle in the runtime. |
| 5512 __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &slow_case); |
| 5513 __ Ret(); |
| 5514 |
| 5515 // Fallback to runtime. |
| 5516 __ Bind(&slow_case); |
| 5517 __ SmiTag(slot); |
| 5518 __ Push(slot, name); |
| 5519 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1); |
| 5520 } |
| 5521 |
| 5522 |
void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
  // Stub calling convention: the value to store arrives in x0, the
  // script-context slot index in x2 and the property name in x3.
  Register context = cp;
  Register value = x0;
  Register slot = x2;
  Register name = x3;
  // Scratch registers. Note that cell intentionally aliases context_temp
  // (both x10): the context walk below finishes before cell is first written.
  Register context_temp = x10;
  Register cell = x10;
  Register cell_details = x11;
  Register cell_value = x12;
  Register cell_value_map = x13;
  Register value_map = x14;
  Label fast_case, slow_case;

  if (FLAG_debug_code) {
    // The hole is never stored through this stub, and name must be an actual
    // Name object.
    __ CompareRoot(value, Heap::kTheHoleValueRootIndex);
    __ Check(ne, kUnexpectedValue);
    __ AssertName(name);
  }

  // Go up the context chain to the script context.
  for (int i = 0; i < depth(); i++) {
    __ Ldr(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX));
    context = context_temp;
  }

  // Load the PropertyCell at the specified slot.
  __ Add(cell, context, Operand(slot, LSL, kPointerSizeLog2));
  __ Ldr(cell, ContextMemOperand(cell));

  // Load PropertyDetails for the cell (actually only the cell_type and kind
  // bits are kept; everything else is masked off).
  __ Ldr(cell_details,
         UntagSmiFieldMemOperand(cell, PropertyCell::kDetailsOffset));
  __ And(cell_details, cell_details,
         PropertyDetails::PropertyCellTypeField::kMask |
             PropertyDetails::KindField::kMask);

  // Check if PropertyCell holds mutable data: that is the common fast path,
  // where the new value can simply be stored into the cell.
  Label not_mutable_data;
  __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
                           PropertyCellType::kMutable) |
                           PropertyDetails::KindField::encode(kData));
  __ B(ne, &not_mutable_data);
  __ Bind(&fast_case);
  __ Str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
  // RecordWriteField clobbers the value register, so we copy it before the
  // call. (x11/x12 are reused as its scratch registers; cell_details and
  // cell_value are dead at this point since we return immediately after.)
  __ Mov(x11, value);
  __ RecordWriteField(cell, PropertyCell::kValueOffset, x11, x12,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ Ret();

  __ Bind(&not_mutable_data);
  // Check if PropertyCell value matches the new value (relevant for Constant
  // and ConstantType cells): re-storing the same value is a no-op.
  Label not_same_value;
  __ Ldr(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset));
  __ Cmp(cell_value, value);
  __ B(ne, &not_same_value);

  if (FLAG_debug_code) {
    Label done;
    // This can only be true for Constant and ConstantType cells, because we
    // never store the hole via this stub (checked on entry above).
    __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
                             PropertyCellType::kConstant) |
                             PropertyDetails::KindField::encode(kData));
    __ B(eq, &done);
    __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
                             PropertyCellType::kConstantType) |
                             PropertyDetails::KindField::encode(kData));
    __ Check(eq, kUnexpectedValue);
    __ Bind(&done);
  }
  __ Ret();
  __ Bind(&not_same_value);

  // Check if PropertyCell contains data with constant type; every other cell
  // type with a differing value must be handled by the runtime.
  __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
                           PropertyCellType::kConstantType) |
                           PropertyDetails::KindField::encode(kData));
  __ B(ne, &slow_case);

  // Now either both old and new values must be smis or both must be heap
  // objects with same map.
  Label value_is_heap_object;
  __ JumpIfNotSmi(value, &value_is_heap_object);
  __ JumpIfNotSmi(cell_value, &slow_case);
  // Old and new values are smis, no need for a write barrier here.
  __ Str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
  __ Ret();

  __ Bind(&value_is_heap_object);
  __ JumpIfSmi(cell_value, &slow_case);

  __ Ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset));
  __ Ldr(value_map, FieldMemOperand(value, HeapObject::kMapOffset));
  __ Cmp(cell_value_map, value_map);
  // Same map as the cached value: reuse the mutable-data store above, which
  // includes the write barrier.
  __ B(eq, &fast_case);

  // Fall back to the runtime.
  __ Bind(&slow_case);
  __ SmiTag(slot);
  __ Push(slot, name, value);
  __ TailCallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreGlobalViaContext_Strict
                         : Runtime::kStoreGlobalViaContext_Sloppy,
                     3, 1);
}
| 5631 |
| 5632 |
5493 // The number of registers that CallApiFunctionAndReturn will need to save on | 5633 // The number of registers that CallApiFunctionAndReturn will need to save on |
5494 // the stack. The space for these registers needs to be allocated in the | 5634 // the stack. The space for these registers needs to be allocated in the |
5495 // ExitFrame before calling CallApiFunctionAndReturn. | 5635 // ExitFrame before calling CallApiFunctionAndReturn. |
5496 static const int kCallApiFunctionSpillSpace = 4; | 5636 static const int kCallApiFunctionSpillSpace = 4; |
5497 | 5637 |
5498 | 5638 |
5499 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 5639 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
5500 return static_cast<int>(ref0.address() - ref1.address()); | 5640 return static_cast<int>(ref0.address() - ref1.address()); |
5501 } | 5641 } |
5502 | 5642 |
(...skipping 330 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5833 MemOperand(fp, 6 * kPointerSize), NULL); | 5973 MemOperand(fp, 6 * kPointerSize), NULL); |
5834 } | 5974 } |
5835 | 5975 |
5836 | 5976 |
5837 #undef __ | 5977 #undef __ |
5838 | 5978 |
5839 } // namespace internal | 5979 } // namespace internal |
5840 } // namespace v8 | 5980 } // namespace v8 |
5841 | 5981 |
5842 #endif // V8_TARGET_ARCH_ARM64 | 5982 #endif // V8_TARGET_ARCH_ARM64 |
OLD | NEW |