Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(53)

Unified Diff: src/arm64/code-stubs-arm64.cc

Issue 1238143002: [stubs] Optimize LoadGlobalViaContextStub and StoreGlobalViaContextStub. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix ARM typo. Created 5 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: src/arm64/code-stubs-arm64.cc
diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc
index 01189f21212cf18f9694698e0171bb510fca3a49..d0ee80e68c47e1dccd750f7e35c1614e8a5a30e6 100644
--- a/src/arm64/code-stubs-arm64.cc
+++ b/src/arm64/code-stubs-arm64.cc
@@ -5494,6 +5494,153 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
+// Emits the fast path for loading a global variable stored in a script
+// context slot. Register assignments (x2 = untagged slot index, x3 = name,
+// result in x0) are presumably fixed by the stub's call descriptor — confirm
+// against LoadGlobalViaContextDescriptor. Falls back to the runtime when the
+// PropertyCell holds the_hole.
+void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
+ Register context = cp;
+ Register result = x0;
+ Register slot = x2;
+ Register name = x3;
+ Label slow_case;
+
+ // Go up the context chain to the script context. depth() is a compile-time
+ // stub parameter, so this unrolls into depth() loads with no runtime loop.
+ for (int i = 0; i < depth(); ++i) {
+ __ Ldr(result, ContextMemOperand(context, Context::PREVIOUS_INDEX));
+ context = result;
+ }
+
+ // Load the PropertyCell value at the specified slot.
+ // slot is an untagged index, hence the shift by kPointerSizeLog2 to form
+ // the byte offset into the context.
+ __ Add(result, context, Operand(slot, LSL, kPointerSizeLog2));
+ __ Ldr(result, ContextMemOperand(result));
+ __ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));
+
+ // If the result is not the_hole, return. Otherwise, handle in the runtime.
+ __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &slow_case);
+ __ Ret();
+
+ // Fallback to runtime. The runtime expects a tagged (smi) slot index and
+ // the name on the stack.
+ __ Bind(&slow_case);
+ __ SmiTag(slot);
+ __ Push(slot, name);
+ __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
+}
+
+
+// Emits the fast path for storing to a global variable stored in a script
+// context slot. Register assignments (x0 = value, x2 = untagged slot index,
+// x3 = name) are presumably fixed by the stub's call descriptor — confirm
+// against StoreGlobalViaContextDescriptor. Handles mutable cells and
+// constant-type cells inline; everything else (constant transitions,
+// invalidated cells, etc.) goes to the runtime.
+void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
+ Register context = cp;
+ Register value = x0;
+ Register slot = x2;
+ Register name = x3;
+ // NOTE: context_temp and cell deliberately alias x10 — context_temp is
+ // dead once the script context has been reached, so cell can reuse it.
+ Register context_temp = x10;
+ Register cell = x10;
+ Register cell_details = x11;
+ Register cell_value = x12;
+ Register cell_value_map = x13;
+ Register value_map = x14;
+ Label fast_heapobject_case, fast_smi_case, slow_case;
+
+ // Sanity checks: the_hole is never stored through this stub, and name must
+ // be a Name. Debug builds only.
+ if (FLAG_debug_code) {
+ __ CompareRoot(value, Heap::kTheHoleValueRootIndex);
+ __ Check(ne, kUnexpectedValue);
+ __ AssertName(name);
+ }
+
+ // Go up the context chain to the script context. depth() is a compile-time
+ // stub parameter, so this unrolls into depth() loads.
+ for (int i = 0; i < depth(); i++) {
+ __ Ldr(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX));
+ context = context_temp;
+ }
+
+ // Load the PropertyCell at the specified slot. slot is an untagged index,
+ // hence the shift to form the byte offset.
+ __ Add(cell, context, Operand(slot, LSL, kPointerSizeLog2));
+ __ Ldr(cell, ContextMemOperand(cell));
+
+ // Load PropertyDetails for the cell (actually only the cell_type and kind).
+ __ Ldr(cell_details,
+ UntagSmiFieldMemOperand(cell, PropertyCell::kDetailsOffset));
+ __ And(cell_details, cell_details,
+ PropertyDetails::PropertyCellTypeField::kMask |
+ PropertyDetails::KindField::kMask);
+
+ // Check if PropertyCell holds mutable data.
+ Label not_mutable_data;
+ __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kMutable) |
+ PropertyDetails::KindField::encode(kData));
+ __ B(ne, &not_mutable_data);
+ __ JumpIfSmi(value, &fast_smi_case);
+ __ Bind(&fast_heapobject_case);
+ __ Str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+ // RecordWriteField clobbers the value register, so we copy it before the
+ // call. x11 (cell_details) and x12 (cell_value) are safe to reuse as
+ // scratch here: neither is needed again on this path.
+ __ Mov(x11, value);
+ __ RecordWriteField(cell, PropertyCell::kValueOffset, x11, x12,
+ kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ __ Ret();
+
+ __ Bind(&not_mutable_data);
+ // Check if PropertyCell value matches the new value (relevant for Constant,
+ // ConstantType and Undefined cells). Storing the same value is a no-op, so
+ // we can return without touching the cell.
+ Label not_same_value;
+ __ Ldr(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+ __ Cmp(cell_value, value);
+ __ B(ne, &not_same_value);
+
+ if (FLAG_debug_code) {
+ Label done;
+ // This can only be true for Constant, ConstantType and Undefined cells,
+ // because we never store the_hole via this stub.
+ __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstant) |
+ PropertyDetails::KindField::encode(kData));
+ __ B(eq, &done);
+ __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData));
+ __ B(eq, &done);
+ __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kUndefined) |
+ PropertyDetails::KindField::encode(kData));
+ __ Check(eq, kUnexpectedValue);
+ __ Bind(&done);
+ }
+ __ Ret();
+ __ Bind(&not_same_value);
+
+ // Check if PropertyCell contains data with constant type. Any other cell
+ // type with a differing value requires a runtime transition.
+ __ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
+ PropertyCellType::kConstantType) |
+ PropertyDetails::KindField::encode(kData));
+ __ B(ne, &slow_case);
+
+ // Now either both old and new values must be smis or both must be heap
+ // objects with same map.
+ Label value_is_heap_object;
+ __ JumpIfNotSmi(value, &value_is_heap_object);
+ __ JumpIfNotSmi(cell_value, &slow_case);
+ // Old and new values are smis, no need for a write barrier here.
+ __ Bind(&fast_smi_case);
+ __ Str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
+ __ Ret();
+
+ __ Bind(&value_is_heap_object);
+ __ JumpIfSmi(cell_value, &slow_case);
+
+ // Same-map heap objects preserve the cell's constant type, so the store
+ // can take the fast heap-object path (which emits the write barrier).
+ __ Ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset));
+ __ Ldr(value_map, FieldMemOperand(value, HeapObject::kMapOffset));
+ __ Cmp(cell_value_map, value_map);
+ __ B(eq, &fast_heapobject_case);
+
+ // Fall back to the runtime. The runtime expects a tagged (smi) slot index,
+ // the name, and the value on the stack; language mode selects the strict
+ // or sloppy runtime entry.
+ __ Bind(&slow_case);
+ __ SmiTag(slot);
+ __ Push(slot, name, value);
+ __ TailCallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 3, 1);
+}
+
+
// The number of register that CallApiFunctionAndReturn will need to save on
// the stack. The space for these registers need to be allocated in the
// ExitFrame before calling CallApiFunctionAndReturn.

Powered by Google App Engine
This is Rietveld 408576698