Index: src/arm/stub-cache-arm.cc
===================================================================
--- src/arm/stub-cache-arm.cc (revision 9531)
+++ src/arm/stub-cache-arm.cc (working copy)
@@ -431,7 +431,13 @@
 
     // Update the write barrier for the array address.
     // Pass the now unused name_reg as a scratch register.
-    __ RecordWrite(receiver_reg, Operand(offset), name_reg, scratch);
+    __ mov(name_reg, r0);
+    __ RecordWriteField(receiver_reg,
+                        offset,
+                        name_reg,
+                        scratch,
+                        kLRHasNotBeenSaved,
+                        kDontSaveFPRegs);
   } else {
     // Write to the properties array.
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
@@ -444,7 +450,13 @@
 
     // Update the write barrier for the array address.
     // Ok to clobber receiver_reg and name_reg, since we return.
-    __ RecordWrite(scratch, Operand(offset), name_reg, receiver_reg);
+    __ mov(name_reg, r0);
+    __ RecordWriteField(scratch,
+                        offset,
+                        name_reg,
+                        receiver_reg,
+                        kLRHasNotBeenSaved,
+                        kDontSaveFPRegs);
   }
 
   // Return the value (register r0).
@@ -553,9 +565,10 @@
 }
 
 
-static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
-                                      const CallOptimization& optimization,
-                                      int argc) {
+static MaybeObject* GenerateFastApiDirectCall(
+    MacroAssembler* masm,
+    const CallOptimization& optimization,
+    int argc) {
   // ----------- S t a t e -------------
   //  -- sp[0]              : holder (set by CheckPrototypes)
   //  -- sp[4]              : callee js function
@@ -591,6 +604,8 @@
   ApiFunction fun(api_function_address);
 
   const int kApiStackSpace = 4;
+
+  FrameScope frame_scope(masm, StackFrame::MANUAL);
   __ EnterExitFrame(false, kApiStackSpace);
 
   // r0 = v8::Arguments&
@@ -616,9 +631,11 @@
   ExternalReference ref = ExternalReference(&fun,
                                             ExternalReference::DIRECT_API_CALL,
                                             masm->isolate());
+  AllowExternalCallThatCantCauseGC scope(masm);
   return masm->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
 }
 
+
 class CallInterceptorCompiler BASE_EMBEDDED {
  public:
   CallInterceptorCompiler(StubCompiler* stub_compiler,
@@ -794,7 +811,7 @@
                                         miss_label);
 
     // Call a runtime function to load the interceptor property.
-    __ EnterInternalFrame();
+    FrameScope scope(masm, StackFrame::INTERNAL);
     // Save the name_ register across the call.
     __ push(name_);
 
@@ -811,7 +828,8 @@
 
     // Restore the name_ register.
     __ pop(name_);
-    __ LeaveInternalFrame();
+
+    // Leave the internal frame.
   }
 
   void LoadWithInterceptor(MacroAssembler* masm,
@@ -820,18 +838,19 @@
                            JSObject* holder_obj,
                            Register scratch,
                            Label* interceptor_succeeded) {
-    __ EnterInternalFrame();
-    __ Push(holder, name_);
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      __ Push(holder, name_);
 
-    CompileCallLoadPropertyWithInterceptor(masm,
-                                           receiver,
-                                           holder,
-                                           name_,
-                                           holder_obj);
+      CompileCallLoadPropertyWithInterceptor(masm,
+                                             receiver,
+                                             holder,
+                                             name_,
+                                             holder_obj);
 
-    __ pop(name_);  // Restore the name.
-    __ pop(receiver);  // Restore the holder.
-    __ LeaveInternalFrame();
+      __ pop(name_);  // Restore the name.
+      __ pop(receiver);  // Restore the holder.
+    }
 
     // If interceptor returns no-result sentinel, call the constant function.
     __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
@@ -1228,7 +1247,10 @@
   ApiFunction fun(getter_address);
 
   const int kApiStackSpace = 1;
+
+  FrameScope frame_scope(masm(), StackFrame::MANUAL);
   __ EnterExitFrame(false, kApiStackSpace);
+
   // Create AccessorInfo instance on the stack above the exit frame with
   // scratch2 (internal::Object **args_) as the data.
   __ str(scratch2, MemOperand(sp, 1 * kPointerSize));
@@ -1288,42 +1310,44 @@
 
   // Save necessary data before invoking an interceptor.
   // Requires a frame to make GC aware of pushed pointers.
-  __ EnterInternalFrame();
+  {
+    FrameScope frame_scope(masm(), StackFrame::INTERNAL);
 
-  if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
-    // CALLBACKS case needs a receiver to be passed into C++ callback.
-    __ Push(receiver, holder_reg, name_reg);
-  } else {
-    __ Push(holder_reg, name_reg);
-  }
+    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+      // CALLBACKS case needs a receiver to be passed into C++ callback.
+      __ Push(receiver, holder_reg, name_reg);
+    } else {
+      __ Push(holder_reg, name_reg);
+    }
 
-  // Invoke an interceptor.  Note: map checks from receiver to
-  // interceptor's holder has been compiled before (see a caller
-  // of this method.)
-  CompileCallLoadPropertyWithInterceptor(masm(),
-                                         receiver,
-                                         holder_reg,
-                                         name_reg,
-                                         interceptor_holder);
+    // Invoke an interceptor.  Note: map checks from receiver to
+    // interceptor's holder has been compiled before (see a caller
+    // of this method.)
+    CompileCallLoadPropertyWithInterceptor(masm(),
+                                           receiver,
+                                           holder_reg,
+                                           name_reg,
+                                           interceptor_holder);
 
-  // Check if interceptor provided a value for property.  If it's
-  // the case, return immediately.
-  Label interceptor_failed;
-  __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
-  __ cmp(r0, scratch1);
-  __ b(eq, &interceptor_failed);
-  __ LeaveInternalFrame();
-  __ Ret();
+    // Check if interceptor provided a value for property.  If it's
+    // the case, return immediately.
+    Label interceptor_failed;
+    __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
+    __ cmp(r0, scratch1);
+    __ b(eq, &interceptor_failed);
+    frame_scope.GenerateLeaveFrame();
+    __ Ret();
 
-  __ bind(&interceptor_failed);
-  __ pop(name_reg);
-  __ pop(holder_reg);
-  if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
-    __ pop(receiver);
+    __ bind(&interceptor_failed);
+    __ pop(name_reg);
+    __ pop(holder_reg);
+    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+      __ pop(receiver);
+    }
+
+    // Leave the internal frame.
   }
 
-  __ LeaveInternalFrame();
-
   // Check that the maps from interceptor's holder to lookup's holder
   // haven't changed.  And load lookup's holder into |holder| register.
   if (interceptor_holder != lookup->holder()) {
@@ -1556,7 +1580,7 @@
                 DONT_DO_SMI_CHECK);
 
     if (argc == 1) {  // Otherwise fall through to call the builtin.
-      Label exit, with_write_barrier, attempt_to_grow_elements;
+      Label attempt_to_grow_elements;
 
       // Get the array's length into r0 and calculate new length.
       __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
@@ -1571,11 +1595,15 @@
       __ cmp(r0, r4);
      __ b(gt, &attempt_to_grow_elements);
 
+      // Check if value is a smi.
+      Label with_write_barrier;
+      __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
+      __ JumpIfNotSmi(r4, &with_write_barrier);
+
       // Save new length.
       __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
 
       // Push the element.
-      __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
       // We may need a register containing the address end_elements below,
       // so write back the value in end_elements.
       __ add(end_elements, elements,
@@ -1585,14 +1613,33 @@
       __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
 
       // Check for a smi.
-      __ JumpIfNotSmi(r4, &with_write_barrier);
-      __ bind(&exit);
       __ Drop(argc + 1);
       __ Ret();
 
       __ bind(&with_write_barrier);
-      __ InNewSpace(elements, r4, eq, &exit);
-      __ RecordWriteHelper(elements, end_elements, r4);
+
+      if (FLAG_smi_only_arrays) {
+        __ ldr(r6, FieldMemOperand(receiver, HeapObject::kMapOffset));
+        __ CheckFastSmiOnlyElements(r6, r6, &call_builtin);
+      }
+
+      // Save new length.
+      __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
+
+      // Push the element.
+      // We may need a register containing the address end_elements below,
+      // so write back the value in end_elements.
+      __ add(end_elements, elements,
+             Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+      __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
+
+      __ RecordWrite(elements,
+                     end_elements,
+                     r4,
+                     kLRHasNotBeenSaved,
+                     kDontSaveFPRegs,
+                     EMIT_REMEMBERED_SET,
+                     OMIT_SMI_CHECK);
       __ Drop(argc + 1);
       __ Ret();
 
@@ -1604,6 +1651,17 @@
         __ b(&call_builtin);
       }
 
+      __ ldr(r2, MemOperand(sp, (argc - 1) * kPointerSize));
+      if (FLAG_smi_only_arrays) {
+        // Growing elements that are SMI-only requires special handling in case
+        // the new element is non-Smi. For now, delegate to the builtin.
+        Label no_fast_elements_check;
+        __ JumpIfSmi(r2, &no_fast_elements_check);
+        __ ldr(r7, FieldMemOperand(receiver, HeapObject::kMapOffset));
+        __ CheckFastObjectElements(r7, r7, &call_builtin);
+        __ bind(&no_fast_elements_check);
+      }
+
       Isolate* isolate = masm()->isolate();
       ExternalReference new_space_allocation_top =
           ExternalReference::new_space_allocation_top_address(isolate);
@@ -1630,8 +1688,7 @@
       // Update new_space_allocation_top.
       __ str(r6, MemOperand(r7));
       // Push the argument.
-      __ ldr(r6, MemOperand(sp, (argc - 1) * kPointerSize));
-      __ str(r6, MemOperand(end_elements));
+      __ str(r2, MemOperand(end_elements));
       // Fill the rest with holes.
       __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
       for (int i = 1; i < kAllocationDelta; i++) {
@@ -2713,6 +2770,15 @@
   // Store the value in the cell.
   __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
 
+  __ mov(r1, r0);
+  __ RecordWriteField(r4,
+                      JSGlobalPropertyCell::kValueOffset,
+                      r1,
+                      r2,
+                      kLRHasNotBeenSaved,
+                      kDontSaveFPRegs,
+                      OMIT_REMEMBERED_SET);
+
   Counters* counters = masm()->isolate()->counters();
   __ IncrementCounter(counters->named_store_global_inline(), 1, r4, r3);
   __ Ret();
| @@ -3454,6 +3520,7 @@
|
| case EXTERNAL_FLOAT_ELEMENTS:
|
| case EXTERNAL_DOUBLE_ELEMENTS:
|
| case FAST_ELEMENTS:
|
| + case FAST_SMI_ONLY_ELEMENTS:
|
| case FAST_DOUBLE_ELEMENTS:
|
| case DICTIONARY_ELEMENTS:
|
| case NON_STRICT_ARGUMENTS_ELEMENTS:
|
| @@ -3540,6 +3607,7 @@
|
| }
|
| break;
|
| case FAST_ELEMENTS:
|
| + case FAST_SMI_ONLY_ELEMENTS:
|
| case FAST_DOUBLE_ELEMENTS:
|
| case DICTIONARY_ELEMENTS:
|
| case NON_STRICT_ARGUMENTS_ELEMENTS:
|
| @@ -3880,6 +3948,7 @@
|
| }
|
| break;
|
| case FAST_ELEMENTS:
|
| + case FAST_SMI_ONLY_ELEMENTS:
|
| case FAST_DOUBLE_ELEMENTS:
|
| case DICTIONARY_ELEMENTS:
|
| case NON_STRICT_ARGUMENTS_ELEMENTS:
|
| @@ -3943,6 +4012,7 @@
|
| case EXTERNAL_FLOAT_ELEMENTS:
|
| case EXTERNAL_DOUBLE_ELEMENTS:
|
| case FAST_ELEMENTS:
|
| + case FAST_SMI_ONLY_ELEMENTS:
|
| case FAST_DOUBLE_ELEMENTS:
|
| case DICTIONARY_ELEMENTS:
|
| case NON_STRICT_ARGUMENTS_ELEMENTS:
|
| @@ -4082,6 +4152,7 @@
|
| case EXTERNAL_FLOAT_ELEMENTS:
|
| case EXTERNAL_DOUBLE_ELEMENTS:
|
| case FAST_ELEMENTS:
|
| + case FAST_SMI_ONLY_ELEMENTS:
|
| case FAST_DOUBLE_ELEMENTS:
|
| case DICTIONARY_ELEMENTS:
|
| case NON_STRICT_ARGUMENTS_ELEMENTS:
|
@@ -4234,8 +4305,10 @@
 }
 
 
-void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
-                                                      bool is_js_array) {
+void KeyedStoreStubCompiler::GenerateStoreFastElement(
+    MacroAssembler* masm,
+    bool is_js_array,
+    ElementsKind elements_kind) {
   // ----------- S t a t e -------------
   //  -- r0    : value
   //  -- r1    : key
@@ -4277,15 +4350,33 @@
   __ cmp(key_reg, scratch);
   __ b(hs, &miss_force_generic);
 
-  __ add(scratch,
-         elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
-  __ str(value_reg,
-         MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
-  __ RecordWrite(scratch,
-                 Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize),
-                 receiver_reg , elements_reg);
-
+  if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+    __ JumpIfNotSmi(value_reg, &miss_force_generic);
+    __ add(scratch,
+           elements_reg,
+           Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+    STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+    __ add(scratch,
+           scratch,
+           Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
+    __ str(value_reg, MemOperand(scratch));
+  } else {
+    ASSERT(elements_kind == FAST_ELEMENTS);
+    __ add(scratch,
+           elements_reg,
+           Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+    STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+    __ add(scratch,
+           scratch,
+           Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
+    __ str(value_reg, MemOperand(scratch));
+    __ mov(receiver_reg, value_reg);
+    __ RecordWrite(elements_reg,  // Object.
+                   scratch,       // Address.
+                   receiver_reg,  // Value.
+                   kLRHasNotBeenSaved,
+                   kDontSaveFPRegs);
+  }
   // value_reg (r0) is preserved.
   // Done.
   __ Ret();