| Index: src/arm/code-stubs-arm.cc
|
| diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
|
| index d3690306bd099b64229f5e3d00079af056329835..9da518e2ced5ec653438a2ed3c5d75bafc8ed9a8 100644
|
| --- a/src/arm/code-stubs-arm.cc
|
| +++ b/src/arm/code-stubs-arm.cc
|
| @@ -82,12 +82,15 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
|
| // write barrier because the allocated object is in new space.
|
| __ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
|
| __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
|
| + __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
|
| __ str(r1, FieldMemOperand(r0, JSObject::kPropertiesOffset));
|
| __ str(r1, FieldMemOperand(r0, JSObject::kElementsOffset));
|
| __ str(r2, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
|
| __ str(r3, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
|
| __ str(cp, FieldMemOperand(r0, JSFunction::kContextOffset));
|
| __ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
|
| + __ str(r4, FieldMemOperand(r0, JSFunction::kNextFunctionLinkOffset));
|
| +
|
|
|
| // Initialize the code pointer in the function to be the one
|
| // found in the shared function info object.
|
| @@ -1088,6 +1091,10 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
|
| Label not_heap_number;
|
| Register scratch = r7;
|
|
|
| + __ LoadRoot(ip, Heap::kNullValueRootIndex);
|
| + __ cmp(tos_, ip);
|
| + __ b(eq, &false_result);
|
| +
|
| // HeapNumber => false iff +0, -0, or NaN.
|
| __ ldr(scratch, FieldMemOperand(tos_, HeapObject::kMapOffset));
|
| __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
|
| @@ -2202,6 +2209,14 @@ Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
|
| }
|
|
|
|
|
| +Handle<Code> GetTypeRecordingBinaryOpStub(int key,
|
| + TRBinaryOpIC::TypeInfo type_info,
|
| + TRBinaryOpIC::TypeInfo result_type_info) {
|
| + UNIMPLEMENTED();
|
| + return Handle<Code>::null();
|
| +}
|
| +
|
| +
|
| void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
|
| // Argument is a number and is on stack and in r0.
|
| Label runtime_call;
|
| @@ -2649,7 +2664,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
|
| // r0:r1: result
|
| // sp: stack pointer
|
| // fp: frame pointer
|
| - __ LeaveExitFrame();
|
| + __ LeaveExitFrame(save_doubles_);
|
|
|
| // check if we should retry or throw exception
|
| Label retry;
|
| @@ -2698,7 +2713,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
|
| // builtin once.
|
|
|
| // Enter the exit frame that transitions from JavaScript to C++.
|
| - __ EnterExitFrame();
|
| + __ EnterExitFrame(save_doubles_);
|
|
|
| // r4: number of arguments (C callee-saved)
|
| // r5: pointer to builtin function (C callee-saved)
|
| @@ -2786,6 +2801,15 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
|
| // Setup frame pointer for the frame to be pushed.
|
| __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
|
|
|
| +#ifdef ENABLE_LOGGING_AND_PROFILING
|
| + // If this is the outermost JS call, set js_entry_sp value.
|
| + ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address);
|
| + __ mov(r5, Operand(ExternalReference(js_entry_sp)));
|
| + __ ldr(r6, MemOperand(r5));
|
| + __ cmp(r6, Operand(0, RelocInfo::NONE));
|
| + __ str(fp, MemOperand(r5), eq);
|
| +#endif
|
| +
|
| // Call a faked try-block that does the invoke.
|
| __ bl(&invoke);
|
|
|
| @@ -2848,6 +2872,15 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
|
| // No need to restore registers
|
| __ add(sp, sp, Operand(StackHandlerConstants::kSize));
|
|
|
| +#ifdef ENABLE_LOGGING_AND_PROFILING
|
| + // If current FP value is the same as js_entry_sp value, it means that
|
| + // the current function is the outermost.
|
| + __ mov(r5, Operand(ExternalReference(js_entry_sp)));
|
| + __ ldr(r6, MemOperand(r5));
|
| + __ cmp(fp, Operand(r6));
|
| + __ mov(r6, Operand(0, RelocInfo::NONE), LeaveCC, eq);
|
| + __ str(r6, MemOperand(r5), eq);
|
| +#endif
|
|
|
| __ bind(&exit); // r0 holds result
|
| // Restore the top frame descriptors from the stack.
|
| @@ -3438,6 +3471,95 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
|
| }
|
|
|
|
|
| +void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
|
| + const int kMaxInlineLength = 100;
|
| + Label slowcase;
|
| + Label done;
|
| + __ ldr(r1, MemOperand(sp, kPointerSize * 2));
|
| + STATIC_ASSERT(kSmiTag == 0);
|
| + STATIC_ASSERT(kSmiTagSize == 1);
|
| + __ tst(r1, Operand(kSmiTagMask));
|
| + __ b(ne, &slowcase);
|
| + __ cmp(r1, Operand(Smi::FromInt(kMaxInlineLength)));
|
| + __ b(hi, &slowcase);
|
| + // Smi-tagging is equivalent to multiplying by 2.
|
| + // Allocate RegExpResult followed by FixedArray with size in r2.
|
| + // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
|
| + // Elements: [Map][Length][..elements..]
|
| + // Size of JSArray with two in-object properties and the header of a
|
| + // FixedArray.
|
| + int objects_size =
|
| + (JSRegExpResult::kSize + FixedArray::kHeaderSize) / kPointerSize;
|
| + __ mov(r5, Operand(r1, LSR, kSmiTagSize + kSmiShiftSize));
|
| + __ add(r2, r5, Operand(objects_size));
|
| + __ AllocateInNewSpace(
|
| + r2, // In: Size, in words.
|
| + r0, // Out: Start of allocation (tagged).
|
| + r3, // Scratch register.
|
| + r4, // Scratch register.
|
| + &slowcase,
|
| + static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
|
| + // r0: Start of allocated area, object-tagged.
|
| + // r1: Number of elements in array, as smi.
|
| + // r5: Number of elements, untagged.
|
| +
|
| + // Set JSArray map to global.regexp_result_map().
|
| + // Set empty properties FixedArray.
|
| + // Set elements to point to FixedArray allocated right after the JSArray.
|
| + // Interleave operations for better latency.
|
| + __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX));
|
| + __ add(r3, r0, Operand(JSRegExpResult::kSize));
|
| + __ mov(r4, Operand(FACTORY->empty_fixed_array()));
|
| + __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
|
| + __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
|
| + __ ldr(r2, ContextOperand(r2, Context::REGEXP_RESULT_MAP_INDEX));
|
| + __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
|
| + __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| +
|
| + // Set input, index and length fields from arguments.
|
| + __ ldr(r1, MemOperand(sp, kPointerSize * 0));
|
| + __ str(r1, FieldMemOperand(r0, JSRegExpResult::kInputOffset));
|
| + __ ldr(r1, MemOperand(sp, kPointerSize * 1));
|
| + __ str(r1, FieldMemOperand(r0, JSRegExpResult::kIndexOffset));
|
| + __ ldr(r1, MemOperand(sp, kPointerSize * 2));
|
| + __ str(r1, FieldMemOperand(r0, JSArray::kLengthOffset));
|
| +
|
| + // Fill out the elements FixedArray.
|
| + // r0: JSArray, tagged.
|
| + // r3: FixedArray, tagged.
|
| + // r5: Number of elements in array, untagged.
|
| +
|
| + // Set map.
|
| + __ mov(r2, Operand(FACTORY->fixed_array_map()));
|
| + __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
|
| + // Set FixedArray length.
|
| + __ mov(r6, Operand(r5, LSL, kSmiTagSize));
|
| + __ str(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
|
| + // Fill contents of fixed-array with the-hole.
|
| + __ mov(r2, Operand(FACTORY->the_hole_value()));
|
| + __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
| + // Fill fixed array elements with hole.
|
| + // r0: JSArray, tagged.
|
| + // r2: the hole.
|
| + // r3: Start of elements in FixedArray.
|
| + // r5: Number of elements to fill.
|
| + Label loop;
|
| + __ tst(r5, Operand(r5));
|
| + __ bind(&loop);
|
| + __ b(le, &done); // Jump if r5 is negative or zero.
|
| + __ sub(r5, r5, Operand(1), SetCC);
|
| + __ str(r2, MemOperand(r3, r5, LSL, kPointerSizeLog2));
|
| + __ jmp(&loop);
|
| +
|
| + __ bind(&done);
|
| + __ add(sp, sp, Operand(3 * kPointerSize));
|
| + __ Ret();
|
| +
|
| + __ bind(&slowcase);
|
| + __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
|
| +}
|
| +
|
| +
|
| void CallFunctionStub::Generate(MacroAssembler* masm) {
|
| Label slow;
|
|
|
| @@ -4732,6 +4854,123 @@ void StringAddStub::Generate(MacroAssembler* masm) {
|
| }
|
|
|
|
|
| +void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
|
| + ASSERT(state_ == CompareIC::SMIS);
|
| + Label miss;
|
| + __ orr(r2, r1, r0);
|
| + __ tst(r2, Operand(kSmiTagMask));
|
| + __ b(ne, &miss);
|
| +
|
| + if (GetCondition() == eq) {
|
| + // For equality we do not care about the sign of the result.
|
| + __ sub(r0, r0, r1, SetCC);
|
| + } else {
|
| + __ sub(r1, r1, r0, SetCC);
|
| + // Correct sign of result in case of overflow.
|
| + __ rsb(r1, r1, Operand(0), SetCC, vs);
|
| + __ mov(r0, r1);
|
| + }
|
| + __ Ret();
|
| +
|
| + __ bind(&miss);
|
| + GenerateMiss(masm);
|
| +}
|
| +
|
| +
|
| +void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
|
| + ASSERT(state_ == CompareIC::HEAP_NUMBERS);
|
| +
|
| + Label generic_stub;
|
| + Label unordered;
|
| + Label miss;
|
| + __ and_(r2, r1, Operand(r0));
|
| + __ tst(r2, Operand(kSmiTagMask));
|
| + __ b(eq, &generic_stub);
|
| +
|
| + __ CompareObjectType(r0, r2, r2, HEAP_NUMBER_TYPE);
|
| + __ b(ne, &miss);
|
| + __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE);
|
| + __ b(ne, &miss);
|
| +
|
| + // Inlining the double comparison and falling back to the general compare
|
| + // stub if NaN is involved or VFP3 is unsupported.
|
| + if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
|
| + CpuFeatures::Scope scope(VFP3);
|
| +
|
| + // Load left and right operand
|
| + __ sub(r2, r1, Operand(kHeapObjectTag));
|
| + __ vldr(d0, r2, HeapNumber::kValueOffset);
|
| + __ sub(r2, r0, Operand(kHeapObjectTag));
|
| + __ vldr(d1, r2, HeapNumber::kValueOffset);
|
| +
|
| + // Compare operands
|
| + __ vcmp(d0, d1);
|
| + __ vmrs(pc); // Move vector status bits to normal status bits.
|
| +
|
| + // Don't base result on status bits when a NaN is involved.
|
| + __ b(vs, &unordered);
|
| +
|
| + // Return a result of -1, 0, or 1, based on status bits.
|
| + __ mov(r0, Operand(EQUAL), LeaveCC, eq);
|
| + __ mov(r0, Operand(LESS), LeaveCC, lt);
|
| + __ mov(r0, Operand(GREATER), LeaveCC, gt);
|
| + __ Ret();
|
| +
|
| + __ bind(&unordered);
|
| + }
|
| +
|
| + CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0);
|
| + __ bind(&generic_stub);
|
| + __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
|
| +
|
| + __ bind(&miss);
|
| + GenerateMiss(masm);
|
| +}
|
| +
|
| +
|
| +void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
|
| + ASSERT(state_ == CompareIC::OBJECTS);
|
| + Label miss;
|
| + __ and_(r2, r1, Operand(r0));
|
| + __ tst(r2, Operand(kSmiTagMask));
|
| + __ b(eq, &miss);
|
| +
|
| + __ CompareObjectType(r0, r2, r2, JS_OBJECT_TYPE);
|
| + __ b(ne, &miss);
|
| + __ CompareObjectType(r1, r2, r2, JS_OBJECT_TYPE);
|
| + __ b(ne, &miss);
|
| +
|
| + ASSERT(GetCondition() == eq);
|
| + __ sub(r0, r0, Operand(r1));
|
| + __ Ret();
|
| +
|
| + __ bind(&miss);
|
| + GenerateMiss(masm);
|
| +}
|
| +
|
| +
|
| +void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
|
| + __ Push(r1, r0);
|
| + __ push(lr);
|
| +
|
| + // Call the runtime system in a fresh internal frame.
|
| + ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss));
|
| + __ EnterInternalFrame();
|
| + __ Push(r1, r0);
|
| + __ mov(ip, Operand(Smi::FromInt(op_)));
|
| + __ push(ip);
|
| + __ CallExternalReference(miss, 3);
|
| + __ LeaveInternalFrame();
|
| + // Compute the entry point of the rewritten stub.
|
| + __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
|
| + // Restore registers.
|
| + __ pop(lr);
|
| + __ pop(r0);
|
| + __ pop(r1);
|
| + __ Jump(r2);
|
| +}
|
| +
|
| +
|
| #undef __
|
|
|
| } } // namespace v8::internal
|
|
|