Index: runtime/vm/stub_code_arm.cc
diff --git a/runtime/vm/stub_code_arm.cc b/runtime/vm/stub_code_arm.cc
index 6831e9a42a9a1d74316253f2dec75d4501bac51c..10722ccf45f2d5e0dbb720e4472563064df7d511 100644
--- a/runtime/vm/stub_code_arm.cc
+++ b/runtime/vm/stub_code_arm.cc
@@ -23,8 +23,10 @@
 namespace dart {
 
 DEFINE_FLAG(bool, inline_alloc, true, "Inline allocation of objects.");
-DEFINE_FLAG(bool, use_slow_path, false,
-    "Set to true for debugging & verifying the slow paths.");
+DEFINE_FLAG(bool,
+            use_slow_path,
+            false,
+            "Set to true for debugging & verifying the slow paths.");
 DECLARE_FLAG(bool, trace_optimized_ic_calls);
 
 // Input parameters:
@@ -47,7 +49,8 @@ void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) {
   __ StoreToOffset(kWord, FP, THR, Thread::top_exit_frame_info_offset());
 
 #if defined(DEBUG)
-  { Label ok;
+  {
+    Label ok;
     // Check that we are always entering from Dart code.
     __ LoadFromOffset(kWord, R8, THR, Thread::vm_tag_offset());
     __ CompareImmediate(R8, VMTag::kDartTagId);
@@ -139,7 +142,8 @@ void StubCode::GenerateCallNativeCFunctionStub(Assembler* assembler) {
   __ StoreToOffset(kWord, FP, THR, Thread::top_exit_frame_info_offset());
 
 #if defined(DEBUG)
-  { Label ok;
+  {
+    Label ok;
     // Check that we are always entering from Dart code.
     __ LoadFromOffset(kWord, R8, THR, Thread::vm_tag_offset());
     __ CompareImmediate(R8, VMTag::kDartTagId);
@@ -179,7 +183,7 @@ void StubCode::GenerateCallNativeCFunctionStub(Assembler* assembler) {
   // Passing the structure by value as in runtime calls would require changing
   // Dart API for native functions.
   // For now, space is reserved on the stack and we pass a pointer to it.
-  __ stm(IA, SP,  (1 << R0) | (1 << R1) | (1 << R2) | (1 << R3));
+  __ stm(IA, SP, (1 << R0) | (1 << R1) | (1 << R2) | (1 << R3));
   __ mov(R0, Operand(SP));  // Pass the pointer to the NativeArguments.
 
   __ mov(R1, Operand(R9));  // Pass the function entrypoint to call.
@@ -220,7 +224,8 @@ void StubCode::GenerateCallBootstrapCFunctionStub(Assembler* assembler) {
   __ StoreToOffset(kWord, FP, THR, Thread::top_exit_frame_info_offset());
 
 #if defined(DEBUG)
-  { Label ok;
+  {
+    Label ok;
     // Check that we are always entering from Dart code.
     __ LoadFromOffset(kWord, R8, THR, Thread::vm_tag_offset());
     __ CompareImmediate(R8, VMTag::kDartTagId);
@@ -260,7 +265,7 @@ void StubCode::GenerateCallBootstrapCFunctionStub(Assembler* assembler) {
   // Passing the structure by value as in runtime calls would require changing
   // Dart API for native functions.
   // For now, space is reserved on the stack and we pass a pointer to it.
-  __ stm(IA, SP,  (1 << R0) | (1 << R1) | (1 << R2) | (1 << R3));
+  __ stm(IA, SP, (1 << R0) | (1 << R1) | (1 << R2) | (1 << R3));
   __ mov(R0, Operand(SP));  // Pass the pointer to the NativeArguments.
 
   // Call native function or redirection via simulator.
@@ -463,8 +468,8 @@ static void GenerateDeoptimizationSequence(Assembler* assembler,
   }
 
   __ mov(R0, Operand(SP));  // Pass address of saved registers block.
-  bool is_lazy = (kind == kLazyDeoptFromReturn) ||
-                 (kind == kLazyDeoptFromThrow);
+  bool is_lazy =
+      (kind == kLazyDeoptFromReturn) || (kind == kLazyDeoptFromThrow);
   __ mov(R1, Operand(is_lazy ? 1 : 0));
   __ ReserveAlignedFrameSpace(0);
   __ CallRuntime(kDeoptimizeCopyFrameRuntimeEntry, 2);
@@ -645,7 +650,7 @@ void StubCode::GenerateAllocateArrayStub(Assembler* assembler) {
   // Compute the size to be allocated, it is based on the array length
   // and is computed as:
   // RoundedAllocationSize((array_length * kwordSize) + sizeof(RawArray)).
-  __ MoveRegister(R3, R2);   // Array length.
+  __ MoveRegister(R3, R2);  // Array length.
   // Check that length is a positive Smi.
   __ tst(R3, Operand(kSmiTagMask));
   if (FLAG_use_slow_path) {
@@ -677,7 +682,7 @@ void StubCode::GenerateAllocateArrayStub(Assembler* assembler) {
   // Potential new object start.
   __ ldr(R0, Address(R8, Heap::TopOffset(space)));
   __ adds(NOTFP, R0, Operand(R9));  // Potential next object start.
-  __ b(&slow_case, CS);  // Branch if unsigned overflow.
+  __ b(&slow_case, CS);             // Branch if unsigned overflow.
 
   // Check if the allocation fits into the remaining space.
   // R0: potential new object start.
@@ -715,14 +720,11 @@ void StubCode::GenerateAllocateArrayStub(Assembler* assembler) {
   // R0: new object start as a tagged pointer.
   // NOTFP: new object end address.
   // Store the type argument field.
-  __ StoreIntoObjectNoBarrier(R0,
-                              FieldAddress(R0, Array::type_arguments_offset()),
-                              R1);
+  __ StoreIntoObjectNoBarrier(
+      R0, FieldAddress(R0, Array::type_arguments_offset()), R1);
 
   // Set the length field.
-  __ StoreIntoObjectNoBarrier(R0,
-                              FieldAddress(R0, Array::length_offset()),
-                              R2);
+  __ StoreIntoObjectNoBarrier(R0, FieldAddress(R0, Array::length_offset()), R2);
 
   // Initialize all array elements to raw_null.
   // R0: new object start as a tagged pointer.
@@ -1021,7 +1023,7 @@ void StubCode::GenerateUpdateStoreBufferStub(Assembler* assembler) {
   __ Bind(&add_to_buffer);
   // R2: Header word.
   if (TargetCPUFeatures::arm_version() == ARMv5TE) {
-    // TODO(21263): Implement 'swp' and use it below.
+// TODO(21263): Implement 'swp' and use it below.
 #if !defined(USING_SIMULATOR)
     ASSERT(OS::NumberOfAvailableProcessors() <= 1);
 #endif
@@ -1147,8 +1149,8 @@ void StubCode::GenerateAllocationStubForClass(Assembler* assembler,
     if ((end_offset - begin_offset) >= (2 * kWordSize)) {
       __ mov(R3, Operand(R2));
     }
-    __ InitializeFieldsNoBarrierUnrolled(R0, R0, begin_offset, end_offset,
-                                         R2, R3);
+    __ InitializeFieldsNoBarrierUnrolled(R0, R0, begin_offset, end_offset, R2,
+                                         R3);
   } else {
     // There are more than kInlineInstanceSize(12) fields
     __ add(R4, R0, Operand(Instance::NextFieldOffset() - kHeapObjectTag));
@@ -1191,7 +1193,7 @@ void StubCode::GenerateAllocationStubForClass(Assembler* assembler,
   // calling into the runtime.
   __ EnterStubFrame();  // Uses pool pointer to pass cls to runtime.
   __ LoadObject(R2, Object::null_object());
-  __ Push(R2);  // Setup space on stack for return value.
+  __ Push(R2);         // Setup space on stack for return value.
   __ PushObject(cls);  // Push class of object to be allocated.
   if (is_cls_parameterized) {
     // Push type arguments.
@@ -1201,7 +1203,7 @@ void StubCode::GenerateAllocationStubForClass(Assembler* assembler,
     __ Push(R2);
   }
   __ CallRuntime(kAllocateObjectRuntimeEntry, 2);  // Allocate object.
-  __ Drop(2);  // Pop arguments.
+  __ Drop(2);                                      // Pop arguments.
   __ Pop(R0);  // Pop result (newly allocated object).
   // R0: new object
   // Restore the frame pointer.
@@ -1251,10 +1253,10 @@ void StubCode::GenerateOptimizedUsageCounterIncrement(Assembler* assembler) {
   if (FLAG_trace_optimized_ic_calls) {
     __ EnterStubFrame();
     __ PushList((1 << R9) | (1 << R8));  // Preserve.
-    __ Push(ic_reg);  // Argument.
-    __ Push(func_reg);  // Argument.
+    __ Push(ic_reg);                     // Argument.
+    __ Push(func_reg);                   // Argument.
     __ CallRuntime(kTraceICCallRuntimeEntry, 2);
-    __ Drop(2);  // Discard argument;
+    __ Drop(2);                         // Discard argument;
     __ PopList((1 << R9) | (1 << R8));  // Restore.
     __ LeaveStubFrame();
   }
@@ -1296,12 +1298,12 @@ static void EmitFastSmiOp(Assembler* assembler,
   __ b(not_smi_or_overflow, NE);
   switch (kind) {
     case Token::kADD: {
-      __ adds(R0, R1, Operand(R0));  // Adds.
+      __ adds(R0, R1, Operand(R0));   // Adds.
       __ b(not_smi_or_overflow, VS);  // Branch if overflow.
       break;
     }
     case Token::kSUB: {
-      __ subs(R0, R1, Operand(R0));  // Subtract.
+      __ subs(R0, R1, Operand(R0));   // Subtract.
       __ b(not_smi_or_overflow, VS);  // Branch if overflow.
       break;
     }
@@ -1311,13 +1313,14 @@ static void EmitFastSmiOp(Assembler* assembler,
       __ LoadObject(R0, Bool::False(), NE);
       break;
     }
-    default: UNIMPLEMENTED();
+    default:
+      UNIMPLEMENTED();
   }
   // R9: IC data object (preserved).
   __ ldr(R8, FieldAddress(R9, ICData::ic_data_offset()));
   // R8: ic_data_array with check entries: classes and target functions.
   __ AddImmediate(R8, R8, Array::data_offset() - kHeapObjectTag);
-  // R8: points directly to the first ic data array element.
+// R8: points directly to the first ic data array element.
 #if defined(DEBUG)
   // Check that first entry is for Smi/Smi.
   Label error, ok;
@@ -1363,7 +1366,8 @@ void StubCode::GenerateNArgsCheckInlineCacheStub(
   __ CheckCodePointer();
   ASSERT(num_args > 0);
 #if defined(DEBUG)
-  { Label ok;
+  {
+    Label ok;
     // Check that the IC data array has NumArgsTested() == num_args.
     // 'NumArgsTested' is stored in the least significant bits of 'state_bits'.
     __ ldr(R8, FieldAddress(R9, ICData::state_bits_offset()));
@@ -1443,7 +1447,7 @@ void StubCode::GenerateNArgsCheckInlineCacheStub(
 
   const intptr_t entry_size = ICData::TestEntryLengthFor(num_args) * kWordSize;
   __ AddImmediate(R8, entry_size);  // Next entry.
-  __ ldr(R1, Address(R8, 0));  // Next class ID.
+  __ ldr(R1, Address(R8, 0));       // Next class ID.
 
   __ Bind(&test);
   __ CompareImmediate(R1, Smi::RawValue(kIllegalCid));  // Done?
@@ -1529,42 +1533,37 @@ void StubCode::GenerateNArgsCheckInlineCacheStub(
 // - 1 target function.
 void StubCode::GenerateOneArgCheckInlineCacheStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R8);
-  GenerateNArgsCheckInlineCacheStub(assembler,
-                                    1,
-                                    kInlineCacheMissHandlerOneArgRuntimeEntry,
-                                    Token::kILLEGAL);
+  GenerateNArgsCheckInlineCacheStub(
+      assembler, 1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL);
 }
 
 
 void StubCode::GenerateTwoArgsCheckInlineCacheStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R8);
-  GenerateNArgsCheckInlineCacheStub(assembler,
-                                    2,
-                                    kInlineCacheMissHandlerTwoArgsRuntimeEntry,
-                                    Token::kILLEGAL);
+  GenerateNArgsCheckInlineCacheStub(assembler, 2,
+                                    kInlineCacheMissHandlerTwoArgsRuntimeEntry,
+                                    Token::kILLEGAL);
 }
 
 
 void StubCode::GenerateSmiAddInlineCacheStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R8);
-  GenerateNArgsCheckInlineCacheStub(assembler,
-                                    2,
-                                    kInlineCacheMissHandlerTwoArgsRuntimeEntry,
-                                    Token::kADD);
+  GenerateNArgsCheckInlineCacheStub(
+      assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD);
 }
 
 
 void StubCode::GenerateSmiSubInlineCacheStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R8);
-  GenerateNArgsCheckInlineCacheStub(assembler, 2,
-      kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kSUB);
+  GenerateNArgsCheckInlineCacheStub(
+      assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kSUB);
 }
 
 
 void StubCode::GenerateSmiEqualInlineCacheStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R8);
-  GenerateNArgsCheckInlineCacheStub(assembler, 2,
-      kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ);
+  GenerateNArgsCheckInlineCacheStub(
+      assembler, 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ);
 }
 
 
@@ -1572,8 +1571,8 @@ void StubCode::GenerateOneArgOptimizedCheckInlineCacheStub(
     Assembler* assembler) {
   GenerateOptimizedUsageCounterIncrement(assembler);
   GenerateNArgsCheckInlineCacheStub(assembler, 1,
-      kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,
-      true /* optimized */);
+                                    kInlineCacheMissHandlerOneArgRuntimeEntry,
+                                    Token::kILLEGAL, true /* optimized */);
 }
 
 
@@ -1581,8 +1580,8 @@ void StubCode::GenerateTwoArgsOptimizedCheckInlineCacheStub(
     Assembler* assembler) {
  GenerateOptimizedUsageCounterIncrement(assembler);
   GenerateNArgsCheckInlineCacheStub(assembler, 2,
-      kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,
-      true /* optimized */);
+                                    kInlineCacheMissHandlerTwoArgsRuntimeEntry,
+                                    Token::kILLEGAL, true /* optimized */);
 }
 
 
@@ -1592,7 +1591,8 @@ void StubCode::GenerateTwoArgsOptimizedCheckInlineCacheStub(
 void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R8);
 #if defined(DEBUG)
-  { Label ok;
+  {
+    Label ok;
     // Check that the IC data array has NumArgsTested() == 0.
     // 'NumArgsTested' is stored in the least significant bits of 'state_bits'.
     __ ldr(R8, FieldAddress(R9, ICData::state_bits_offset()));
@@ -1662,8 +1662,8 @@ void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) {
 
 void StubCode::GenerateTwoArgsUnoptimizedStaticCallStub(Assembler* assembler) {
   GenerateUsageCounterIncrement(assembler, R8);
-  GenerateNArgsCheckInlineCacheStub(assembler, 2,
-      kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL);
+  GenerateNArgsCheckInlineCacheStub(
+      assembler, 2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL);
 }
 
 
@@ -1677,7 +1677,7 @@ void StubCode::GenerateLazyCompileStub(Assembler* assembler) {
   __ PushList((1 << R4) | (1 << R9));
   __ Push(R0);  // Pass function.
   __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
-  __ Pop(R0);  // Restore argument.
+  __ Pop(R0);                         // Restore argument.
   __ PopList((1 << R4) | (1 << R9));  // Restore arg desc. and IC data.
   __ LeaveStubFrame();
 
@@ -1716,8 +1716,7 @@ void StubCode::GenerateRuntimeCallBreakpointStub(Assembler* assembler) {
 
 
 // Called only from unoptimized code. All relevant registers have been saved.
-void StubCode::GenerateDebugStepCheckStub(
-    Assembler* assembler) {
+void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) {
   // Check single stepping.
   Label stepping, done_stepping;
   __ LoadIsolate(R1);
@@ -1749,8 +1748,8 @@ static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) {
   // Compute instance type arguments into R4.
   Label has_no_type_arguments;
   __ LoadObject(R4, Object::null_object());
-  __ ldr(R9, FieldAddress(R3,
-      Class::type_arguments_field_offset_in_words_offset()));
+  __ ldr(R9, FieldAddress(
+                 R3, Class::type_arguments_field_offset_in_words_offset()));
   __ CompareImmediate(R9, Class::kNoTypeArguments);
   __ b(&has_no_type_arguments, EQ);
   __ add(R9, R0, Operand(R9, LSL, 2));
@@ -1792,7 +1791,7 @@ static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) {
   } else {
     __ b(&next_iteration, NE);
     __ ldr(R9, Address(R2, kWordSize *
-                       SubtypeTestCache::kInstantiatorTypeArguments));
+                               SubtypeTestCache::kInstantiatorTypeArguments));
     __ cmp(R9, Operand(R1));
     __ b(&found, EQ);
   }
@@ -1863,13 +1862,13 @@ void StubCode::GenerateGetStackPointerStub(Assembler* assembler) {
 void StubCode::GenerateJumpToExceptionHandlerStub(Assembler* assembler) {
   ASSERT(kExceptionObjectReg == R0);
   ASSERT(kStackTraceObjectReg == R1);
-  __ mov(IP, Operand(R1));  // Copy Stack pointer into IP.
-  __ mov(LR, Operand(R0));  // Program counter.
-  __ mov(R0, Operand(R3));  // Exception object.
-  __ ldr(R1, Address(SP, 0));  // StackTrace object.
+  __ mov(IP, Operand(R1));      // Copy Stack pointer into IP.
+  __ mov(LR, Operand(R0));      // Program counter.
+  __ mov(R0, Operand(R3));      // Exception object.
+  __ ldr(R1, Address(SP, 0));   // StackTrace object.
   __ ldr(THR, Address(SP, 4));  // Thread.
-  __ mov(FP, Operand(R2));  // Frame_pointer.
-  __ mov(SP, Operand(IP));  // Set Stack pointer.
+  __ mov(FP, Operand(R2));      // Frame_pointer.
+  __ mov(SP, Operand(IP));      // Set Stack pointer.
   // Set the tag.
   __ LoadImmediate(R2, VMTag::kDartTagId);
   __ StoreToOffset(kWord, R2, THR, Thread::vm_tag_offset());
@@ -1958,7 +1957,7 @@ static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
   __ b(&done, NE);
   __ EnterStubFrame();
   __ ReserveAlignedFrameSpace(2 * kWordSize);
-  __ stm(IA, SP,  (1 << R0) | (1 << R1));
+  __ stm(IA, SP, (1 << R0) | (1 << R1));
   __ CallRuntime(kBigintCompareRuntimeEntry, 2);
   // Result in R0, 0 means equal.
   __ LeaveStubFrame();
|