| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 300 matching lines...) | |
| 311 usat(dst, satpos, src, cond); | 311 usat(dst, satpos, src, cond); |
| 312 } | 312 } |
| 313 } | 313 } |
| 314 | 314 |
| 315 | 315 |
| 316 void MacroAssembler::SmiJumpTable(Register index, Vector<Label*> targets) { | 316 void MacroAssembler::SmiJumpTable(Register index, Vector<Label*> targets) { |
| 317 // Empty the const pool. | 317 // Empty the const pool. |
| 318 CheckConstPool(true, true); | 318 CheckConstPool(true, true); |
| 319 add(pc, pc, Operand(index, | 319 add(pc, pc, Operand(index, |
| 320 LSL, | 320 LSL, |
| 321 assembler::arm::Instr::kInstrSizeLog2 - kSmiTagSize)); | 321 Instruction::kInstrSizeLog2 - kSmiTagSize)); |
| 322 BlockConstPoolBefore(pc_offset() + (targets.length() + 1) * kInstrSize); | 322 BlockConstPoolBefore(pc_offset() + (targets.length() + 1) * kInstrSize); |
| 323 nop(); // Jump table alignment. | 323 nop(); // Jump table alignment. |
| 324 for (int i = 0; i < targets.length(); i++) { | 324 for (int i = 0; i < targets.length(); i++) { |
| 325 b(targets[i]); | 325 b(targets[i]); |
| 326 } | 326 } |
| 327 } | 327 } |
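
The renamed constant feeds the shift in `add(pc, pc, Operand(index, LSL, ...))`: the index is still Smi-tagged, so the shift amount is the instruction-size log minus the tag size. A minimal standalone sketch of that arithmetic (not V8 code), assuming the usual 32-bit values of kSmiTagSize = 1 and 4-byte ARM instructions:

```cpp
#include <cassert>
#include <cstdint>

constexpr int kSmiTagSize = 1;     // assumed: 32-bit Smis carry a 1-bit tag
constexpr int kInstrSizeLog2 = 2;  // assumed: ARM instructions are 4 bytes

// Byte offset into the branch table for a Smi-encoded index.
uint32_t JumpTableOffset(uint32_t smi_index) {
  // smi_index == real_index << kSmiTagSize, so shifting by the difference
  // yields real_index * kInstrSize without untagging first.
  return smi_index << (kInstrSizeLog2 - kSmiTagSize);
}

int main() {
  uint32_t smi_three = 3u << kSmiTagSize;         // Smi encoding of 3
  assert(JumpTableOffset(smi_three) == 3u * 4u);  // third entry is 12 bytes in
  return 0;
}
```
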
| 328 | 328 |
| 329 | 329 |
| 330 void MacroAssembler::LoadRoot(Register destination, | 330 void MacroAssembler::LoadRoot(Register destination, |
| 331 Heap::RootListIndex index, | 331 Heap::RootListIndex index, |
| (...skipping 30 matching lines...) | |
| 362 // Mark region dirty. | 362 // Mark region dirty. |
| 363 ldr(scratch, MemOperand(object, Page::kDirtyFlagOffset)); | 363 ldr(scratch, MemOperand(object, Page::kDirtyFlagOffset)); |
| 364 mov(ip, Operand(1)); | 364 mov(ip, Operand(1)); |
| 365 orr(scratch, scratch, Operand(ip, LSL, address)); | 365 orr(scratch, scratch, Operand(ip, LSL, address)); |
| 366 str(scratch, MemOperand(object, Page::kDirtyFlagOffset)); | 366 str(scratch, MemOperand(object, Page::kDirtyFlagOffset)); |
| 367 } | 367 } |
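
The three instructions above are a read-modify-write of the page's dirty-region bitmap: at this point `address` holds the region index (computed in the elided lines), and `Operand(ip, LSL, address)` builds the single-bit mask from the 1 loaded into ip. A rough C++ analogue (not V8 code), assuming a one-word bitmap at a fixed page offset:

```cpp
#include <cstdint>

// Hypothetical stand-in for the dirty-flag word at Page::kDirtyFlagOffset;
// region_index plays the role of the `address` register.
void MarkRegionDirty(uint32_t* dirty_flags, int region_index) {
  // ldr scratch, [object, #kDirtyFlagOffset]
  // orr scratch, scratch, ip, LSL address   (ip holds 1)
  // str scratch, [object, #kDirtyFlagOffset]
  *dirty_flags |= uint32_t{1} << region_index;
}
```
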
| 368 | 368 |
| 369 | 369 |
| 370 void MacroAssembler::InNewSpace(Register object, | 370 void MacroAssembler::InNewSpace(Register object, |
| 371 Register scratch, | 371 Register scratch, |
| 372 Condition cc, | 372 Condition cond, |
| 373 Label* branch) { | 373 Label* branch) { |
| 374 ASSERT(cc == eq || cc == ne); | 374 ASSERT(cond == eq || cond == ne); |
| 375 and_(scratch, object, Operand(ExternalReference::new_space_mask())); | 375 and_(scratch, object, Operand(ExternalReference::new_space_mask())); |
| 376 cmp(scratch, Operand(ExternalReference::new_space_start())); | 376 cmp(scratch, Operand(ExternalReference::new_space_start())); |
| 377 b(cc, branch); | 377 b(cond, branch); |
| 378 } | 378 } |
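
InNewSpace relies on the new space being a single aligned, power-of-two-sized reservation: masking the object pointer with `new_space_mask` and comparing against `new_space_start` answers membership without any memory load. A sketch (not V8 code) with made-up mask and start values standing in for the ExternalReference constants:

```cpp
#include <cstdint>

// Hypothetical values standing in for ExternalReference::new_space_mask()
// and ExternalReference::new_space_start(); the real reservation is aligned
// so that (start & mask) == start.
constexpr uintptr_t kNewSpaceMask  = ~uintptr_t{0} << 22;     // assumed size
constexpr uintptr_t kNewSpaceStart = uintptr_t{0x0F000000};   // assumed base

bool InNewSpace(uintptr_t object_address) {
  // and_(scratch, object, mask); cmp(scratch, start); b(eq/ne, branch)
  return (object_address & kNewSpaceMask) == kNewSpaceStart;
}
```
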
| 379 | 379 |
| 380 | 380 |
| 381 // Will clobber 4 registers: object, offset, scratch, ip. The | 381 // Will clobber 4 registers: object, offset, scratch, ip. The |
| 382 // register 'object' contains a heap object pointer. The heap object | 382 // register 'object' contains a heap object pointer. The heap object |
| 383 // tag is shifted away. | 383 // tag is shifted away. |
| 384 void MacroAssembler::RecordWrite(Register object, | 384 void MacroAssembler::RecordWrite(Register object, |
| 385 Operand offset, | 385 Operand offset, |
| 386 Register scratch0, | 386 Register scratch0, |
| 387 Register scratch1) { | 387 Register scratch1) { |
| (...skipping 531 matching lines...) | |
| 919 | 919 |
| 920 | 920 |
| 921 void MacroAssembler::IsObjectJSStringType(Register object, | 921 void MacroAssembler::IsObjectJSStringType(Register object, |
| 922 Register scratch, | 922 Register scratch, |
| 923 Label* fail) { | 923 Label* fail) { |
| 924 ASSERT(kNotStringTag != 0); | 924 ASSERT(kNotStringTag != 0); |
| 925 | 925 |
| 926 ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); | 926 ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 927 ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); | 927 ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); |
| 928 tst(scratch, Operand(kIsNotStringMask)); | 928 tst(scratch, Operand(kIsNotStringMask)); |
| 929 b(nz, fail); | 929 b(ne, fail); |
| 930 } | 930 } |
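
The change from `nz` to `ne` is a spelling cleanup (both names denote the ARM "not equal / non-zero" condition); the check itself reads the map's instance type and tests it against `kIsNotStringMask`, which is zero for every string type. A rough C++ analogue (not V8 code), with the mask value assumed:

```cpp
#include <cstdint>

constexpr uint32_t kIsNotStringMask = 0x80;  // assumed instance-type bit layout
constexpr uint32_t kStringTag = 0x0;

// True when the instance type (read from the object's map) is a string type.
bool IsStringInstanceType(uint32_t instance_type) {
  // tst(scratch, Operand(kIsNotStringMask)); b(ne, fail);
  return (instance_type & kIsNotStringMask) == kStringTag;
}
```
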
| 931 | 931 |
| 932 | 932 |
| 933 #ifdef ENABLE_DEBUGGER_SUPPORT | 933 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 934 void MacroAssembler::DebugBreak() { | 934 void MacroAssembler::DebugBreak() { |
| 935 ASSERT(allow_stub_calls()); | 935 ASSERT(allow_stub_calls()); |
| 936 mov(r0, Operand(0, RelocInfo::NONE)); | 936 mov(r0, Operand(0, RelocInfo::NONE)); |
| 937 mov(r1, Operand(ExternalReference(Runtime::kDebugBreak))); | 937 mov(r1, Operand(ExternalReference(Runtime::kDebugBreak))); |
| 938 CEntryStub ces(1); | 938 CEntryStub ces(1); |
| 939 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); | 939 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); |
| (...skipping 859 matching lines...) | |
| 1799 ASSERT(value > 0); | 1799 ASSERT(value > 0); |
| 1800 if (FLAG_native_code_counters && counter->Enabled()) { | 1800 if (FLAG_native_code_counters && counter->Enabled()) { |
| 1801 mov(scratch2, Operand(ExternalReference(counter))); | 1801 mov(scratch2, Operand(ExternalReference(counter))); |
| 1802 ldr(scratch1, MemOperand(scratch2)); | 1802 ldr(scratch1, MemOperand(scratch2)); |
| 1803 sub(scratch1, scratch1, Operand(value)); | 1803 sub(scratch1, scratch1, Operand(value)); |
| 1804 str(scratch1, MemOperand(scratch2)); | 1804 str(scratch1, MemOperand(scratch2)); |
| 1805 } | 1805 } |
| 1806 } | 1806 } |
| 1807 | 1807 |
| 1808 | 1808 |
| 1809 void MacroAssembler::Assert(Condition cc, const char* msg) { | 1809 void MacroAssembler::Assert(Condition cond, const char* msg) { |
| 1810 if (FLAG_debug_code) | 1810 if (FLAG_debug_code) |
| 1811 Check(cc, msg); | 1811 Check(cond, msg); |
| 1812 } | 1812 } |
| 1813 | 1813 |
| 1814 | 1814 |
| 1815 void MacroAssembler::AssertRegisterIsRoot(Register reg, | 1815 void MacroAssembler::AssertRegisterIsRoot(Register reg, |
| 1816 Heap::RootListIndex index) { | 1816 Heap::RootListIndex index) { |
| 1817 if (FLAG_debug_code) { | 1817 if (FLAG_debug_code) { |
| 1818 LoadRoot(ip, index); | 1818 LoadRoot(ip, index); |
| 1819 cmp(reg, ip); | 1819 cmp(reg, ip); |
| 1820 Check(eq, "Register did not match expected root"); | 1820 Check(eq, "Register did not match expected root"); |
| 1821 } | 1821 } |
| (...skipping 12 matching lines...) | |
| 1834 LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex); | 1834 LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex); |
| 1835 cmp(elements, ip); | 1835 cmp(elements, ip); |
| 1836 b(eq, &ok); | 1836 b(eq, &ok); |
| 1837 Abort("JSObject with fast elements map has slow elements"); | 1837 Abort("JSObject with fast elements map has slow elements"); |
| 1838 bind(&ok); | 1838 bind(&ok); |
| 1839 pop(elements); | 1839 pop(elements); |
| 1840 } | 1840 } |
| 1841 } | 1841 } |
| 1842 | 1842 |
| 1843 | 1843 |
| 1844 void MacroAssembler::Check(Condition cc, const char* msg) { | 1844 void MacroAssembler::Check(Condition cond, const char* msg) { |
| 1845 Label L; | 1845 Label L; |
| 1846 b(cc, &L); | 1846 b(cond, &L); |
| 1847 Abort(msg); | 1847 Abort(msg); |
| 1848 // will not return here | 1848 // will not return here |
| 1849 bind(&L); | 1849 bind(&L); |
| 1850 } | 1850 } |
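
Assert/Check/Abort form the usual debug-check pattern: Check branches over the abort when `cond` holds, so execution only falls through into Abort on failure, and Assert adds the FLAG_debug_code gate. The same control flow in plain C++, as a sketch (not V8 code):

```cpp
#include <cstdio>
#include <cstdlib>

void Check(bool cond, const char* msg) {
  if (cond) return;                          // b(cond, &L); ... bind(&L);
  std::fprintf(stderr, "abort: %s\n", msg);  // Abort(msg)
  std::abort();                              // does not return
}

void Assert(bool cond, const char* msg) {
#ifndef NDEBUG   // stands in for the FLAG_debug_code check (assumed mapping)
  Check(cond, msg);
#endif
}
```
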
| 1851 | 1851 |
| 1852 | 1852 |
| 1853 void MacroAssembler::Abort(const char* msg) { | 1853 void MacroAssembler::Abort(const char* msg) { |
| 1854 Label abort_start; | 1854 Label abort_start; |
| 1855 bind(&abort_start); | 1855 bind(&abort_start); |
| 1856 // We want to pass the msg string like a smi to avoid GC | 1856 // We want to pass the msg string like a smi to avoid GC |
| (...skipping 399 matching lines...) | |
| 2256 | 2256 |
| 2257 void CodePatcher::Emit(Address addr) { | 2257 void CodePatcher::Emit(Address addr) { |
| 2258 masm()->emit(reinterpret_cast<Instr>(addr)); | 2258 masm()->emit(reinterpret_cast<Instr>(addr)); |
| 2259 } | 2259 } |
| 2260 #endif // ENABLE_DEBUGGER_SUPPORT | 2260 #endif // ENABLE_DEBUGGER_SUPPORT |
| 2261 | 2261 |
| 2262 | 2262 |
| 2263 } } // namespace v8::internal | 2263 } } // namespace v8::internal |
| 2264 | 2264 |
| 2265 #endif // V8_TARGET_ARCH_ARM | 2265 #endif // V8_TARGET_ARCH_ARM |