| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 407 matching lines...) |
| 418 } | 418 } |
| 419 } | 419 } |
| 420 | 420 |
| 421 | 421 |
| 422 void MacroAssembler::LoadRoot(Register destination, | 422 void MacroAssembler::LoadRoot(Register destination, |
| 423 Heap::RootListIndex index, | 423 Heap::RootListIndex index, |
| 424 Condition cond) { | 424 Condition cond) { |
| 425 if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && | 425 if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && |
| 426 !Heap::RootCanBeWrittenAfterInitialization(index) && | 426 !Heap::RootCanBeWrittenAfterInitialization(index) && |
| 427 !predictable_code_size()) { | 427 !predictable_code_size()) { |
| 428 Handle<Object> root(isolate()->heap()->roots_array_start()[index]); | 428 Handle<Object> root(isolate()->heap()->roots_array_start()[index], |
| | 429                                isolate()); |
| 429 if (!isolate()->heap()->InNewSpace(*root)) { | 430 if (!isolate()->heap()->InNewSpace(*root)) { |
| 430 // The CPU supports fast immediate values, and this root will never | 431 // The CPU supports fast immediate values, and this root will never |
| 431 // change. We will load it as a relocatable immediate value. | 432 // change. We will load it as a relocatable immediate value. |
| 432 mov(destination, Operand(root), LeaveCC, cond); | 433 mov(destination, Operand(root), LeaveCC, cond); |
| 433 return; | 434 return; |
| 434 } | 435 } |
| 435 } | 436 } |
| 436 ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond); | 437 ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond); |
| 437 } | 438 } |
| 438 | 439 |
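The LoadRoot change above threads the Isolate into the Handle constructor explicitly instead of relying on an ambient one. As a rough illustration only (simplified stand-in types, not V8's real Isolate/Handle, and the slot management is invented for the sketch), the constructor shape it moves to looks like this:

    // handle_isolate_sketch.cc -- minimal sketch, NOT V8's real API: stand-in
    // types showing the "pass the isolate explicitly" constructor shape used
    // above in Handle<Object> root(value, isolate()).
    #include <deque>
    #include <iostream>

    struct Object { int value; };

    // Hypothetical stand-in for v8::internal::Isolate: it owns the handle
    // slots, so handle storage is per-isolate rather than process-global.
    class Isolate {
     public:
      Object** AllocateHandleSlot(Object* obj) {
        slots_.push_back(obj);   // deque keeps element addresses stable
        return &slots_.back();
      }
     private:
      std::deque<Object*> slots_;
    };

    // Hypothetical stand-in for Handle<Object>: constructed from (value, isolate).
    class Handle {
     public:
      Handle(Object* obj, Isolate* isolate)
          : location_(isolate->AllocateHandleSlot(obj)) {}
      Object* operator*() const { return *location_; }
     private:
      Object** location_;
    };

    int main() {
      Isolate isolate;
      Object root_obj = { 42 };
      Handle root(&root_obj, &isolate);     // isolate supplied explicitly
      std::cout << (*root)->value << "\n";  // prints 42
      return 0;
    }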
| (...skipping 1795 matching lines...) |
| 2234 | 2235 |
| 2235 | 2236 |
| 2236 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 2237 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
| 2237 return ref0.address() - ref1.address(); | 2238 return ref0.address() - ref1.address(); |
| 2238 } | 2239 } |
| 2239 | 2240 |
| 2240 | 2241 |
| 2241 void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function, | 2242 void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function, |
| 2242 int stack_space) { | 2243 int stack_space) { |
| 2243 ExternalReference next_address = | 2244 ExternalReference next_address = |
| 2244 ExternalReference::handle_scope_next_address(); | 2245 ExternalReference::handle_scope_next_address(isolate()); |
| 2245 const int kNextOffset = 0; | 2246 const int kNextOffset = 0; |
| 2246 const int kLimitOffset = AddressOffset( | 2247 const int kLimitOffset = AddressOffset( |
| 2247 ExternalReference::handle_scope_limit_address(), | 2248 ExternalReference::handle_scope_limit_address(isolate()), |
| 2248 next_address); | 2249 next_address); |
| 2249 const int kLevelOffset = AddressOffset( | 2250 const int kLevelOffset = AddressOffset( |
| 2250 ExternalReference::handle_scope_level_address(), | 2251 ExternalReference::handle_scope_level_address(isolate()), |
| 2251 next_address); | 2252 next_address); |
| 2252 | 2253 |
| 2253 // Allocate HandleScope in callee-save registers. | 2254 // Allocate HandleScope in callee-save registers. |
| 2254 mov(r7, Operand(next_address)); | 2255 mov(r7, Operand(next_address)); |
| 2255 ldr(r4, MemOperand(r7, kNextOffset)); | 2256 ldr(r4, MemOperand(r7, kNextOffset)); |
| 2256 ldr(r5, MemOperand(r7, kLimitOffset)); | 2257 ldr(r5, MemOperand(r7, kLimitOffset)); |
| 2257 ldr(r6, MemOperand(r7, kLevelOffset)); | 2258 ldr(r6, MemOperand(r7, kLevelOffset)); |
| 2258 add(r6, r6, Operand(1)); | 2259 add(r6, r6, Operand(1)); |
| 2259 str(r6, MemOperand(r7, kLevelOffset)); | 2260 str(r6, MemOperand(r7, kLevelOffset)); |
| 2260 | 2261 |
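In the same spirit, the handle_scope_*_address() external references above now take isolate(), since the HandleScope bookkeeping lives on a concrete isolate. A standalone sketch of that idea, using invented stand-in types rather than V8's real ExternalReference/Isolate:

    // handle_scope_offsets_sketch.cc -- minimal sketch, NOT V8's real types:
    // per-isolate HandleScope bookkeeping, plus the AddressOffset() trick the
    // patch uses so generated code can keep one base pointer and reach the
    // other fields by byte offset.
    #include <iostream>

    struct Object {};

    // Hypothetical per-isolate bookkeeping, analogous to the three words the
    // generated code reads via kNextOffset / kLimitOffset / kLevelOffset.
    struct HandleScopeData {
      Object** next;
      Object** limit;
      int level;
    };

    class Isolate {
     public:
      HandleScopeData* handle_scope_data() { return &data_; }
     private:
      HandleScopeData data_;
    };

    // Hypothetical stand-ins for the ExternalReference accessors: each address
    // is only meaningful relative to a specific isolate.
    void* handle_scope_next_address(Isolate* isolate) {
      return &isolate->handle_scope_data()->next;
    }
    void* handle_scope_limit_address(Isolate* isolate) {
      return &isolate->handle_scope_data()->limit;
    }
    void* handle_scope_level_address(Isolate* isolate) {
      return &isolate->handle_scope_data()->level;
    }

    // Same idea as the AddressOffset() helper in the patch.
    int AddressOffset(void* ref0, void* ref1) {
      return static_cast<int>(static_cast<char*>(ref0) -
                              static_cast<char*>(ref1));
    }

    int main() {
      Isolate isolate;
      void* next = handle_scope_next_address(&isolate);
      std::cout << "limit at +"
                << AddressOffset(handle_scope_limit_address(&isolate), next)
                << ", level at +"
                << AddressOffset(handle_scope_level_address(&isolate), next)
                << "\n";  // e.g. "limit at +4, level at +8" on a 32-bit build
      return 0;
    }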
| (...skipping 1729 matching lines...) |
| 3990 void CodePatcher::EmitCondition(Condition cond) { | 3991 void CodePatcher::EmitCondition(Condition cond) { |
| 3991 Instr instr = Assembler::instr_at(masm_.pc_); | 3992 Instr instr = Assembler::instr_at(masm_.pc_); |
| 3992 instr = (instr & ~kCondMask) | cond; | 3993 instr = (instr & ~kCondMask) | cond; |
| 3993 masm_.emit(instr); | 3994 masm_.emit(instr); |
| 3994 } | 3995 } |
| 3995 | 3996 |
| 3996 | 3997 |
| 3997 } } // namespace v8::internal | 3998 } } // namespace v8::internal |
| 3998 | 3999 |
| 3999 #endif // V8_TARGET_ARCH_ARM | 4000 #endif // V8_TARGET_ARCH_ARM |
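For context, the unchanged EmitCondition shown above rewrites the condition field of an already-emitted ARM instruction. A self-contained sketch of that mask-and-or, with the constants assumed from the ARM instruction encoding (condition in bits 31..28) rather than copied from V8's headers:

    // cond_patch_sketch.cc -- minimal sketch of EmitCondition's mask-and-or:
    // on ARM, every instruction carries its condition in the top four bits,
    // so re-conditioning an instruction is a clear-and-insert on that nibble.
    #include <cstdint>
    #include <cstdio>

    typedef uint32_t Instr;

    const Instr kCondMask = 0xF0000000u;  // assumed: bits 31..28 hold the condition
    const Instr kAl = 0xE0000000u;        // AL (always)
    const Instr kEq = 0x00000000u;        // EQ (equal)

    Instr SetCondition(Instr instr, Instr cond) {
      return (instr & ~kCondMask) | cond;  // clear old condition, insert new one
    }

    int main() {
      Instr mov_r0_1 = 0xE3A00001u;        // mov r0, #1 under AL
      Instr patched = SetCondition(mov_r0_1, kEq);
      std::printf("%08X -> %08X\n",
                  static_cast<unsigned>(mov_r0_1),
                  static_cast<unsigned>(patched));  // E3A00001 -> 03A00001
      return 0;
    }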