OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "v8.h" | 5 #include "v8.h" |
6 | 6 |
7 #include "arm/lithium-codegen-arm.h" | 7 #include "arm/lithium-codegen-arm.h" |
8 #include "arm/lithium-gap-resolver-arm.h" | 8 #include "arm/lithium-gap-resolver-arm.h" |
9 #include "code-stubs.h" | 9 #include "code-stubs.h" |
10 #include "stub-cache.h" | 10 #include "stub-cache.h" |
(...skipping 161 matching lines...)
172 } | 172 } |
173 | 173 |
174 if (info()->saves_caller_doubles()) { | 174 if (info()->saves_caller_doubles()) { |
175 SaveCallerDoubles(); | 175 SaveCallerDoubles(); |
176 } | 176 } |
177 | 177 |
178 // Possibly allocate a local context. | 178 // Possibly allocate a local context. |
179 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 179 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
180 if (heap_slots > 0) { | 180 if (heap_slots > 0) { |
181 Comment(";;; Allocate local context"); | 181 Comment(";;; Allocate local context"); |
| 182 bool need_write_barrier = true; |
182 // Argument to NewContext is the function, which is in r1. | 183 // Argument to NewContext is the function, which is in r1. |
183 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 184 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
184 FastNewContextStub stub(isolate(), heap_slots); | 185 FastNewContextStub stub(isolate(), heap_slots); |
185 __ CallStub(&stub); | 186 __ CallStub(&stub); |
| 187 // Result of FastNewContextStub is always in new space. |
| 188 need_write_barrier = false; |
186 } else { | 189 } else { |
187 __ push(r1); | 190 __ push(r1); |
188 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1); | 191 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1); |
189 } | 192 } |
190 RecordSafepoint(Safepoint::kNoLazyDeopt); | 193 RecordSafepoint(Safepoint::kNoLazyDeopt); |
191 // Context is returned in both r0 and cp. It replaces the context | 194 // Context is returned in both r0 and cp. It replaces the context |
192 // passed to us. It's saved in the stack and kept live in cp. | 195 // passed to us. It's saved in the stack and kept live in cp. |
193 __ mov(cp, r0); | 196 __ mov(cp, r0); |
194 __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 197 __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
195 // Copy any necessary parameters into the context. | 198 // Copy any necessary parameters into the context. |
196 int num_parameters = scope()->num_parameters(); | 199 int num_parameters = scope()->num_parameters(); |
197 for (int i = 0; i < num_parameters; i++) { | 200 for (int i = 0; i < num_parameters; i++) { |
198 Variable* var = scope()->parameter(i); | 201 Variable* var = scope()->parameter(i); |
199 if (var->IsContextSlot()) { | 202 if (var->IsContextSlot()) { |
200 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 203 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
201 (num_parameters - 1 - i) * kPointerSize; | 204 (num_parameters - 1 - i) * kPointerSize; |
202 // Load parameter from stack. | 205 // Load parameter from stack. |
203 __ ldr(r0, MemOperand(fp, parameter_offset)); | 206 __ ldr(r0, MemOperand(fp, parameter_offset)); |
204 // Store it in the context. | 207 // Store it in the context. |
205 MemOperand target = ContextOperand(cp, var->index()); | 208 MemOperand target = ContextOperand(cp, var->index()); |
206 __ str(r0, target); | 209 __ str(r0, target); |
207 // Update the write barrier. This clobbers r3 and r0. | 210 // Update the write barrier. This clobbers r3 and r0. |
208 __ RecordWriteContextSlot( | 211 if (need_write_barrier) { |
209 cp, | 212 __ RecordWriteContextSlot( |
210 target.offset(), | 213 cp, |
211 r0, | 214 target.offset(), |
212 r3, | 215 r0, |
213 GetLinkRegisterState(), | 216 r3, |
214 kSaveFPRegs); | 217 GetLinkRegisterState(), |
| 218 kSaveFPRegs); |
| 219 } else if (FLAG_debug_code) { |
| 220 Label done; |
| 221 __ JumpIfInNewSpace(cp, r0, &done); |
| 222 __ Abort(kExpectedNewSpaceObject); |
| 223 __ bind(&done); |
| 224 } |
215 } | 225 } |
216 } | 226 } |
217 Comment(";;; End allocate local context"); | 227 Comment(";;; End allocate local context"); |
218 } | 228 } |
219 | 229 |
220 // Trace the call. | 230 // Trace the call. |
221 if (FLAG_trace && info()->IsOptimizing()) { | 231 if (FLAG_trace && info()->IsOptimizing()) { |
222 // We have not executed any compiled code yet, so cp still holds the | 232 // We have not executed any compiled code yet, so cp still holds the |
223 // incoming context. | 233 // incoming context. |
224 __ CallRuntime(Runtime::kTraceEnter, 0); | 234 __ CallRuntime(Runtime::kTraceEnter, 0); |
(...skipping 5596 matching lines...)
5821 __ ldr(result, FieldMemOperand(scratch, | 5831 __ ldr(result, FieldMemOperand(scratch, |
5822 FixedArray::kHeaderSize - kPointerSize)); | 5832 FixedArray::kHeaderSize - kPointerSize)); |
5823 __ bind(deferred->exit()); | 5833 __ bind(deferred->exit()); |
5824 __ bind(&done); | 5834 __ bind(&done); |
5825 } | 5835 } |
5826 | 5836 |
5827 | 5837 |
5828 #undef __ | 5838 #undef __ |
5829 | 5839 |
5830 } } // namespace v8::internal | 5840 } } // namespace v8::internal |
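
Note on the change above: the patch introduces a need_write_barrier flag so the prologue skips RecordWriteContextSlot when the context came from FastNewContextStub, whose result is always allocated in new space; in debug builds it instead verifies that assumption (JumpIfInNewSpace / Abort). Below is a minimal, standalone C++ sketch of that pattern, not V8 code: all names (HeapObject, StoreWithBarrier, remembered_set) are illustrative, and the "remembered set" stands in for V8's real write-barrier machinery.

// Simplified model: a generational GC only needs a write barrier to record
// pointers stored into old-space objects (old-to-new edges must be found by
// a minor GC). Stores into objects known to be in new space can skip the
// barrier, and a debug-only check asserts that assumption.
#include <cassert>
#include <cstdio>
#include <unordered_set>

struct HeapObject {
  bool in_new_space;
  HeapObject* slot = nullptr;  // a single pointer field, for simplicity
};

// Remembered set: old-space objects that hold pointers into new space.
static std::unordered_set<HeapObject*> remembered_set;

void StoreWithBarrier(HeapObject* host, HeapObject* value,
                      bool need_write_barrier) {
  host->slot = value;
  if (need_write_barrier) {
    // Record the old-to-new edge so a minor GC can find it later.
    if (!host->in_new_space && value->in_new_space) {
      remembered_set.insert(host);
    }
  } else {
#ifndef NDEBUG
    // Analogous to the JumpIfInNewSpace + Abort path in the patch:
    // skipping the barrier is only sound if the host really is in new space.
    assert(host->in_new_space && "expected new-space object");
#endif
  }
}

int main() {
  HeapObject young_context{true};   // e.g. result of a fast stub allocation
  HeapObject old_context{false};    // e.g. context that may have been promoted
  HeapObject param{true};

  StoreWithBarrier(&young_context, &param, /*need_write_barrier=*/false);
  StoreWithBarrier(&old_context, &param, /*need_write_barrier=*/true);

  std::printf("remembered set size: %zu\n", remembered_set.size());
  return 0;
}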