OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 180 matching lines...)
191 } | 191 } |
192 | 192 |
193 if (info()->saves_caller_doubles()) { | 193 if (info()->saves_caller_doubles()) { |
194 SaveCallerDoubles(); | 194 SaveCallerDoubles(); |
195 } | 195 } |
196 | 196 |
197 // Possibly allocate a local context. | 197 // Possibly allocate a local context. |
198 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 198 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
199 if (heap_slots > 0) { | 199 if (heap_slots > 0) { |
200 Comment(";;; Allocate local context"); | 200 Comment(";;; Allocate local context"); |
| 201 bool need_write_barrier = true; |
201 // Argument to NewContext is the function, which is in a1. | 202 // Argument to NewContext is the function, which is in a1. |
202 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 203 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
203 FastNewContextStub stub(isolate(), heap_slots); | 204 FastNewContextStub stub(isolate(), heap_slots); |
204 __ CallStub(&stub); | 205 __ CallStub(&stub); |
| 206 // Result of FastNewContextStub is always in new space. |
| 207 need_write_barrier = false; |
205 } else { | 208 } else { |
206 __ push(a1); | 209 __ push(a1); |
207 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1); | 210 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1); |
208 } | 211 } |
209 RecordSafepoint(Safepoint::kNoLazyDeopt); | 212 RecordSafepoint(Safepoint::kNoLazyDeopt); |
210 // Context is returned in both v0 and cp. It replaces the context | 213 // Context is returned in both v0 and cp. It replaces the context |
211 // passed to us. It's saved in the stack and kept live in cp. | 214 // passed to us. It's saved in the stack and kept live in cp. |
212 __ mov(cp, v0); | 215 __ mov(cp, v0); |
213 __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 216 __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
214 // Copy any necessary parameters into the context. | 217 // Copy any necessary parameters into the context. |
215 int num_parameters = scope()->num_parameters(); | 218 int num_parameters = scope()->num_parameters(); |
216 for (int i = 0; i < num_parameters; i++) { | 219 for (int i = 0; i < num_parameters; i++) { |
217 Variable* var = scope()->parameter(i); | 220 Variable* var = scope()->parameter(i); |
218 if (var->IsContextSlot()) { | 221 if (var->IsContextSlot()) { |
219 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 222 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
220 (num_parameters - 1 - i) * kPointerSize; | 223 (num_parameters - 1 - i) * kPointerSize; |
221 // Load parameter from stack. | 224 // Load parameter from stack. |
222 __ lw(a0, MemOperand(fp, parameter_offset)); | 225 __ lw(a0, MemOperand(fp, parameter_offset)); |
223 // Store it in the context. | 226 // Store it in the context. |
224 MemOperand target = ContextOperand(cp, var->index()); | 227 MemOperand target = ContextOperand(cp, var->index()); |
225 __ sw(a0, target); | 228 __ sw(a0, target); |
226 // Update the write barrier. This clobbers a3 and a0. | 229 // Update the write barrier. This clobbers a3 and a0. |
227 __ RecordWriteContextSlot( | 230 if (need_write_barrier) { |
228 cp, target.offset(), a0, a3, GetRAState(), kSaveFPRegs); | 231 __ RecordWriteContextSlot( |
| 232 cp, target.offset(), a0, a3, GetRAState(), kSaveFPRegs); |
| 233 } else if (FLAG_debug_code) { |
| 234 Label done; |
| 235 __ JumpIfInNewSpace(cp, a0, &done); |
| 236 __ Abort(kExpectedNewSpaceObject); |
| 237 __ bind(&done); |
| 238 } |
229 } | 239 } |
230 } | 240 } |
231 Comment(";;; End allocate local context"); | 241 Comment(";;; End allocate local context"); |
232 } | 242 } |
233 | 243 |
234 // Trace the call. | 244 // Trace the call. |
235 if (FLAG_trace && info()->IsOptimizing()) { | 245 if (FLAG_trace && info()->IsOptimizing()) { |
236 // We have not executed any compiled code yet, so cp still holds the | 246 // We have not executed any compiled code yet, so cp still holds the |
237 // incoming context. | 247 // incoming context. |
238 __ CallRuntime(Runtime::kTraceEnter, 0); | 248 __ CallRuntime(Runtime::kTraceEnter, 0); |
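Note on this hunk: V8's generational write barrier only has to record stores that might create old-to-new pointers. FastNewContextStub always allocates its result in new space, so stores into a stub-allocated context can skip RecordWriteContextSlot entirely; the runtime path (kHiddenNewFunctionContext) keeps need_write_barrier set. Debug builds re-verify the elided case with JumpIfInNewSpace/Abort(kExpectedNewSpaceObject). A minimal stand-alone sketch of that control flow follows; the toy heap and helper names are hypothetical stand-ins, not V8 APIs:

// Sketch (not V8 code) of the write-barrier-elision pattern in the hunk above.
#include <cassert>
#include <cstdio>

struct ToyHeap {
  bool context_in_new_space;  // a real space check in V8, a flag here
};

static bool in_new_space(const ToyHeap& heap) {
  return heap.context_in_new_space;
}

static void record_write(const char* what) {
  std::printf("write barrier recorded for %s\n", what);
}

// Mirrors the diff: the store itself always happens; the barrier runs only
// when the context might live in old space. The else branch matches the
// FLAG_debug_code path (JumpIfInNewSpace + Abort).
static void store_context_slot(ToyHeap& heap, bool need_write_barrier) {
  // ... the store itself (__ sw(a0, target) in the diff) ...
  if (need_write_barrier) {
    record_write("context slot");  // RecordWriteContextSlot path
  } else {
    assert(in_new_space(heap));    // debug-only sanity check
  }
}

int main() {
  ToyHeap runtime_allocated{false};  // kHiddenNewFunctionContext path
  store_context_slot(runtime_allocated, /*need_write_barrier=*/true);

  ToyHeap stub_allocated{true};      // FastNewContextStub path: new space
  store_context_slot(stub_allocated, /*need_write_barrier=*/false);
  return 0;
}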
(...skipping 5639 matching lines...)
5878 __ lw(result, FieldMemOperand(scratch, | 5888 __ lw(result, FieldMemOperand(scratch, |
5879 FixedArray::kHeaderSize - kPointerSize)); | 5889 FixedArray::kHeaderSize - kPointerSize)); |
5880 __ bind(deferred->exit()); | 5890 __ bind(deferred->exit()); |
5881 __ bind(&done); | 5891 __ bind(&done); |
5882 } | 5892 } |
5883 | 5893 |
5884 | 5894 |
5885 #undef __ | 5895 #undef __ |
5886 | 5896 |
5887 } } // namespace v8::internal | 5897 } } // namespace v8::internal |
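One more note, on the unchanged parameter-copy loop in the first hunk: each context-allocated parameter is loaded from the caller's frame at kCallerSPOffset + (num_parameters - 1 - i) * kPointerSize, so parameter 0 sits deepest in the caller's frame. A worked example under assumed MIPS32 constants (kPointerSize of 4 and StandardFrameConstants::kCallerSPOffset of 2 * kPointerSize, i.e. saved fp plus return address); this is a hypothetical stand-alone program, not V8 code:

// Worked example of the parameter_offset arithmetic in the first hunk.
#include <cstdio>

int main() {
  const int kPointerSize = 4;                    // assumed MIPS32 word size
  const int kCallerSPOffset = 2 * kPointerSize;  // assumed: saved fp + ra
  const int num_parameters = 2;
  for (int i = 0; i < num_parameters; i++) {
    const int parameter_offset =
        kCallerSPOffset + (num_parameters - 1 - i) * kPointerSize;
    // Earlier parameters sit deeper in the caller's frame:
    // parameter 0 -> fp + 12, parameter 1 -> fp + 8.
    std::printf("parameter %d loads from fp + %d\n", i, parameter_offset);
  }
  return 0;
}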