OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 92 matching lines...)
103 RelocInfo::CODE_TARGET); | 103 RelocInfo::CODE_TARGET); |
104 } | 104 } |
105 | 105 |
106 | 106 |
107 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 107 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
108 bool is_api_function, | 108 bool is_api_function, |
109 bool count_constructions) { | 109 bool count_constructions) { |
110 // Should never count constructions for api objects. | 110 // Should never count constructions for api objects. |
111 ASSERT(!is_api_function || !count_constructions); | 111 ASSERT(!is_api_function || !count_constructions); |
112 | 112 |
113 // Enter a construct frame. | 113 // Enter a construct frame. |
114 __ EnterConstructFrame(); | 114 { |
115 | 115 FrameScope scope(masm, StackFrame::CONSTRUCT); |
116 // Store a smi-tagged arguments count on the stack. | 116 |
117 __ Integer32ToSmi(rax, rax); | 117 // Store a smi-tagged arguments count on the stack. |
118 __ push(rax); | 118 __ Integer32ToSmi(rax, rax); |
119 | 119 __ push(rax); |
120 // Push the function to invoke on the stack. | 120 |
121 __ push(rdi); | 121 // Push the function to invoke on the stack. |
122 | 122 __ push(rdi); |
123 // Try to allocate the object without transitioning into C code. If any of the | 123 |
124 // preconditions is not met, the code bails out to the runtime call. | 124 // Try to allocate the object without transitioning into C code. If any of |
125 Label rt_call, allocated; | 125 // the preconditions is not met, the code bails out to the runtime call. |
126 if (FLAG_inline_new) { | 126 Label rt_call, allocated; |
127 Label undo_allocation; | 127 if (FLAG_inline_new) { |
| 128 Label undo_allocation; |
128 | 129 |
129 #ifdef ENABLE_DEBUGGER_SUPPORT | 130 #ifdef ENABLE_DEBUGGER_SUPPORT |
130 ExternalReference debug_step_in_fp = | 131 ExternalReference debug_step_in_fp = |
131 ExternalReference::debug_step_in_fp_address(masm->isolate()); | 132 ExternalReference::debug_step_in_fp_address(masm->isolate()); |
132 __ movq(kScratchRegister, debug_step_in_fp); | 133 __ movq(kScratchRegister, debug_step_in_fp); |
133 __ cmpq(Operand(kScratchRegister, 0), Immediate(0)); | 134 __ cmpq(Operand(kScratchRegister, 0), Immediate(0)); |
134 __ j(not_equal, &rt_call); | 135 __ j(not_equal, &rt_call); |
135 #endif | 136 #endif |
136 | 137 |
137 // Verified that the constructor is a JSFunction. | 138 // Verified that the constructor is a JSFunction. |
138 // Load the initial map and verify that it is in fact a map. | 139 // Load the initial map and verify that it is in fact a map. |
139 // rdi: constructor | 140 // rdi: constructor |
140 __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 141 __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
141 // Will both indicate a NULL and a Smi | 142 // Will both indicate a NULL and a Smi |
142 STATIC_ASSERT(kSmiTag == 0); | 143 ASSERT(kSmiTag == 0); |
143 __ JumpIfSmi(rax, &rt_call); | 144 __ JumpIfSmi(rax, &rt_call); |
144 // rdi: constructor | 145 // rdi: constructor |
145 // rax: initial map (if proven valid below) | 146 // rax: initial map (if proven valid below) |
146 __ CmpObjectType(rax, MAP_TYPE, rbx); | 147 __ CmpObjectType(rax, MAP_TYPE, rbx); |
147 __ j(not_equal, &rt_call); | 148 __ j(not_equal, &rt_call); |
148 | 149 |
149 // Check that the constructor is not constructing a JSFunction (see comments | 150 // Check that the constructor is not constructing a JSFunction (see |
150 // in Runtime_NewObject in runtime.cc). In which case the initial map's | 151 // comments in Runtime_NewObject in runtime.cc). In which case the |
151 // instance type would be JS_FUNCTION_TYPE. | 152 // initial map's instance type would be JS_FUNCTION_TYPE. |
152 // rdi: constructor | 153 // rdi: constructor |
153 // rax: initial map | 154 // rax: initial map |
154 __ CmpInstanceType(rax, JS_FUNCTION_TYPE); | 155 __ CmpInstanceType(rax, JS_FUNCTION_TYPE); |
155 __ j(equal, &rt_call); | 156 __ j(equal, &rt_call); |
156 | 157 |
157 if (count_constructions) { | 158 if (count_constructions) { |
158 Label allocate; | 159 Label allocate; |
159 // Decrease generous allocation count. | 160 // Decrease generous allocation count. |
160 __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 161 __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
161 __ decb(FieldOperand(rcx, SharedFunctionInfo::kConstructionCountOffset)); | 162 __ decb(FieldOperand(rcx, |
162 __ j(not_zero, &allocate); | 163 SharedFunctionInfo::kConstructionCountOffset)); |
163 | 164 __ j(not_zero, &allocate); |
164 __ push(rax); | 165 |
165 __ push(rdi); | 166 __ push(rax); |
166 | 167 __ push(rdi); |
167 __ push(rdi); // constructor | 168 |
168 // The call will replace the stub, so the countdown is only done once. | 169 __ push(rdi); // constructor |
169 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); | 170 // The call will replace the stub, so the countdown is only done once. |
170 | 171 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); |
171 __ pop(rdi); | 172 |
172 __ pop(rax); | 173 __ pop(rdi); |
173 | 174 __ pop(rax); |
174 __ bind(&allocate); | 175 |
| 176 __ bind(&allocate); |
| 177 } |
| 178 |
| 179 // Now allocate the JSObject on the heap. |
| 180 __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset)); |
| 181 __ shl(rdi, Immediate(kPointerSizeLog2)); |
| 182 // rdi: size of new object |
| 183 __ AllocateInNewSpace(rdi, |
| 184 rbx, |
| 185 rdi, |
| 186 no_reg, |
| 187 &rt_call, |
| 188 NO_ALLOCATION_FLAGS); |
| 189 // Allocated the JSObject, now initialize the fields. |
| 190 // rax: initial map |
| 191 // rbx: JSObject (not HeapObject tagged - the actual address). |
| 192 // rdi: start of next object |
| 193 __ movq(Operand(rbx, JSObject::kMapOffset), rax); |
| 194 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); |
| 195 __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx); |
| 196 __ movq(Operand(rbx, JSObject::kElementsOffset), rcx); |
| 197 // Set extra fields in the newly allocated object. |
| 198 // rax: initial map |
| 199 // rbx: JSObject |
| 200 // rdi: start of next object |
| 201 { Label loop, entry; |
| 202 // To allow for truncation. |
| 203 if (count_constructions) { |
| 204 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex); |
| 205 } else { |
| 206 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); |
| 207 } |
| 208 __ lea(rcx, Operand(rbx, JSObject::kHeaderSize)); |
| 209 __ jmp(&entry); |
| 210 __ bind(&loop); |
| 211 __ movq(Operand(rcx, 0), rdx); |
| 212 __ addq(rcx, Immediate(kPointerSize)); |
| 213 __ bind(&entry); |
| 214 __ cmpq(rcx, rdi); |
| 215 __ j(less, &loop); |
| 216 } |
| 217 |
| 218 // Add the object tag to make the JSObject real, so that we can continue |
| 219 // and jump into the continuation code at any time from now on. Any |
| 220 // failures need to undo the allocation, so that the heap is in a |
| 221 // consistent state and verifiable. |
| 222 // rax: initial map |
| 223 // rbx: JSObject |
| 224 // rdi: start of next object |
| 225 __ or_(rbx, Immediate(kHeapObjectTag)); |
| 226 |
| 227 // Check if a non-empty properties array is needed. |
| 228 // Allocate and initialize a FixedArray if it is. |
| 229 // rax: initial map |
| 230 // rbx: JSObject |
| 231 // rdi: start of next object |
| 232 // Calculate total properties described map. |
| 233 __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset)); |
| 234 __ movzxbq(rcx, |
| 235 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset)); |
| 236 __ addq(rdx, rcx); |
| 237 // Calculate unused properties past the end of the in-object properties. |
| 238 __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset)); |
| 239 __ subq(rdx, rcx); |
| 240 // Done if no extra properties are to be allocated. |
| 241 __ j(zero, &allocated); |
| 242 __ Assert(positive, "Property allocation count failed."); |
| 243 |
| 244 // Scale the number of elements by pointer size and add the header for |
| 245 // FixedArrays to the start of the next object calculation from above. |
| 246 // rbx: JSObject |
| 247 // rdi: start of next object (will be start of FixedArray) |
| 248 // rdx: number of elements in properties array |
| 249 __ AllocateInNewSpace(FixedArray::kHeaderSize, |
| 250 times_pointer_size, |
| 251 rdx, |
| 252 rdi, |
| 253 rax, |
| 254 no_reg, |
| 255 &undo_allocation, |
| 256 RESULT_CONTAINS_TOP); |
| 257 |
| 258 // Initialize the FixedArray. |
| 259 // rbx: JSObject |
| 260 // rdi: FixedArray |
| 261 // rdx: number of elements |
| 262 // rax: start of next object |
| 263 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex); |
| 264 __ movq(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map |
| 265 __ Integer32ToSmi(rdx, rdx); |
| 266 __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length |
| 267 |
| 268 // Initialize the fields to undefined. |
| 269 // rbx: JSObject |
| 270 // rdi: FixedArray |
| 271 // rax: start of next object |
| 272 // rdx: number of elements |
| 273 { Label loop, entry; |
| 274 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); |
| 275 __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize)); |
| 276 __ jmp(&entry); |
| 277 __ bind(&loop); |
| 278 __ movq(Operand(rcx, 0), rdx); |
| 279 __ addq(rcx, Immediate(kPointerSize)); |
| 280 __ bind(&entry); |
| 281 __ cmpq(rcx, rax); |
| 282 __ j(below, &loop); |
| 283 } |
| 284 |
| 285 // Store the initialized FixedArray into the properties field of |
| 286 // the JSObject |
| 287 // rbx: JSObject |
| 288 // rdi: FixedArray |
| 289 __ or_(rdi, Immediate(kHeapObjectTag)); // add the heap tag |
| 290 __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi); |
| 291 |
| 292 |
| 293 // Continue with JSObject being successfully allocated |
| 294 // rbx: JSObject |
| 295 __ jmp(&allocated); |
| 296 |
| 297 // Undo the setting of the new top so that the heap is verifiable. For |
| 298 // example, the map's unused properties potentially do not match the |
| 299 // allocated objects unused properties. |
| 300 // rbx: JSObject (previous new top) |
| 301 __ bind(&undo_allocation); |
| 302 __ UndoAllocationInNewSpace(rbx); |
175 } | 303 } |
176 | 304 |
177 // Now allocate the JSObject on the heap. | 305 // Allocate the new receiver object using the runtime call. |
178 __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset)); | 306 // rdi: function (constructor) |
179 __ shl(rdi, Immediate(kPointerSizeLog2)); | 307 __ bind(&rt_call); |
180 // rdi: size of new object | 308 // Must restore rdi (constructor) before calling runtime. |
181 __ AllocateInNewSpace(rdi, | 309 __ movq(rdi, Operand(rsp, 0)); |
182 rbx, | 310 __ push(rdi); |
183 rdi, | 311 __ CallRuntime(Runtime::kNewObject, 1); |
184 no_reg, | 312 __ movq(rbx, rax); // store result in rbx |
185 &rt_call, | 313 |
186 NO_ALLOCATION_FLAGS); | 314 // New object allocated. |
187 // Allocated the JSObject, now initialize the fields. | 315 // rbx: newly allocated object |
188 // rax: initial map | 316 __ bind(&allocated); |
189 // rbx: JSObject (not HeapObject tagged - the actual address). | 317 // Retrieve the function from the stack. |
190 // rdi: start of next object | 318 __ pop(rdi); |
191 __ movq(Operand(rbx, JSObject::kMapOffset), rax); | 319 |
192 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); | 320 // Retrieve smi-tagged arguments count from the stack. |
193 __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx); | 321 __ movq(rax, Operand(rsp, 0)); |
194 __ movq(Operand(rbx, JSObject::kElementsOffset), rcx); | 322 __ SmiToInteger32(rax, rax); |
195 // Set extra fields in the newly allocated object. | 323 |
196 // rax: initial map | 324 // Push the allocated receiver to the stack. We need two copies |
197 // rbx: JSObject | 325 // because we may have to return the original one and the calling |
198 // rdi: start of next object | 326 // conventions dictate that the called function pops the receiver. |
199 { Label loop, entry; | 327 __ push(rbx); |
200 // To allow for truncation. | 328 __ push(rbx); |
201 if (count_constructions) { | 329 |
202 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex); | 330 // Setup pointer to last argument. |
203 } else { | 331 __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset)); |
204 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); | 332 |
205 } | 333 // Copy arguments and receiver to the expression stack. |
206 __ lea(rcx, Operand(rbx, JSObject::kHeaderSize)); | 334 Label loop, entry; |
207 __ jmp(&entry); | 335 __ movq(rcx, rax); |
208 __ bind(&loop); | 336 __ jmp(&entry); |
209 __ movq(Operand(rcx, 0), rdx); | 337 __ bind(&loop); |
210 __ addq(rcx, Immediate(kPointerSize)); | 338 __ push(Operand(rbx, rcx, times_pointer_size, 0)); |
211 __ bind(&entry); | 339 __ bind(&entry); |
212 __ cmpq(rcx, rdi); | 340 __ decq(rcx); |
213 __ j(less, &loop); | 341 __ j(greater_equal, &loop); |
| 342 |
| 343 // Call the function. |
| 344 if (is_api_function) { |
| 345 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
| 346 Handle<Code> code = |
| 347 masm->isolate()->builtins()->HandleApiCallConstruct(); |
| 348 ParameterCount expected(0); |
| 349 __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET, |
| 350 CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); |
| 351 } else { |
| 352 ParameterCount actual(rax); |
| 353 __ InvokeFunction(rdi, actual, CALL_FUNCTION, |
| 354 NullCallWrapper(), CALL_AS_METHOD); |
214 } | 355 } |
215 | 356 |
216 // Add the object tag to make the JSObject real, so that we can continue and | 357 // Restore context from the frame. |
217 // jump into the continuation code at any time from now on. Any failures | 358 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
218 // need to undo the allocation, so that the heap is in a consistent state | 359 |
219 // and verifiable. | 360 // If the result is an object (in the ECMA sense), we should get rid |
220 // rax: initial map | 361 // of the receiver and use the result; see ECMA-262 section 13.2.2-7 |
221 // rbx: JSObject | 362 // on page 74. |
222 // rdi: start of next object | 363 Label use_receiver, exit; |
223 __ or_(rbx, Immediate(kHeapObjectTag)); | 364 // If the result is a smi, it is *not* an object in the ECMA sense. |
224 | 365 __ JumpIfSmi(rax, &use_receiver); |
225 // Check if a non-empty properties array is needed. | 366 |
226 // Allocate and initialize a FixedArray if it is. | 367 // If the type of the result (stored in its map) is less than |
227 // rax: initial map | 368 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense. |
228 // rbx: JSObject | 369 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
229 // rdi: start of next object | 370 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); |
230 // Calculate total properties described map. | 371 __ j(above_equal, &exit); |
231 __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset)); | 372 |
232 __ movzxbq(rcx, FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset)); | 373 // Throw away the result of the constructor invocation and use the |
233 __ addq(rdx, rcx); | 374 // on-stack receiver as the result. |
234 // Calculate unused properties past the end of the in-object properties. | 375 __ bind(&use_receiver); |
235 __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset)); | 376 __ movq(rax, Operand(rsp, 0)); |
236 __ subq(rdx, rcx); | 377 |
237 // Done if no extra properties are to be allocated. | 378 // Restore the arguments count and leave the construct frame. |
238 __ j(zero, &allocated); | 379 __ bind(&exit); |
239 __ Assert(positive, "Property allocation count failed."); | 380 __ movq(rbx, Operand(rsp, kPointerSize)); // Get arguments count. |
240 | 381 |
241 // Scale the number of elements by pointer size and add the header for | 382 // Leave construct frame. |
242 // FixedArrays to the start of the next object calculation from above. | |
243 // rbx: JSObject | |
244 // rdi: start of next object (will be start of FixedArray) | |
245 // rdx: number of elements in properties array | |
246 __ AllocateInNewSpace(FixedArray::kHeaderSize, | |
247 times_pointer_size, | |
248 rdx, | |
249 rdi, | |
250 rax, | |
251 no_reg, | |
252 &undo_allocation, | |
253 RESULT_CONTAINS_TOP); | |
254 | |
255 // Initialize the FixedArray. | |
256 // rbx: JSObject | |
257 // rdi: FixedArray | |
258 // rdx: number of elements | |
259 // rax: start of next object | |
260 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex); | |
261 __ movq(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map | |
262 __ Integer32ToSmi(rdx, rdx); | |
263 __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length | |
264 | |
265 // Initialize the fields to undefined. | |
266 // rbx: JSObject | |
267 // rdi: FixedArray | |
268 // rax: start of next object | |
269 // rdx: number of elements | |
270 { Label loop, entry; | |
271 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); | |
272 __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize)); | |
273 __ jmp(&entry); | |
274 __ bind(&loop); | |
275 __ movq(Operand(rcx, 0), rdx); | |
276 __ addq(rcx, Immediate(kPointerSize)); | |
277 __ bind(&entry); | |
278 __ cmpq(rcx, rax); | |
279 __ j(below, &loop); | |
280 } | |
281 | |
282 // Store the initialized FixedArray into the properties field of | |
283 // the JSObject | |
284 // rbx: JSObject | |
285 // rdi: FixedArray | |
286 __ or_(rdi, Immediate(kHeapObjectTag)); // add the heap tag | |
287 __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi); | |
288 | |
289 | |
290 // Continue with JSObject being successfully allocated | |
291 // rbx: JSObject | |
292 __ jmp(&allocated); | |
293 | |
294 // Undo the setting of the new top so that the heap is verifiable. For | |
295 // example, the map's unused properties potentially do not match the | |
296 // allocated objects unused properties. | |
297 // rbx: JSObject (previous new top) | |
298 __ bind(&undo_allocation); | |
299 __ UndoAllocationInNewSpace(rbx); | |
300 } | 383 } |
301 | 384 |
302 // Allocate the new receiver object using the runtime call. | |
303 // rdi: function (constructor) | |
304 __ bind(&rt_call); | |
305 // Must restore rdi (constructor) before calling runtime. | |
306 __ movq(rdi, Operand(rsp, 0)); | |
307 __ push(rdi); | |
308 __ CallRuntime(Runtime::kNewObject, 1); | |
309 __ movq(rbx, rax); // store result in rbx | |
310 | |
311 // New object allocated. | |
312 // rbx: newly allocated object | |
313 __ bind(&allocated); | |
314 // Retrieve the function from the stack. | |
315 __ pop(rdi); | |
316 | |
317 // Retrieve smi-tagged arguments count from the stack. | |
318 __ movq(rax, Operand(rsp, 0)); | |
319 __ SmiToInteger32(rax, rax); | |
320 | |
321 // Push the allocated receiver to the stack. We need two copies | |
322 // because we may have to return the original one and the calling | |
323 // conventions dictate that the called function pops the receiver. | |
324 __ push(rbx); | |
325 __ push(rbx); | |
326 | |
327 // Setup pointer to last argument. | |
328 __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset)); | |
329 | |
330 // Copy arguments and receiver to the expression stack. | |
331 Label loop, entry; | |
332 __ movq(rcx, rax); | |
333 __ jmp(&entry); | |
334 __ bind(&loop); | |
335 __ push(Operand(rbx, rcx, times_pointer_size, 0)); | |
336 __ bind(&entry); | |
337 __ decq(rcx); | |
338 __ j(greater_equal, &loop); | |
339 | |
340 // Call the function. | |
341 if (is_api_function) { | |
342 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | |
343 Handle<Code> code = | |
344 masm->isolate()->builtins()->HandleApiCallConstruct(); | |
345 ParameterCount expected(0); | |
346 __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET, | |
347 CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD); | |
348 } else { | |
349 ParameterCount actual(rax); | |
350 __ InvokeFunction(rdi, actual, CALL_FUNCTION, | |
351 NullCallWrapper(), CALL_AS_METHOD); | |
352 } | |
353 | |
354 // Restore context from the frame. | |
355 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
356 | |
357 // If the result is an object (in the ECMA sense), we should get rid | |
358 // of the receiver and use the result; see ECMA-262 section 13.2.2-7 | |
359 // on page 74. | |
360 Label use_receiver, exit; | |
361 // If the result is a smi, it is *not* an object in the ECMA sense. | |
362 __ JumpIfSmi(rax, &use_receiver); | |
363 | |
364 // If the type of the result (stored in its map) is less than | |
365 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense. | |
366 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | |
367 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); | |
368 __ j(above_equal, &exit); | |
369 | |
370 // Throw away the result of the constructor invocation and use the | |
371 // on-stack receiver as the result. | |
372 __ bind(&use_receiver); | |
373 __ movq(rax, Operand(rsp, 0)); | |
374 | |
375 // Restore the arguments count and leave the construct frame. | |
376 __ bind(&exit); | |
377 __ movq(rbx, Operand(rsp, kPointerSize)); // get arguments count | |
378 __ LeaveConstructFrame(); | |
379 | |
380 // Remove caller arguments from the stack and return. | 385 // Remove caller arguments from the stack and return. |
381 __ pop(rcx); | 386 __ pop(rcx); |
382 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); | 387 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); |
383 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); | 388 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); |
384 __ push(rcx); | 389 __ push(rcx); |
385 Counters* counters = masm->isolate()->counters(); | 390 Counters* counters = masm->isolate()->counters(); |
386 __ IncrementCounter(counters->constructed_objects(), 1); | 391 __ IncrementCounter(counters->constructed_objects(), 1); |
387 __ ret(0); | 392 __ ret(0); |
388 } | 393 } |
389 | 394 |
(...skipping 16 matching lines...)
406 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, | 411 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
407 bool is_construct) { | 412 bool is_construct) { |
408 // Expects five C++ function parameters. | 413 // Expects five C++ function parameters. |
409 // - Address entry (ignored) | 414 // - Address entry (ignored) |
410 // - JSFunction* function ( | 415 // - JSFunction* function ( |
411 // - Object* receiver | 416 // - Object* receiver |
412 // - int argc | 417 // - int argc |
413 // - Object*** argv | 418 // - Object*** argv |
414 // (see Handle::Invoke in execution.cc). | 419 // (see Handle::Invoke in execution.cc). |
415 | 420 |
416 // Platform specific argument handling. After this, the stack contains | 421 // Open a C++ scope for the FrameScope. |
417 // an internal frame and the pushed function and receiver, and | 422 { |
418 // register rax and rbx holds the argument count and argument array, | 423 // Platform specific argument handling. After this, the stack contains |
419 // while rdi holds the function pointer and rsi the context. | 424 // an internal frame and the pushed function and receiver, and |
| 425 // register rax and rbx holds the argument count and argument array, |
| 426 // while rdi holds the function pointer and rsi the context. |
| 427 |
420 #ifdef _WIN64 | 428 #ifdef _WIN64 |
421 // MSVC parameters in: | 429 // MSVC parameters in: |
422 // rcx : entry (ignored) | 430 // rcx : entry (ignored) |
423 // rdx : function | 431 // rdx : function |
424 // r8 : receiver | 432 // r8 : receiver |
425 // r9 : argc | 433 // r9 : argc |
426 // [rsp+0x20] : argv | 434 // [rsp+0x20] : argv |
427 | 435 |
428 // Clear the context before we push it when entering the JS frame. | 436 // Clear the context before we push it when entering the internal frame. |
429 __ Set(rsi, 0); | 437 __ Set(rsi, 0); |
430 __ EnterInternalFrame(); | 438 // Enter an internal frame. |
| 439 FrameScope scope(masm, StackFrame::INTERNAL); |
431 | 440 |
432 // Load the function context into rsi. | 441 // Load the function context into rsi. |
433 __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset)); | 442 __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset)); |
434 | 443 |
435 // Push the function and the receiver onto the stack. | 444 // Push the function and the receiver onto the stack. |
436 __ push(rdx); | 445 __ push(rdx); |
437 __ push(r8); | 446 __ push(r8); |
438 | 447 |
439 // Load the number of arguments and setup pointer to the arguments. | 448 // Load the number of arguments and setup pointer to the arguments. |
440 __ movq(rax, r9); | 449 __ movq(rax, r9); |
441 // Load the previous frame pointer to access C argument on stack | 450 // Load the previous frame pointer to access C argument on stack |
442 __ movq(kScratchRegister, Operand(rbp, 0)); | 451 __ movq(kScratchRegister, Operand(rbp, 0)); |
443 __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset)); | 452 __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset)); |
444 // Load the function pointer into rdi. | 453 // Load the function pointer into rdi. |
445 __ movq(rdi, rdx); | 454 __ movq(rdi, rdx); |
446 #else // _WIN64 | 455 #else // _WIN64 |
447 // GCC parameters in: | 456 // GCC parameters in: |
448 // rdi : entry (ignored) | 457 // rdi : entry (ignored) |
449 // rsi : function | 458 // rsi : function |
450 // rdx : receiver | 459 // rdx : receiver |
451 // rcx : argc | 460 // rcx : argc |
452 // r8 : argv | 461 // r8 : argv |
453 | 462 |
454 __ movq(rdi, rsi); | 463 __ movq(rdi, rsi); |
455 // rdi : function | 464 // rdi : function |
456 | 465 |
457 // Clear the context before we push it when entering the JS frame. | 466 // Clear the context before we push it when entering the internal frame. |
458 __ Set(rsi, 0); | 467 __ Set(rsi, 0); |
459 // Enter an internal frame. | 468 // Enter an internal frame. |
460 __ EnterInternalFrame(); | 469 FrameScope scope(masm, StackFrame::INTERNAL); |
461 | 470 |
462 // Push the function and receiver and setup the context. | 471 // Push the function and receiver and setup the context. |
463 __ push(rdi); | 472 __ push(rdi); |
464 __ push(rdx); | 473 __ push(rdx); |
465 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | 474 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
466 | 475 |
467 // Load the number of arguments and setup pointer to the arguments. | 476 // Load the number of arguments and setup pointer to the arguments. |
468 __ movq(rax, rcx); | 477 __ movq(rax, rcx); |
469 __ movq(rbx, r8); | 478 __ movq(rbx, r8); |
470 #endif // _WIN64 | 479 #endif // _WIN64 |
471 | 480 |
472 // Current stack contents: | 481 // Current stack contents: |
473 // [rsp + 2 * kPointerSize ... ]: Internal frame | 482 // [rsp + 2 * kPointerSize ... ]: Internal frame |
474 // [rsp + kPointerSize] : function | 483 // [rsp + kPointerSize] : function |
475 // [rsp] : receiver | 484 // [rsp] : receiver |
476 // Current register contents: | 485 // Current register contents: |
477 // rax : argc | 486 // rax : argc |
478 // rbx : argv | 487 // rbx : argv |
479 // rsi : context | 488 // rsi : context |
480 // rdi : function | 489 // rdi : function |
481 | 490 |
482 // Copy arguments to the stack in a loop. | 491 // Copy arguments to the stack in a loop. |
483 // Register rbx points to array of pointers to handle locations. | 492 // Register rbx points to array of pointers to handle locations. |
484 // Push the values of these handles. | 493 // Push the values of these handles. |
485 Label loop, entry; | 494 Label loop, entry; |
486 __ Set(rcx, 0); // Set loop variable to 0. | 495 __ Set(rcx, 0); // Set loop variable to 0. |
487 __ jmp(&entry); | 496 __ jmp(&entry); |
488 __ bind(&loop); | 497 __ bind(&loop); |
489 __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0)); | 498 __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0)); |
490 __ push(Operand(kScratchRegister, 0)); // dereference handle | 499 __ push(Operand(kScratchRegister, 0)); // dereference handle |
491 __ addq(rcx, Immediate(1)); | 500 __ addq(rcx, Immediate(1)); |
492 __ bind(&entry); | 501 __ bind(&entry); |
493 __ cmpq(rcx, rax); | 502 __ cmpq(rcx, rax); |
494 __ j(not_equal, &loop); | 503 __ j(not_equal, &loop); |
495 | 504 |
496 // Invoke the code. | 505 // Invoke the code. |
497 if (is_construct) { | 506 if (is_construct) { |
498 // Expects rdi to hold function pointer. | 507 // Expects rdi to hold function pointer. |
499 __ Call(masm->isolate()->builtins()->JSConstructCall(), | 508 __ Call(masm->isolate()->builtins()->JSConstructCall(), |
500 RelocInfo::CODE_TARGET); | 509 RelocInfo::CODE_TARGET); |
501 } else { | 510 } else { |
502 ParameterCount actual(rax); | 511 ParameterCount actual(rax); |
503 // Function must be in rdi. | 512 // Function must be in rdi. |
504 __ InvokeFunction(rdi, actual, CALL_FUNCTION, | 513 __ InvokeFunction(rdi, actual, CALL_FUNCTION, |
505 NullCallWrapper(), CALL_AS_METHOD); | 514 NullCallWrapper(), CALL_AS_METHOD); |
| 515 } |
| 516 // Exit the internal frame. Notice that this also removes the empty |
| 517 // context and the function left on the stack by the code |
| 518 // invocation. |
506 } | 519 } |
507 | 520 |
508 // Exit the JS frame. Notice that this also removes the empty | |
509 // context and the function left on the stack by the code | |
510 // invocation. | |
511 __ LeaveInternalFrame(); | |
512 // TODO(X64): Is argument correct? Is there a receiver to remove? | 521 // TODO(X64): Is argument correct? Is there a receiver to remove? |
513 __ ret(1 * kPointerSize); // remove receiver | 522 __ ret(1 * kPointerSize); // Remove receiver. |
514 } | 523 } |
515 | 524 |
516 | 525 |
517 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | 526 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { |
518 Generate_JSEntryTrampolineHelper(masm, false); | 527 Generate_JSEntryTrampolineHelper(masm, false); |
519 } | 528 } |
520 | 529 |
521 | 530 |
522 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 531 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
523 Generate_JSEntryTrampolineHelper(masm, true); | 532 Generate_JSEntryTrampolineHelper(masm, true); |
524 } | 533 } |
525 | 534 |
526 | 535 |
527 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { | 536 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { |
528 // Enter an internal frame. | 537 // Enter an internal frame. |
529 __ EnterInternalFrame(); | 538 { |
| 539 FrameScope scope(masm, StackFrame::INTERNAL); |
530 | 540 |
531 // Push a copy of the function onto the stack. | 541 // Push a copy of the function onto the stack. |
532 __ push(rdi); | 542 __ push(rdi); |
533 // Push call kind information. | 543 // Push call kind information. |
534 __ push(rcx); | 544 __ push(rcx); |
535 | 545 |
536 __ push(rdi); // Function is also the parameter to the runtime call. | 546 __ push(rdi); // Function is also the parameter to the runtime call. |
537 __ CallRuntime(Runtime::kLazyCompile, 1); | 547 __ CallRuntime(Runtime::kLazyCompile, 1); |
538 | 548 |
539 // Restore call kind information. | 549 // Restore call kind information. |
540 __ pop(rcx); | 550 __ pop(rcx); |
541 // Restore receiver. | 551 // Restore receiver. |
542 __ pop(rdi); | 552 __ pop(rdi); |
543 | 553 |
544 // Tear down temporary frame. | 554 // Tear down internal frame. |
545 __ LeaveInternalFrame(); | 555 } |
546 | 556 |
547 // Do a tail-call of the compiled function. | 557 // Do a tail-call of the compiled function. |
548 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); | 558 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); |
549 __ jmp(rax); | 559 __ jmp(rax); |
550 } | 560 } |
551 | 561 |
552 | 562 |
553 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { | 563 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { |
554 // Enter an internal frame. | 564 // Enter an internal frame. |
555 __ EnterInternalFrame(); | 565 { |
| 566 FrameScope scope(masm, StackFrame::INTERNAL); |
556 | 567 |
557 // Push a copy of the function onto the stack. | 568 // Push a copy of the function onto the stack. |
558 __ push(rdi); | 569 __ push(rdi); |
559 // Push call kind information. | 570 // Push call kind information. |
560 __ push(rcx); | 571 __ push(rcx); |
561 | 572 |
562 __ push(rdi); // Function is also the parameter to the runtime call. | 573 __ push(rdi); // Function is also the parameter to the runtime call. |
563 __ CallRuntime(Runtime::kLazyRecompile, 1); | 574 __ CallRuntime(Runtime::kLazyRecompile, 1); |
564 | 575 |
565 // Restore call kind information. | 576 // Restore call kind information. |
566 __ pop(rcx); | 577 __ pop(rcx); |
567 // Restore function. | 578 // Restore function. |
568 __ pop(rdi); | 579 __ pop(rdi); |
569 | 580 |
570 // Tear down temporary frame. | 581 // Tear down internal frame. |
571 __ LeaveInternalFrame(); | 582 } |
572 | 583 |
573 // Do a tail-call of the compiled function. | 584 // Do a tail-call of the compiled function. |
574 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); | 585 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); |
575 __ jmp(rax); | 586 __ jmp(rax); |
576 } | 587 } |
577 | 588 |
578 | 589 |
579 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 590 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
580 Deoptimizer::BailoutType type) { | 591 Deoptimizer::BailoutType type) { |
581 // Enter an internal frame. | 592 // Enter an internal frame. |
582 __ EnterInternalFrame(); | 593 { |
| 594 FrameScope scope(masm, StackFrame::INTERNAL); |
583 | 595 |
584 // Pass the deoptimization type to the runtime system. | 596 // Pass the deoptimization type to the runtime system. |
585 __ Push(Smi::FromInt(static_cast<int>(type))); | 597 __ Push(Smi::FromInt(static_cast<int>(type))); |
586 | 598 |
587 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 599 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); |
588 // Tear down temporary frame. | 600 // Tear down internal frame. |
589 __ LeaveInternalFrame(); | 601 } |
590 | 602 |
591 // Get the full codegen state from the stack and untag it. | 603 // Get the full codegen state from the stack and untag it. |
592 __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize)); | 604 __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize)); |
593 | 605 |
594 // Switch on the state. | 606 // Switch on the state. |
595 Label not_no_registers, not_tos_rax; | 607 Label not_no_registers, not_tos_rax; |
596 __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS)); | 608 __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS)); |
597 __ j(not_equal, ¬_no_registers, Label::kNear); | 609 __ j(not_equal, ¬_no_registers, Label::kNear); |
598 __ ret(1 * kPointerSize); // Remove state. | 610 __ ret(1 * kPointerSize); // Remove state. |
599 | 611 |
(...skipping 16 matching lines...)
616 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | 628 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); |
617 } | 629 } |
618 | 630 |
619 | 631 |
620 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { | 632 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { |
621 // For now, we are relying on the fact that Runtime::NotifyOSR | 633 // For now, we are relying on the fact that Runtime::NotifyOSR |
622 // doesn't do any garbage collection which allows us to save/restore | 634 // doesn't do any garbage collection which allows us to save/restore |
623 // the registers without worrying about which of them contain | 635 // the registers without worrying about which of them contain |
624 // pointers. This seems a bit fragile. | 636 // pointers. This seems a bit fragile. |
625 __ Pushad(); | 637 __ Pushad(); |
626 __ EnterInternalFrame(); | 638 { |
627 __ CallRuntime(Runtime::kNotifyOSR, 0); | 639 FrameScope scope(masm, StackFrame::INTERNAL); |
628 __ LeaveInternalFrame(); | 640 __ CallRuntime(Runtime::kNotifyOSR, 0); |
| 641 } |
629 __ Popad(); | 642 __ Popad(); |
630 __ ret(0); | 643 __ ret(0); |
631 } | 644 } |
632 | 645 |
633 | 646 |
634 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { | 647 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { |
635 // Stack Layout: | 648 // Stack Layout: |
636 // rsp[0]: Return address | 649 // rsp[0]: Return address |
637 // rsp[1]: Argument n | 650 // rsp[1]: Argument n |
638 // rsp[2]: Argument n-1 | 651 // rsp[2]: Argument n-1 |
(...skipping 49 matching lines...)
688 __ CompareRoot(rbx, Heap::kNullValueRootIndex); | 701 __ CompareRoot(rbx, Heap::kNullValueRootIndex); |
689 __ j(equal, &use_global_receiver); | 702 __ j(equal, &use_global_receiver); |
690 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); | 703 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); |
691 __ j(equal, &use_global_receiver); | 704 __ j(equal, &use_global_receiver); |
692 | 705 |
693 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 706 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
694 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); | 707 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); |
695 __ j(above_equal, &shift_arguments); | 708 __ j(above_equal, &shift_arguments); |
696 | 709 |
697 __ bind(&convert_to_object); | 710 __ bind(&convert_to_object); |
698 __ EnterInternalFrame(); // In order to preserve argument count. | 711 { |
699 __ Integer32ToSmi(rax, rax); | 712 // Enter an internal frame in order to preserve argument count. |
700 __ push(rax); | 713 FrameScope scope(masm, StackFrame::INTERNAL); |
| 714 __ Integer32ToSmi(rax, rax); |
| 715 __ push(rax); |
701 | 716 |
702 __ push(rbx); | 717 __ push(rbx); |
703 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 718 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
704 __ movq(rbx, rax); | 719 __ movq(rbx, rax); |
705 __ Set(rdx, 0); // indicate regular JS_FUNCTION | 720 __ Set(rdx, 0); // indicate regular JS_FUNCTION |
706 | 721 |
707 __ pop(rax); | 722 __ pop(rax); |
708 __ SmiToInteger32(rax, rax); | 723 __ SmiToInteger32(rax, rax); |
709 __ LeaveInternalFrame(); | 724 } |
| 725 |
710 // Restore the function to rdi. | 726 // Restore the function to rdi. |
711 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); | 727 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); |
712 __ jmp(&patch_receiver, Label::kNear); | 728 __ jmp(&patch_receiver, Label::kNear); |
713 | 729 |
714 // Use the global receiver object from the called function as the | 730 // Use the global receiver object from the called function as the |
715 // receiver. | 731 // receiver. |
716 __ bind(&use_global_receiver); | 732 __ bind(&use_global_receiver); |
717 const int kGlobalIndex = | 733 const int kGlobalIndex = |
718 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 734 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
719 __ movq(rbx, FieldOperand(rsi, kGlobalIndex)); | 735 __ movq(rbx, FieldOperand(rsi, kGlobalIndex)); |
(...skipping 80 matching lines...)
800 NullCallWrapper(), CALL_AS_METHOD); | 816 NullCallWrapper(), CALL_AS_METHOD); |
801 } | 817 } |
802 | 818 |
803 | 819 |
804 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | 820 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { |
805 // Stack at entry: | 821 // Stack at entry: |
806 // rsp: return address | 822 // rsp: return address |
807 // rsp+8: arguments | 823 // rsp+8: arguments |
808 // rsp+16: receiver ("this") | 824 // rsp+16: receiver ("this") |
809 // rsp+24: function | 825 // rsp+24: function |
810 __ EnterInternalFrame(); | 826 { |
811 // Stack frame: | 827 FrameScope frame_scope(masm, StackFrame::INTERNAL); |
812 // rbp: Old base pointer | 828 // Stack frame: |
813 // rbp[1]: return address | 829 // rbp: Old base pointer |
814 // rbp[2]: function arguments | 830 // rbp[1]: return address |
815 // rbp[3]: receiver | 831 // rbp[2]: function arguments |
816 // rbp[4]: function | 832 // rbp[3]: receiver |
817 static const int kArgumentsOffset = 2 * kPointerSize; | 833 // rbp[4]: function |
818 static const int kReceiverOffset = 3 * kPointerSize; | 834 static const int kArgumentsOffset = 2 * kPointerSize; |
819 static const int kFunctionOffset = 4 * kPointerSize; | 835 static const int kReceiverOffset = 3 * kPointerSize; |
| 836 static const int kFunctionOffset = 4 * kPointerSize; |
820 | 837 |
821 __ push(Operand(rbp, kFunctionOffset)); | 838 __ push(Operand(rbp, kFunctionOffset)); |
822 __ push(Operand(rbp, kArgumentsOffset)); | 839 __ push(Operand(rbp, kArgumentsOffset)); |
823 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | 840 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); |
824 | 841 |
825 // Check the stack for overflow. We are not trying to catch | 842 // Check the stack for overflow. We are not trying to catch |
826 // interruptions (e.g. debug break and preemption) here, so the "real stack | 843 // interruptions (e.g. debug break and preemption) here, so the "real stack |
827 // limit" is checked. | 844 // limit" is checked. |
828 Label okay; | 845 Label okay; |
829 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); | 846 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); |
830 __ movq(rcx, rsp); | 847 __ movq(rcx, rsp); |
831 // Make rcx the space we have left. The stack might already be overflowed | 848 // Make rcx the space we have left. The stack might already be overflowed |
832 // here which will cause rcx to become negative. | 849 // here which will cause rcx to become negative. |
833 __ subq(rcx, kScratchRegister); | 850 __ subq(rcx, kScratchRegister); |
834 // Make rdx the space we need for the array when it is unrolled onto the | 851 // Make rdx the space we need for the array when it is unrolled onto the |
835 // stack. | 852 // stack. |
836 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2); | 853 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2); |
837 // Check if the arguments will overflow the stack. | 854 // Check if the arguments will overflow the stack. |
838 __ cmpq(rcx, rdx); | 855 __ cmpq(rcx, rdx); |
839 __ j(greater, &okay); // Signed comparison. | 856 __ j(greater, &okay); // Signed comparison. |
840 | 857 |
841 // Out of stack space. | 858 // Out of stack space. |
842 __ push(Operand(rbp, kFunctionOffset)); | 859 __ push(Operand(rbp, kFunctionOffset)); |
843 __ push(rax); | 860 __ push(rax); |
844 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); | 861 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); |
845 __ bind(&okay); | 862 __ bind(&okay); |
846 // End of stack check. | 863 // End of stack check. |
847 | 864 |
848 // Push current index and limit. | 865 // Push current index and limit. |
849 const int kLimitOffset = | 866 const int kLimitOffset = |
850 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize; | 867 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize; |
851 const int kIndexOffset = kLimitOffset - 1 * kPointerSize; | 868 const int kIndexOffset = kLimitOffset - 1 * kPointerSize; |
852 __ push(rax); // limit | 869 __ push(rax); // limit |
853 __ push(Immediate(0)); // index | 870 __ push(Immediate(0)); // index |
854 | 871 |
855 // Get the receiver. | 872 // Get the receiver. |
856 __ movq(rbx, Operand(rbp, kReceiverOffset)); | 873 __ movq(rbx, Operand(rbp, kReceiverOffset)); |
857 | 874 |
858 // Check that the function is a JS function (otherwise it must be a proxy). | 875 // Check that the function is a JS function (otherwise it must be a proxy). |
859 Label push_receiver; | 876 Label push_receiver; |
860 __ movq(rdi, Operand(rbp, kFunctionOffset)); | 877 __ movq(rdi, Operand(rbp, kFunctionOffset)); |
861 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 878 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
862 __ j(not_equal, &push_receiver); | 879 __ j(not_equal, &push_receiver); |
863 | 880 |
864 // Change context eagerly to get the right global object if necessary. | 881 // Change context eagerly to get the right global object if necessary. |
865 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | 882 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
866 | 883 |
867 // Do not transform the receiver for strict mode functions. | 884 // Do not transform the receiver for strict mode functions. |
868 Label call_to_object, use_global_receiver; | 885 Label call_to_object, use_global_receiver; |
869 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 886 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
870 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset), | 887 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset), |
871 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); | 888 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); |
872 __ j(not_equal, &push_receiver); | 889 __ j(not_equal, &push_receiver); |
873 | 890 |
874 // Do not transform the receiver for natives. | 891 // Do not transform the receiver for natives. |
875 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset), | 892 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset), |
876 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); | 893 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); |
877 __ j(not_equal, &push_receiver); | 894 __ j(not_equal, &push_receiver); |
878 | 895 |
879 // Compute the receiver in non-strict mode. | 896 // Compute the receiver in non-strict mode. |
880 __ JumpIfSmi(rbx, &call_to_object, Label::kNear); | 897 __ JumpIfSmi(rbx, &call_to_object, Label::kNear); |
881 __ CompareRoot(rbx, Heap::kNullValueRootIndex); | 898 __ CompareRoot(rbx, Heap::kNullValueRootIndex); |
882 __ j(equal, &use_global_receiver); | 899 __ j(equal, &use_global_receiver); |
883 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); | 900 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); |
884 __ j(equal, &use_global_receiver); | 901 __ j(equal, &use_global_receiver); |
885 | 902 |
886 // If given receiver is already a JavaScript object then there's no | 903 // If given receiver is already a JavaScript object then there's no |
887 // reason for converting it. | 904 // reason for converting it. |
888 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 905 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
889 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); | 906 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); |
890 __ j(above_equal, &push_receiver); | 907 __ j(above_equal, &push_receiver); |
891 | 908 |
892 // Convert the receiver to an object. | 909 // Convert the receiver to an object. |
893 __ bind(&call_to_object); | 910 __ bind(&call_to_object); |
894 __ push(rbx); | 911 __ push(rbx); |
895 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 912 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
896 __ movq(rbx, rax); | 913 __ movq(rbx, rax); |
897 __ jmp(&push_receiver, Label::kNear); | 914 __ jmp(&push_receiver, Label::kNear); |
898 | 915 |
899 // Use the current global receiver object as the receiver. | 916 // Use the current global receiver object as the receiver. |
900 __ bind(&use_global_receiver); | 917 __ bind(&use_global_receiver); |
901 const int kGlobalOffset = | 918 const int kGlobalOffset = |
902 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | 919 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
903 __ movq(rbx, FieldOperand(rsi, kGlobalOffset)); | 920 __ movq(rbx, FieldOperand(rsi, kGlobalOffset)); |
904 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset)); | 921 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset)); |
905 __ movq(rbx, FieldOperand(rbx, kGlobalOffset)); | 922 __ movq(rbx, FieldOperand(rbx, kGlobalOffset)); |
906 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); | 923 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); |
907 | 924 |
908 // Push the receiver. | 925 // Push the receiver. |
909 __ bind(&push_receiver); | 926 __ bind(&push_receiver); |
910 __ push(rbx); | 927 __ push(rbx); |
911 | 928 |
912 // Copy all arguments from the array to the stack. | 929 // Copy all arguments from the array to the stack. |
913 Label entry, loop; | 930 Label entry, loop; |
914 __ movq(rax, Operand(rbp, kIndexOffset)); | 931 __ movq(rax, Operand(rbp, kIndexOffset)); |
915 __ jmp(&entry); | 932 __ jmp(&entry); |
916 __ bind(&loop); | 933 __ bind(&loop); |
917 __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments | 934 __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments |
918 | 935 |
919 // Use inline caching to speed up access to arguments. | 936 // Use inline caching to speed up access to arguments. |
920 Handle<Code> ic = | 937 Handle<Code> ic = |
921 masm->isolate()->builtins()->KeyedLoadIC_Initialize(); | 938 masm->isolate()->builtins()->KeyedLoadIC_Initialize(); |
922 __ Call(ic, RelocInfo::CODE_TARGET); | 939 __ Call(ic, RelocInfo::CODE_TARGET); |
923 // It is important that we do not have a test instruction after the | 940 // It is important that we do not have a test instruction after the |
924 // call. A test instruction after the call is used to indicate that | 941 // call. A test instruction after the call is used to indicate that |
925 // we have generated an inline version of the keyed load. In this | 942 // we have generated an inline version of the keyed load. In this |
926 // case, we know that we are not generating a test instruction next. | 943 // case, we know that we are not generating a test instruction next. |
927 | 944 |
928 // Push the nth argument. | 945 // Push the nth argument. |
929 __ push(rax); | 946 __ push(rax); |
930 | 947 |
931 // Update the index on the stack and in register rax. | 948 // Update the index on the stack and in register rax. |
932 __ movq(rax, Operand(rbp, kIndexOffset)); | 949 __ movq(rax, Operand(rbp, kIndexOffset)); |
933 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); | 950 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); |
934 __ movq(Operand(rbp, kIndexOffset), rax); | 951 __ movq(Operand(rbp, kIndexOffset), rax); |
935 | 952 |
936 __ bind(&entry); | 953 __ bind(&entry); |
937 __ cmpq(rax, Operand(rbp, kLimitOffset)); | 954 __ cmpq(rax, Operand(rbp, kLimitOffset)); |
938 __ j(not_equal, &loop); | 955 __ j(not_equal, &loop); |
939 | 956 |
940 // Invoke the function. | 957 // Invoke the function. |
941 Label call_proxy; | 958 Label call_proxy; |
942 ParameterCount actual(rax); | 959 ParameterCount actual(rax); |
943 __ SmiToInteger32(rax, rax); | 960 __ SmiToInteger32(rax, rax); |
944 __ movq(rdi, Operand(rbp, kFunctionOffset)); | 961 __ movq(rdi, Operand(rbp, kFunctionOffset)); |
945 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 962 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
946 __ j(not_equal, &call_proxy); | 963 __ j(not_equal, &call_proxy); |
947 __ InvokeFunction(rdi, actual, CALL_FUNCTION, | 964 __ InvokeFunction(rdi, actual, CALL_FUNCTION, |
948 NullCallWrapper(), CALL_AS_METHOD); | 965 NullCallWrapper(), CALL_AS_METHOD); |
949 | 966 |
950 __ LeaveInternalFrame(); | 967 frame_scope.GenerateLeaveFrame(); |
951 __ ret(3 * kPointerSize); // remove this, receiver, and arguments | 968 __ ret(3 * kPointerSize); // remove this, receiver, and arguments |
952 | 969 |
953 // Invoke the function proxy. | 970 // Invoke the function proxy. |
954 __ bind(&call_proxy); | 971 __ bind(&call_proxy); |
955 __ push(rdi); // add function proxy as last argument | 972 __ push(rdi); // add function proxy as last argument |
956 __ incq(rax); | 973 __ incq(rax); |
957 __ Set(rbx, 0); | 974 __ Set(rbx, 0); |
958 __ SetCallKind(rcx, CALL_AS_METHOD); | 975 __ SetCallKind(rcx, CALL_AS_METHOD); |
959 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); | 976 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); |
960 __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 977 __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
961 RelocInfo::CODE_TARGET); | 978 RelocInfo::CODE_TARGET); |
962 | 979 |
963 __ LeaveInternalFrame(); | 980 // Leave internal frame. |
| 981 } |
964 __ ret(3 * kPointerSize); // remove this, receiver, and arguments | 982 __ ret(3 * kPointerSize); // remove this, receiver, and arguments |
965 } | 983 } |
966 | 984 |
967 | 985 |
968 // Number of empty elements to allocate for an empty array. | 986 // Number of empty elements to allocate for an empty array. |
969 static const int kPreallocatedArrayElements = 4; | 987 static const int kPreallocatedArrayElements = 4; |
970 | 988 |
971 | 989 |
972 // Allocate an empty JSArray. The allocated array is put into the result | 990 // Allocate an empty JSArray. The allocated array is put into the result |
973 // register. If the parameter initial_capacity is larger than zero an elements | 991 // register. If the parameter initial_capacity is larger than zero an elements |
(...skipping 539 matching lines...)
1513 // should perform a stack guard check so we can get interrupts while | 1531 // should perform a stack guard check so we can get interrupts while |
1514 // waiting for on-stack replacement. | 1532 // waiting for on-stack replacement. |
1515 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 1533 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
1516 __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); | 1534 __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); |
1517 __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset)); | 1535 __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset)); |
1518 __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset)); | 1536 __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset)); |
1519 __ j(greater, &stack_check); | 1537 __ j(greater, &stack_check); |
1520 | 1538 |
1521 // Pass the function to optimize as the argument to the on-stack | 1539 // Pass the function to optimize as the argument to the on-stack |
1522 // replacement runtime function. | 1540 // replacement runtime function. |
1523 __ EnterInternalFrame(); | 1541 { |
1524 __ push(rax); | 1542 FrameScope scope(masm, StackFrame::INTERNAL); |
1525 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); | 1543 __ push(rax); |
1526 __ LeaveInternalFrame(); | 1544 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); |
| 1545 } |
1527 | 1546 |
1528 // If the result was -1 it means that we couldn't optimize the | 1547 // If the result was -1 it means that we couldn't optimize the |
1529 // function. Just return and continue in the unoptimized version. | 1548 // function. Just return and continue in the unoptimized version. |
1530 Label skip; | 1549 Label skip; |
1531 __ SmiCompare(rax, Smi::FromInt(-1)); | 1550 __ SmiCompare(rax, Smi::FromInt(-1)); |
1532 __ j(not_equal, &skip, Label::kNear); | 1551 __ j(not_equal, &skip, Label::kNear); |
1533 __ ret(0); | 1552 __ ret(0); |
1534 | 1553 |
1535 // If we decide not to perform on-stack replacement we perform a | 1554 // If we decide not to perform on-stack replacement we perform a |
1536 // stack guard check to enable interrupts. | 1555 // stack guard check to enable interrupts. |
1537 __ bind(&stack_check); | 1556 __ bind(&stack_check); |
1538 Label ok; | 1557 Label ok; |
1539 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); | 1558 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
1540 __ j(above_equal, &ok, Label::kNear); | 1559 __ j(above_equal, &ok, Label::kNear); |
1541 | 1560 |
1542 StackCheckStub stub; | 1561 StackCheckStub stub; |
1543 __ TailCallStub(&stub); | 1562 __ TailCallStub(&stub); |
1544 __ Abort("Unreachable code: returned from tail call."); | 1563 if (FLAG_debug_code) { |
| 1564 __ Abort("Unreachable code: returned from tail call."); |
| 1565 } |
1545 __ bind(&ok); | 1566 __ bind(&ok); |
1546 __ ret(0); | 1567 __ ret(0); |
1547 | 1568 |
1548 __ bind(&skip); | 1569 __ bind(&skip); |
1549 // Untag the AST id and push it on the stack. | 1570 // Untag the AST id and push it on the stack. |
1550 __ SmiToInteger32(rax, rax); | 1571 __ SmiToInteger32(rax, rax); |
1551 __ push(rax); | 1572 __ push(rax); |
1552 | 1573 |
1553 // Generate the code for doing the frame-to-frame translation using | 1574 // Generate the code for doing the frame-to-frame translation using |
1554 // the deoptimizer infrastructure. | 1575 // the deoptimizer infrastructure. |
1555 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR); | 1576 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR); |
1556 generator.Generate(); | 1577 generator.Generate(); |
1557 } | 1578 } |
1558 | 1579 |
1559 | 1580 |
1560 #undef __ | 1581 #undef __ |
1561 | 1582 |
1562 } } // namespace v8::internal | 1583 } } // namespace v8::internal |
1563 | 1584 |
1564 #endif // V8_TARGET_ARCH_X64 | 1585 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |
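
Editor's note: the recurring change in this diff is replacing explicit __ EnterInternalFrame() / __ LeaveInternalFrame() (and EnterConstructFrame/LeaveConstructFrame) calls with a block-scoped FrameScope object, so frame teardown is tied to the enclosing C++ scope. The following is a minimal, self-contained sketch of that RAII pattern only; MiniAssembler, MiniFrameScope, and FrameType are illustrative stand-ins and are not V8's actual MacroAssembler/FrameScope API.

// Minimal sketch of an RAII frame scope (illustrative stand-ins, not V8 classes).
#include <cstdio>

enum class FrameType { kInternal, kConstruct };

class MiniAssembler {
 public:
  void EnterFrame(FrameType type) {
    std::printf("enter frame %d\n", static_cast<int>(type));
  }
  void LeaveFrame(FrameType type) {
    std::printf("leave frame %d\n", static_cast<int>(type));
  }
};

// Entering the frame in the constructor and leaving it in the destructor
// ties setup/teardown to a C++ block, so a code generator cannot forget
// the matching Leave call on any path that falls out of the scope.
class MiniFrameScope {
 public:
  MiniFrameScope(MiniAssembler* masm, FrameType type)
      : masm_(masm), type_(type) {
    masm_->EnterFrame(type_);
  }
  ~MiniFrameScope() { masm_->LeaveFrame(type_); }

 private:
  MiniAssembler* masm_;
  FrameType type_;
};

int main() {
  MiniAssembler masm;
  {
    MiniFrameScope scope(&masm, FrameType::kInternal);
    std::printf("  ... emit code that needs an internal frame ...\n");
  }  // Frame is left automatically when the scope closes.
  return 0;
}

For code paths that return while the frame is still conceptually open (as in Generate_FunctionApply above, which calls frame_scope.GenerateLeaveFrame() before an early ret), the real FrameScope additionally exposes an explicit leave hook; the sketch omits that detail.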