OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 150 matching lines...)
161 __ ret(0); | 161 __ ret(0); |
162 | 162 |
163 // ------------------------------------------- | 163 // ------------------------------------------- |
164 // Don't adapt arguments. | 164 // Don't adapt arguments. |
165 // ------------------------------------------- | 165 // ------------------------------------------- |
166 __ bind(&dont_adapt_arguments); | 166 __ bind(&dont_adapt_arguments); |
167 __ jmp(rdx); | 167 __ jmp(rdx); |
168 } | 168 } |
169 | 169 |
170 | 170 |
| 171 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { |
| 172 // Stack Layout: |
| 173 // rsp: return address |
| 174 // +1: Argument n |
| 175 // +2: Argument n-1 |
| 176 // ... |
| 177 // +n: Argument 1 = receiver |
| 178 // +n+1: Argument 0 = function to call |
| 179 // |
| 180 // rax contains the number of arguments, n, not counting the function. |
| 181 // |
| 182 // 1. Make sure we have at least one argument. |
| 183 { Label done; |
| 184 __ testq(rax, rax); |
| 185 __ j(not_zero, &done); |
| 186 __ pop(rbx); |
| 187 __ Push(Factory::undefined_value()); |
| 188 __ push(rbx); |
| 189 __ incq(rax); |
| 190 __ bind(&done); |
| 191 } |
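A note on step 1 above: it guarantees a receiver slot even for a zero-argument call. The return address is popped, undefined is pushed in its place to serve as the receiver, the return address goes back on top, and the count is bumped. A self-contained C++ sketch of the same fix-up on a vector used as a stack model (the vector representation and names are illustrative only, not V8 API):

    #include <stdint.h>
    #include <vector>

    // back() plays the role of rsp[0]; returns the possibly bumped count.
    static int EnsureReceiver(std::vector<intptr_t>* stack, int argc,
                              intptr_t undefined_value) {
      if (argc == 0) {                            // testq(rax, rax)
        intptr_t return_address = stack->back();  // pop(rbx)
        stack->back() = undefined_value;          // Push(undefined_value())
        stack->push_back(return_address);         // push(rbx)
        argc = 1;                                 // incq(rax)
      }
      return argc;
    }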
| 192 |
| 193 // 2. Get the function to call from the stack. |
| 194 { Label done, non_function, function; |
| 195 // The function to call is at position n+1 on the stack. |
| 196 __ movq(rdi, Operand(rsp, rax, times_pointer_size, +1 * kPointerSize)); |
| 197 __ testl(rdi, Immediate(kSmiTagMask)); |
| 198 __ j(zero, &non_function); |
| 199 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
| 200 __ j(equal, &function); |
| 201 |
| 202 // Non-function called: Clear the function to force exception. |
| 203 __ bind(&non_function); |
| 204 __ xor_(rdi, rdi); |
| 205 __ jmp(&done); |
| 206 |
| 207 // Function called: Change context eagerly to get the right global object. |
| 208 __ bind(&function); |
| 209 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
| 210 |
| 211 __ bind(&done); |
| 212 } |
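The testl against kSmiTagMask in step 2 is V8's standard smi check: a small integer carries a zero low bit and a heap object pointer a one, so masking the low bit distinguishes the two without touching memory. A minimal sketch, assuming the usual encoding (kSmiTag == 0, kSmiTagMask == 1):

    #include <stdint.h>

    // Assumed encoding: smis have tag bit 0 clear, heap objects have it set.
    static inline bool IsSmiTagged(intptr_t tagged_value) {
      return (tagged_value & 1) == 0;  // 1 stands in for kSmiTagMask
    }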
| 213 |
| 214 // 3. Make sure first argument is an object; convert if necessary. |
| 215 { Label call_to_object, use_global_receiver, patch_receiver, done; |
| 216 __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0)); |
| 217 |
| 218 __ testl(rbx, Immediate(kSmiTagMask)); |
| 219 __ j(zero, &call_to_object); |
| 220 |
| 221 __ Cmp(rbx, Factory::null_value()); |
| 222 __ j(equal, &use_global_receiver); |
| 223 __ Cmp(rbx, Factory::undefined_value()); |
| 224 __ j(equal, &use_global_receiver); |
| 225 |
| 226 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); |
| 227 __ j(below, &call_to_object); |
| 228 __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE); |
| 229 __ j(below_equal, &done); |
| 230 |
| 231 __ bind(&call_to_object); |
| 232 __ EnterInternalFrame(); // preserves rax, rbx, rdi |
| 233 |
| 234 // Store the arguments count on the stack (smi tagged). |
| 235 ASSERT(kSmiTag == 0); |
| 236 __ shl(rax, Immediate(kSmiTagSize)); |
| 237 __ push(rax); |
| 238 |
| 239 __ push(rdi); // save rdi across the call |
| 240 __ push(rbx); |
| 241 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 242 __ movq(rbx, rax); |
| 243 __ pop(rdi); // restore rdi after the call |
| 244 |
| 245 // Get the arguments count and untag it. |
| 246 __ pop(rax); |
| 247 __ shr(rax, Immediate(kSmiTagSize)); |
| 248 |
| 249 __ LeaveInternalFrame(); |
| 250 __ jmp(&patch_receiver); |
| 251 |
| 252 // Use the global receiver object from the called function as the receiver. |
| 253 __ bind(&use_global_receiver); |
| 254 const int kGlobalIndex = |
| 255 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
| 256 __ movq(rbx, FieldOperand(rsi, kGlobalIndex)); |
| 257 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); |
| 258 |
| 259 __ bind(&patch_receiver); |
| 260 __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx); |
| 261 |
| 262 __ bind(&done); |
| 263 } |
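Step 3 normalizes the receiver for the upcoming call: null and undefined are replaced by the global receiver, values outside the JS object range are boxed through the TO_OBJECT builtin, and anything already in the range is kept. A self-contained decision-table sketch (the enum names are illustrative, not V8 types):

    // Decision table for the checks at lines 218 through 229.
    enum ReceiverKind { kSmi, kNullOrUndefined, kJSObjectRange, kOtherHeapObject };
    enum ReceiverAction { kConvertToObject, kUseGlobalReceiver, kKeepAsIs };

    static ReceiverAction ClassifyReceiver(ReceiverKind kind) {
      switch (kind) {
        case kNullOrUndefined: return kUseGlobalReceiver;  // lines 221-224
        case kJSObjectRange:   return kKeepAsIs;           // lines 226-229
        case kSmi:
        case kOtherHeapObject: return kConvertToObject;    // TO_OBJECT call
      }
      return kKeepAsIs;  // not reached
    }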
| 264 |
| 265 // 4. Shift arguments one slot down the stack (overwriting the function). |
| 266 { Label loop; |
| 267 __ lea(rcx, Operand(rax, +1)); // +1 ~ copy receiver too |
| 268 __ bind(&loop); |
| 269 __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0)); |
| 270 __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx); |
| 271 __ decq(rcx); |
| 272 __ j(not_zero, &loop); |
| 273 } |
| 274 |
| 275 // 5. Remove TOS (copy of last argument), but keep return address. |
| 276 __ pop(rbx); |
| 277 __ pop(rcx); |
| 278 __ push(rbx); |
| 279 __ decq(rax); |
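Steps 4 and 5 together drop the function from the argument area: every argument slides one slot toward the stack base so the receiver-to-be lands where the function was, the stale copy left at the top is removed while the return address is kept, and the count goes down by one. The conceptual effect, written on a plain array ordered from the deepest slot outward (a sketch only; the real code works in place on the machine stack):

    #include <stdint.h>
    #include <vector>

    // in[0] = function, in[1] = receiver-to-be, in[2..] = arguments.
    // The result is what the callee sees: the same values, one slot shorter,
    // with the function gone.
    static std::vector<intptr_t> DropFunctionSlot(
        const std::vector<intptr_t>& in) {
      return std::vector<intptr_t>(in.begin() + 1, in.end());
    }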
| 280 |
| 281 // 6. Check that function really was a function and get the code to |
| 282 // call from the function and check that the number of expected |
| 283 // arguments matches what we're providing. |
| 284 { Label invoke, trampoline; |
| 285 __ testq(rdi, rdi); |
| 286 __ j(not_zero, &invoke); |
| 287 __ xor_(rbx, rbx); |
| 288 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); |
| 289 __ bind(&trampoline); |
| 290 __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)), |
| 291 RelocInfo::CODE_TARGET); |
| 292 |
| 293 __ bind(&invoke); |
| 294 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 295 __ movsxlq(rbx, |
| 296 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset)); |
| 297 __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset)); |
| 298 __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); |
| 299 __ cmpq(rax, rbx); |
| 300 __ j(not_equal, &trampoline); |
| 301 } |
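Step 6 picks one of three exits: a cleared rdi means the callee was not a function, so CALL_NON_FUNCTION is dispatched through the adaptor with an expected count of zero; a formal parameter count that matches rax allows the direct jump in step 7; any other count falls back to the ArgumentsAdaptorTrampoline. A compact sketch of that decision (struct and enum are illustrative, not V8 types):

    #include <stddef.h>

    struct FunctionInfo { int formal_parameter_count; };
    enum CallPath { kCallNonFunction, kDirectJump, kArgumentsAdaptor };

    // Mirrors the control flow at lines 281 through 305.
    static CallPath ChooseCallPath(const FunctionInfo* f, int actual_args) {
      if (f == NULL) return kCallNonFunction;   // rdi was zeroed in step 2
      if (f->formal_parameter_count == actual_args) return kDirectJump;
      return kArgumentsAdaptor;                 // arity mismatch: trampoline
    }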
| 302 |
| 303 // 7. Jump (tail-call) to the code in register rdx without checking arguments. |
| 304 ParameterCount expected(0); |
| 305 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION); |
| 306 } |
| 307 |
| 308 |
171 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | 309 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { |
172 masm->int3(); // UNIMPLEMENTED. | 310 // Stack at entry: |
173 masm->movq(kScratchRegister, Immediate(0xBEFA)); // Debugging aid. | 311 // rsp: return address |
| 312 // rsp+8: arguments |
| 313 // rsp+16: receiver ("this") |
| 314 // rsp+24: function |
| 315 __ EnterInternalFrame(); |
| 316 // Stack frame: |
| 317 // rbp: Old base pointer |
| 318 // rbp[1]: return address |
| 319 // rbp[2]: function arguments |
| 320 // rbp[3]: receiver |
| 321 // rbp[4]: function |
| 322 static const int kArgumentsOffset = 2 * kPointerSize; |
| 323 static const int kReceiverOffset = 3 * kPointerSize; |
| 324 static const int kFunctionOffset = 4 * kPointerSize; |
| 325 __ push(Operand(rbp, kFunctionOffset)); |
| 326 __ push(Operand(rbp, kArgumentsOffset)); |
| 327 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); |
| 328 |
| 329 if (FLAG_check_stack) { |
| 330 // We need to catch preemptions right here, otherwise an unlucky preemption |
| 331 // could show up as a failed apply. |
| 332 Label retry_preemption; |
| 333 Label no_preemption; |
| 334 __ bind(&retry_preemption); |
| 335 ExternalReference stack_guard_limit = |
| 336 ExternalReference::address_of_stack_guard_limit(); |
| 337 __ movq(kScratchRegister, stack_guard_limit); |
| 338 __ movq(rcx, rsp); |
| 339 __ subq(rcx, Operand(kScratchRegister, 0)); |
| 340 // rcx contains the difference between the stack top and the stack limit. |
| 341 // We use it below to check that there is enough room for the arguments. |
| 342 __ j(above, &no_preemption); |
| 343 |
| 344 // Preemption! |
| 345 // Because runtime functions always remove the receiver from the stack, we |
| 346 // have to fake one to avoid underflowing the stack. |
| 347 __ push(rax); |
| 348 __ push(Immediate(Smi::FromInt(0))); |
| 349 |
| 350 // Do call to runtime routine. |
| 351 __ CallRuntime(Runtime::kStackGuard, 1); |
| 352 __ pop(rax); |
| 353 __ jmp(&retry_preemption); |
| 354 |
| 355 __ bind(&no_preemption); |
| 356 |
| 357 Label okay; |
| 358 // Make rdx the space we need for the array when it is unrolled onto the |
| 359 // stack. |
| 360 __ movq(rdx, rax); |
| 361 __ shl(rdx, Immediate(kPointerSizeLog2 - kSmiTagSize)); |
| 362 __ cmpq(rcx, rdx); |
| 363 __ j(greater, &okay); |
| 364 |
| 365 // Too bad: Out of stack space. |
| 366 __ push(Operand(rbp, kFunctionOffset)); |
| 367 __ push(rax); |
| 368 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); |
| 369 __ bind(&okay); |
| 370 } |
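The shift by kPointerSizeLog2 - kSmiTagSize above turns the smi-tagged argument count directly into a byte count: assuming the 2009-era encoding with kSmiTagSize == 1 and 8-byte pointers (kPointerSizeLog2 == 3), a smi already holds 2 * count, so two more left shifts yield count * 8, the stack space the unrolled array needs. A runnable check of that arithmetic (the constants are assumptions restated locally, not the real V8 definitions):

    #include <assert.h>
    #include <stdint.h>

    static const int kAssumedSmiTagSize = 1;       // low tag bit
    static const int kAssumedPointerSizeLog2 = 3;  // 8-byte words

    static intptr_t SmiCountToBytes(intptr_t smi_count) {
      return smi_count << (kAssumedPointerSizeLog2 - kAssumedSmiTagSize);
    }

    int main() {
      intptr_t count = 5;
      intptr_t smi = count << kAssumedSmiTagSize;  // smi-tag the count
      assert(SmiCountToBytes(smi) == count * 8);   // 5 args need 40 bytes
      return 0;
    }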
| 371 |
| 372 // Push current index and limit. |
| 373 const int kLimitOffset = |
| 374 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize; |
| 375 const int kIndexOffset = kLimitOffset - 1 * kPointerSize; |
| 376 __ push(rax); // limit |
| 377 __ push(Immediate(0)); // index |
| 378 |
| 379 // Change context eagerly to get the right global object if |
| 380 // necessary. |
| 381 __ movq(rdi, Operand(rbp, kFunctionOffset)); |
| 382 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
| 383 |
| 384 // Compute the receiver. |
| 385 Label call_to_object, use_global_receiver, push_receiver; |
| 386 __ movq(rbx, Operand(rbp, kReceiverOffset)); |
| 387 __ testl(rbx, Immediate(kSmiTagMask)); |
| 388 __ j(zero, &call_to_object); |
| 389 __ Cmp(rbx, Factory::null_value()); |
| 390 __ j(equal, &use_global_receiver); |
| 391 __ Cmp(rbx, Factory::undefined_value()); |
| 392 __ j(equal, &use_global_receiver); |
| 393 |
| 394 // If the given receiver is already a JavaScript object then there's no |
| 395 // reason to convert it. |
| 396 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); |
| 397 __ j(less, &call_to_object); |
| 398 __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE); |
| 399 __ j(less_equal, &push_receiver); |
| 400 |
| 401 // Convert the receiver to an object. |
| 402 __ bind(&call_to_object); |
| 403 __ push(rbx); |
| 404 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 405 __ movq(rbx, rax); |
| 406 __ jmp(&push_receiver); |
| 407 |
| 408 // Use the current global receiver object as the receiver. |
| 409 __ bind(&use_global_receiver); |
| 410 const int kGlobalOffset = |
| 411 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; |
| 412 __ movq(rbx, FieldOperand(rsi, kGlobalOffset)); |
| 413 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); |
| 414 |
| 415 // Push the receiver. |
| 416 __ bind(&push_receiver); |
| 417 __ push(rbx); |
| 418 |
| 419 // Copy all arguments from the array to the stack. |
| 420 Label entry, loop; |
| 421 __ movq(rax, Operand(rbp, kIndexOffset)); |
| 422 __ jmp(&entry); |
| 423 __ bind(&loop); |
| 424 __ movq(rcx, Operand(rbp, kArgumentsOffset)); // load arguments |
| 425 __ push(rcx); |
| 426 __ push(rax); |
| 427 |
| 428 // Use inline caching to speed up access to arguments. |
| 429 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
| 430 __ Call(ic, RelocInfo::CODE_TARGET); |
| 431 // It is important that we do not have a test instruction after the |
| 432 // call. A test instruction after the call is used to indicate that |
| 433 // we have generated an inline version of the keyed load. In this |
| 434 // case, we know that we are not generating a test instruction next. |
| 435 |
| 436 // Remove IC arguments from the stack and push the nth argument. |
| 437 __ addq(rsp, Immediate(2 * kPointerSize)); |
| 438 __ push(rax); |
| 439 |
| 440 // Update the index on the stack and in register rax. |
| 441 __ movq(rax, Operand(rbp, kIndexOffset)); |
| 442 __ addq(rax, Immediate(Smi::FromInt(1))); |
| 443 __ movq(Operand(rbp, kIndexOffset), rax); |
| 444 |
| 445 __ bind(&entry); |
| 446 __ cmpq(rax, Operand(rbp, kLimitOffset)); |
| 447 __ j(not_equal, &loop); |
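The loop above keeps both index and limit as smis in expression slots of the internal frame, loads arguments[index] through the KeyedLoadIC so the generic keyed lookup is reused, and pushes each result. Stripped of the smi bookkeeping it amounts to the following (a sketch; GetElement is a hypothetical stand-in for the IC call):

    #include <stddef.h>
    #include <stdint.h>
    #include <vector>

    // Stand-in for the KeyedLoadIC call at lines 429-430.
    static intptr_t GetElement(const std::vector<intptr_t>& args, size_t i) {
      return args[i];
    }

    static void PushAllArguments(std::vector<intptr_t>* stack,
                                 const std::vector<intptr_t>& args) {
      for (size_t index = 0; index < args.size(); index++) {
        stack->push_back(GetElement(args, index));  // push(rax) after the IC
      }
    }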
| 448 |
| 449 // Invoke the function. |
| 450 ParameterCount actual(rax); |
| 451 __ shr(rax, Immediate(kSmiTagSize)); |
| 452 __ movq(rdi, Operand(rbp, kFunctionOffset)); |
| 453 __ InvokeFunction(rdi, actual, CALL_FUNCTION); |
| 454 |
| 455 __ LeaveInternalFrame(); |
| 456 __ ret(3 * kPointerSize); // remove function, receiver, and arguments |
174 } | 457 } |
175 | 458 |
176 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { | 459 |
177 masm->int3(); // UNIMPLEMENTED. | |
178 masm->movq(kScratchRegister, Immediate(0xBEFC)); // Debugging aid. | |
179 } | |
180 | |
181 | |
182 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { | 460 void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { |
183 // ----------- S t a t e ------------- | 461 // ----------- S t a t e ------------- |
184 // -- rax: number of arguments | 462 // -- rax: number of arguments |
185 // -- rdi: constructor function | 463 // -- rdi: constructor function |
186 // ----------------------------------- | 464 // ----------------------------------- |
187 | 465 |
188 Label non_function_call; | 466 Label non_function_call; |
189 // Check that function is not a smi. | 467 // Check that function is not a smi. |
190 __ testl(rdi, Immediate(kSmiTagMask)); | 468 __ testl(rdi, Immediate(kSmiTagMask)); |
191 __ j(zero, &non_function_call); | 469 __ j(zero, &non_function_call); |
(...skipping 228 matching lines...)
420 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { | 698 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { |
421 Generate_JSEntryTrampolineHelper(masm, false); | 699 Generate_JSEntryTrampolineHelper(masm, false); |
422 } | 700 } |
423 | 701 |
424 | 702 |
425 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { | 703 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { |
426 Generate_JSEntryTrampolineHelper(masm, true); | 704 Generate_JSEntryTrampolineHelper(masm, true); |
427 } | 705 } |
428 | 706 |
429 } } // namespace v8::internal | 707 } } // namespace v8::internal |