| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 444 matching lines...) |
| 455 __ push(descriptor->register_params_[i]); | 455 __ push(descriptor->register_params_[i]); |
| 456 } | 456 } |
| 457 ExternalReference miss = descriptor->miss_handler(); | 457 ExternalReference miss = descriptor->miss_handler(); |
| 458 __ CallExternalReference(miss, descriptor->register_param_count_); | 458 __ CallExternalReference(miss, descriptor->register_param_count_); |
| 459 } | 459 } |
| 460 | 460 |
| 461 __ Ret(); | 461 __ Ret(); |
| 462 } | 462 } |
| 463 | 463 |
| 464 | 464 |
| 465 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { | |
| 466 // Stack layout on entry: | |
| 467 // | |
| 468 // [rsp + (1 * kPointerSize)] : function | |
| 469 // [rsp + (2 * kPointerSize)] : serialized scope info | |
| 470 | |
| 471 // Try to allocate the context in new space. | |
| 472 Label gc; | |
| 473 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | |
| 474 __ Allocate(FixedArray::SizeFor(length), | |
| 475 rax, rbx, rcx, &gc, TAG_OBJECT); | |
| 476 | |
| 477 // Get the function from the stack. | |
| 478 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); | |
| 479 __ movp(rcx, args.GetArgumentOperand(1)); | |
| 480 // Get the serialized scope info from the stack. | |
| 481 __ movp(rbx, args.GetArgumentOperand(0)); | |
| 482 | |
| 483 // Set up the object header. | |
| 484 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); | |
| 485 __ movp(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | |
| 486 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | |
| 487 | |
| 488 // If this block context is nested in the native context we get a smi | |
| 489 // sentinel instead of a function. The block context should get the | |
| 490 // canonical empty function of the native context as its closure which | |
| 491 // we still have to look up. | |
| 492 Label after_sentinel; | |
| 493 __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear); | |
| 494 if (FLAG_debug_code) { | |
| 495 __ cmpq(rcx, Immediate(0)); | |
| 496 __ Assert(equal, kExpected0AsASmiSentinel); | |
| 497 } | |
| 498 __ movp(rcx, GlobalObjectOperand()); | |
| 499 __ movp(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset)); | |
| 500 __ movp(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX)); | |
| 501 __ bind(&after_sentinel); | |
| 502 | |
| 503 // Set up the fixed slots. | |
| 504 __ movp(ContextOperand(rax, Context::CLOSURE_INDEX), rcx); | |
| 505 __ movp(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi); | |
| 506 __ movp(ContextOperand(rax, Context::EXTENSION_INDEX), rbx); | |
| 507 | |
| 508 // Copy the global object from the previous context. | |
| 509 __ movp(rbx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX)); | |
| 510 __ movp(ContextOperand(rax, Context::GLOBAL_OBJECT_INDEX), rbx); | |
| 511 | |
| 512 // Initialize the rest of the slots to the hole value. | |
| 513 __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex); | |
| 514 for (int i = 0; i < slots_; i++) { | |
| 515 __ movp(ContextOperand(rax, i + Context::MIN_CONTEXT_SLOTS), rbx); | |
| 516 } | |
| 517 | |
| 518 // Return and remove the on-stack parameter. | |
| 519 __ movp(rsi, rax); | |
| 520 __ ret(2 * kPointerSize); | |
| 521 | |
| 522 // Need to collect. Call into runtime system. | |
| 523 __ bind(&gc); | |
| 524 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | |
| 525 } | |
| 526 | |
| 527 | |
| 528 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 465 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
| 529 __ PushCallerSaved(save_doubles_); | 466 __ PushCallerSaved(save_doubles_); |
| 530 const int argument_count = 1; | 467 const int argument_count = 1; |
| 531 __ PrepareCallCFunction(argument_count); | 468 __ PrepareCallCFunction(argument_count); |
| 532 __ LoadAddress(arg_reg_1, | 469 __ LoadAddress(arg_reg_1, |
| 533 ExternalReference::isolate_address(masm->isolate())); | 470 ExternalReference::isolate_address(masm->isolate())); |
| 534 | 471 |
| 535 AllowExternalCallThatCantCauseGC scope(masm); | 472 AllowExternalCallThatCantCauseGC scope(masm); |
| 536 __ CallCFunction( | 473 __ CallCFunction( |
| 537 ExternalReference::store_buffer_overflow_function(masm->isolate()), | 474 ExternalReference::store_buffer_overflow_function(masm->isolate()), |
| (...skipping 4797 matching lines...) |
| 5335 return_value_operand, | 5272 return_value_operand, |
| 5336 restore_context ? &context_restore_operand : NULL); | 5273 restore_context ? &context_restore_operand : NULL); |
| 5337 } | 5274 } |
| 5338 | 5275 |
| 5339 | 5276 |
| 5340 #undef __ | 5277 #undef __ |
| 5341 | 5278 |
| 5342 } } // namespace v8::internal | 5279 } } // namespace v8::internal |
| 5343 | 5280 |
| 5344 #endif // V8_TARGET_ARCH_X64 | 5281 #endif // V8_TARGET_ARCH_X64 |
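Note on this hunk: the only substantive change is the deletion of FastNewBlockContextStub::Generate from the x64 port. Reading just the removed lines, the stub's fast path allocated a block context in new space, filled in its map, length, and fixed slots, initialized the user slots to the hole value, moved the result into rsi (the context register), and popped its two stack arguments; on allocation failure it tail-called Runtime::kPushBlockContext. As a rough orientation, that logic amounts to the following self-contained C++ sketch. Every type and helper name here is a hypothetical stand-in for illustration; the real stub emits raw x64 machine code and works on tagged heap words, not C++ objects.

```cpp
// Sketch of the deleted stub's fast path (all names hypothetical).
#include <vector>

struct Object {};                 // stand-in for a tagged V8 value
static Object the_hole;           // stand-in for the hole sentinel

struct BlockContext {
  Object* closure;                // Context::CLOSURE_INDEX
  BlockContext* previous;         // Context::PREVIOUS_INDEX
  Object* extension;              // Context::EXTENSION_INDEX (scope info)
  Object* global_object;          // Context::GLOBAL_OBJECT_INDEX
  std::vector<Object*> slots;     // user slots, filled with the hole
};

// 'function' may be the smi sentinel 0 (modeled here as nullptr) when
// the block context is nested directly in the native context; in that
// case the stub substituted the native context's canonical empty
// closure, which it looked up through the global object.
BlockContext* NewBlockContext(Object* function, Object* scope_info,
                              BlockContext* previous,
                              Object* native_context_closure,
                              int user_slot_count) {
  // The stub attempted a new-space Allocate() and tail-called
  // Runtime::kPushBlockContext on failure; plain 'new' stands in here.
  BlockContext* ctx = new BlockContext;
  ctx->closure = function != nullptr ? function : native_context_closure;
  ctx->previous = previous;                      // rsi on entry
  ctx->extension = scope_info;
  ctx->global_object = previous->global_object;  // copied from previous
  ctx->slots.assign(user_slot_count, &the_hole); // initialize to the hole
  return ctx;                                    // stub: rax, then rsi
}
```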