| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 21 matching lines...) |
| 32 #include "bootstrapper.h" | 32 #include "bootstrapper.h" |
| 33 #include "code-stubs.h" | 33 #include "code-stubs.h" |
| 34 #include "regexp-macro-assembler.h" | 34 #include "regexp-macro-assembler.h" |
| 35 #include "stub-cache.h" | 35 #include "stub-cache.h" |
| 36 #include "runtime.h" | 36 #include "runtime.h" |
| 37 | 37 |
| 38 namespace v8 { | 38 namespace v8 { |
| 39 namespace internal { | 39 namespace internal { |
| 40 | 40 |
| 41 | 41 |
| | 42 void FastNewClosureStub::InitializeInterfaceDescriptor( |
| | 43 Isolate* isolate, |
| | 44 CodeStubInterfaceDescriptor* descriptor) { |
| | 45 static Register registers[] = { rbx }; |
| | 46 descriptor->register_param_count_ = 1; |
| | 47 descriptor->register_params_ = registers; |
| | 48 descriptor->deoptimization_handler_ = |
| | 49 Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry; |
| | 50 } |
| | 51 |
| | 52 |
| 42 void ToNumberStub::InitializeInterfaceDescriptor( | 53 void ToNumberStub::InitializeInterfaceDescriptor( |
| 43 Isolate* isolate, | 54 Isolate* isolate, |
| 44 CodeStubInterfaceDescriptor* descriptor) { | 55 CodeStubInterfaceDescriptor* descriptor) { |
| 45 static Register registers[] = { rax }; | 56 static Register registers[] = { rax }; |
| 46 descriptor->register_param_count_ = 1; | 57 descriptor->register_param_count_ = 1; |
| 47 descriptor->register_params_ = registers; | 58 descriptor->register_params_ = registers; |
| 48 descriptor->deoptimization_handler_ = NULL; | 59 descriptor->deoptimization_handler_ = NULL; |
| 49 } | 60 } |
| 50 | 61 |
| 51 | 62 |
| (...skipping 236 matching lines...) |
| 288 __ push(descriptor->register_params_[i]); | 299 __ push(descriptor->register_params_[i]); |
| 289 } | 300 } |
| 290 ExternalReference miss = descriptor->miss_handler(); | 301 ExternalReference miss = descriptor->miss_handler(); |
| 291 __ CallExternalReference(miss, descriptor->register_param_count_); | 302 __ CallExternalReference(miss, descriptor->register_param_count_); |
| 292 } | 303 } |
| 293 | 304 |
| 294 __ Ret(); | 305 __ Ret(); |
| 295 } | 306 } |
| 296 | 307 |
| 297 | 308 |
| 298 void FastNewClosureStub::Generate(MacroAssembler* masm) { | |
| 299 // Create a new closure from the given function info in new | |
| 300 // space. Set the context to the current context in rsi. | |
| 301 Counters* counters = masm->isolate()->counters(); | |
| 302 | |
| 303 Label gc; | |
| 304 __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT); | |
| 305 | |
| 306 __ IncrementCounter(counters->fast_new_closure_total(), 1); | |
| 307 | |
| 308 // Get the function info from the stack. | |
| 309 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); | |
| 310 | |
| 311 int map_index = Context::FunctionMapIndex(language_mode_, is_generator_); | |
| 312 | |
| 313 // Compute the function map in the current native context and set that | |
| 314 // as the map of the allocated object. | |
| 315 __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | |
| 316 __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset)); | |
| 317 __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index))); | |
| 318 __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx); | |
| 319 | |
| 320 // Initialize the rest of the function. We don't have to update the | |
| 321 // write barrier because the allocated object is in new space. | |
| 322 __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex); | |
| 323 __ LoadRoot(r8, Heap::kTheHoleValueRootIndex); | |
| 324 __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex); | |
| 325 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx); | |
| 326 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx); | |
| 327 __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), r8); | |
| 328 __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx); | |
| 329 __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi); | |
| 330 __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx); | |
| 331 | |
| 332 // Initialize the code pointer in the function to be the one | |
| 333 // found in the shared function info object. | |
| 334 // But first check if there is an optimized version for our context. | |
| 335 Label check_optimized; | |
| 336 Label install_unoptimized; | |
| 337 if (FLAG_cache_optimized_code) { | |
| 338 __ movq(rbx, | |
| 339 FieldOperand(rdx, SharedFunctionInfo::kOptimizedCodeMapOffset)); | |
| 340 __ testq(rbx, rbx); | |
| 341 __ j(not_zero, &check_optimized, Label::kNear); | |
| 342 } | |
| 343 __ bind(&install_unoptimized); | |
| 344 __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset), | |
| 345 rdi); // Initialize with undefined. | |
| 346 __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset)); | |
| 347 __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); | |
| 348 __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx); | |
| 349 | |
| 350 // Return and remove the on-stack parameter. | |
| 351 __ ret(1 * kPointerSize); | |
| 352 | |
| 353 __ bind(&check_optimized); | |
| 354 | |
| 355 __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1); | |
| 356 | |
| 357 // rcx holds native context, rbx points to fixed array of 3-element entries | |
| 358 // (native context, optimized code, literals). | |
| 359 // The optimized code map must never be empty, so check the first elements. | |
| 360 Label install_optimized; | |
| 361 // Speculatively move code object into edx. | |
| 362 __ movq(rdx, FieldOperand(rbx, SharedFunctionInfo::kFirstCodeSlot)); | |
| 363 __ cmpq(rcx, FieldOperand(rbx, SharedFunctionInfo::kFirstContextSlot)); | |
| 364 __ j(equal, &install_optimized); | |
| 365 | |
| 366 // Iterate through the rest of map backwards. rdx holds an index. | |
| 367 Label loop; | |
| 368 Label restore; | |
| 369 __ movq(rdx, FieldOperand(rbx, FixedArray::kLengthOffset)); | |
| 370 __ SmiToInteger32(rdx, rdx); | |
| 371 __ bind(&loop); | |
| 372 // Do not double check first entry. | |
| 373 __ cmpq(rdx, Immediate(SharedFunctionInfo::kSecondEntryIndex)); | |
| 374 __ j(equal, &restore); | |
| 375 __ subq(rdx, Immediate(SharedFunctionInfo::kEntryLength)); | |
| 376 __ cmpq(rcx, FieldOperand(rbx, | |
| 377 rdx, | |
| 378 times_pointer_size, | |
| 379 FixedArray::kHeaderSize)); | |
| 380 __ j(not_equal, &loop, Label::kNear); | |
| 381 // Hit: fetch the optimized code. | |
| 382 __ movq(rdx, FieldOperand(rbx, | |
| 383 rdx, | |
| 384 times_pointer_size, | |
| 385 FixedArray::kHeaderSize + 1 * kPointerSize)); | |
| 386 | |
| 387 __ bind(&install_optimized); | |
| 388 __ IncrementCounter(counters->fast_new_closure_install_optimized(), 1); | |
| 389 | |
| 390 // TODO(fschneider): Idea: store proper code pointers in the map and either | |
| 391 // unmangle them on marking or do nothing as the whole map is discarded on | |
| 392 // major GC anyway. | |
| 393 __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); | |
| 394 __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx); | |
| 395 | |
| 396 // Now link a function into a list of optimized functions. | |
| 397 __ movq(rdx, ContextOperand(rcx, Context::OPTIMIZED_FUNCTIONS_LIST)); | |
| 398 | |
| 399 __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset), rdx); | |
| 400 // No need for write barrier as JSFunction (rax) is in the new space. | |
| 401 | |
| 402 __ movq(ContextOperand(rcx, Context::OPTIMIZED_FUNCTIONS_LIST), rax); | |
| 403 // Store JSFunction (rax) into rdx before issuing write barrier as | |
| 404 // it clobbers all the registers passed. | |
| 405 __ movq(rdx, rax); | |
| 406 __ RecordWriteContextSlot( | |
| 407 rcx, | |
| 408 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST), | |
| 409 rdx, | |
| 410 rbx, | |
| 411 kDontSaveFPRegs); | |
| 412 | |
| 413 // Return and remove the on-stack parameter. | |
| 414 __ ret(1 * kPointerSize); | |
| 415 | |
| 416 __ bind(&restore); | |
| 417 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); | |
| 418 __ jmp(&install_unoptimized); | |
| 419 | |
| 420 // Create a new closure through the slower runtime call. | |
| 421 __ bind(&gc); | |
| 422 __ PopReturnAddressTo(rcx); | |
| 423 __ pop(rdx); | |
| 424 __ push(rsi); | |
| 425 __ push(rdx); | |
| 426 __ PushRoot(Heap::kFalseValueRootIndex); | |
| 427 __ PushReturnAddressFrom(rcx); | |
| 428 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); | |
| 429 } | |
| 430 | |
| 431 | |
| 432 void FastNewContextStub::Generate(MacroAssembler* masm) { | 309 void FastNewContextStub::Generate(MacroAssembler* masm) { |
| 433 // Try to allocate the context in new space. | 310 // Try to allocate the context in new space. |
| 434 Label gc; | 311 Label gc; |
| 435 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 312 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
| 436 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, | 313 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, |
| 437 rax, rbx, rcx, &gc, TAG_OBJECT); | 314 rax, rbx, rcx, &gc, TAG_OBJECT); |
| 438 | 315 |
| 439 // Get the function from the stack. | 316 // Get the function from the stack. |
| 440 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 317 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
| 441 | 318 |
| (...skipping 6296 matching lines...) |
| 6738 __ bind(&fast_elements_case); | 6615 __ bind(&fast_elements_case); |
| 6739 GenerateCase(masm, FAST_ELEMENTS); | 6616 GenerateCase(masm, FAST_ELEMENTS); |
| 6740 } | 6617 } |
| 6741 | 6618 |
| 6742 | 6619 |
| 6743 #undef __ | 6620 #undef __ |
| 6744 | 6621 |
| 6745 } } // namespace v8::internal | 6622 } } // namespace v8::internal |
| 6746 | 6623 |
| 6747 #endif // V8_TARGET_ARCH_X64 | 6624 #endif // V8_TARGET_ARCH_X64 |