OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stubs.h" | 5 #include "src/code-stubs.h" |
6 | 6 |
7 #include <sstream> | 7 #include <sstream> |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
(...skipping 4171 matching lines...) | |
4182 const { | 4182 const { |
4183 return VectorStoreTransitionDescriptor(isolate()); | 4183 return VectorStoreTransitionDescriptor(isolate()); |
4184 } | 4184 } |
4185 | 4185 |
4186 | 4186 |
4187 CallInterfaceDescriptor | 4187 CallInterfaceDescriptor |
4188 ElementsTransitionAndStoreStub::GetCallInterfaceDescriptor() const { | 4188 ElementsTransitionAndStoreStub::GetCallInterfaceDescriptor() const { |
4189 return VectorStoreTransitionDescriptor(isolate()); | 4189 return VectorStoreTransitionDescriptor(isolate()); |
4190 } | 4190 } |
4191 | 4191 |
4192 void FastNewClosureStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {} | |
4193 | |
4194 void FastNewContextStub::InitializeDescriptor(CodeStubDescriptor* d) {} | 4192 void FastNewContextStub::InitializeDescriptor(CodeStubDescriptor* d) {} |
4195 | 4193 |
4196 | |
4197 void TypeofStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {} | 4194 void TypeofStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {} |
4198 | 4195 |
4199 | |
4200 void NumberToStringStub::InitializeDescriptor(CodeStubDescriptor* descriptor) { | 4196 void NumberToStringStub::InitializeDescriptor(CodeStubDescriptor* descriptor) { |
4201 descriptor->Initialize( | 4197 descriptor->Initialize( |
4202 Runtime::FunctionForId(Runtime::kNumberToString)->entry); | 4198 Runtime::FunctionForId(Runtime::kNumberToString)->entry); |
4203 } | 4199 } |
4204 | 4200 |
4205 | 4201 |
4206 void FastCloneRegExpStub::InitializeDescriptor(CodeStubDescriptor* descriptor) { | 4202 void FastCloneRegExpStub::InitializeDescriptor(CodeStubDescriptor* descriptor) { |
4207 FastCloneRegExpDescriptor call_descriptor(isolate()); | 4203 FastCloneRegExpDescriptor call_descriptor(isolate()); |
4208 descriptor->Initialize( | 4204 descriptor->Initialize( |
4209 Runtime::FunctionForId(Runtime::kCreateRegExpLiteral)->entry); | 4205 Runtime::FunctionForId(Runtime::kCreateRegExpLiteral)->entry); |
(...skipping 208 matching lines...) | |
4418 { | 4414 { |
4419 result.Bind( | 4415 result.Bind( |
4420 assembler->CallRuntime(Runtime::kHasProperty, context, key, object)); | 4416 assembler->CallRuntime(Runtime::kHasProperty, context, key, object)); |
4421 assembler->Goto(&end); | 4417 assembler->Goto(&end); |
4422 } | 4418 } |
4423 | 4419 |
4424 assembler->Bind(&end); | 4420 assembler->Bind(&end); |
4425 return result.value(); | 4421 return result.value(); |
4426 } | 4422 } |
4427 | 4423 |
4424 // static | |
4425 compiler::Node* FastNewClosureStub::Generate(CodeStubAssembler* assembler, | |
4426 compiler::Node* shared_info, | |
4427 compiler::Node* context) { | |
4428 typedef compiler::Node Node; | |
4429 typedef compiler::CodeAssembler::Label Label; | |
4430 typedef compiler::CodeAssembler::Variable Variable; | |
4431 | |
4432 Isolate* isolate = assembler->isolate(); | |
4433 Factory* factory = assembler->isolate()->factory(); | |
4434 assembler->IncrementCounter(isolate->counters()->fast_new_closure_total(), 1); | |
4435 | |
4436 // Create a new closure from the given function info in new space | |
4437 Node* result = assembler->Allocate(JSFunction::kSize); | |
4438 | |
4439 // Calculate the index of the map we should install on the function based on | |
4440 // the FunctionKind and LanguageMode of the function. | |
4441 // Note: Must be kept in sync with Context::FunctionMapIndex | |
4442 Node* compiler_hints = assembler->LoadObjectField( | |
4443 shared_info, SharedFunctionInfo::kCompilerHintsOffset, | |
4444 MachineType::Uint32()); | |
4445 Node* is_strict = assembler->Word32And( | |
4446 compiler_hints, | |
4447 assembler->Int32Constant(1 << SharedFunctionInfo::kStrictModeBit)); | |
4448 | |
4449 Label if_normal(assembler), if_generator(assembler), if_async(assembler), | |
4450 if_class_constructor(assembler), if_function_without_prototype(assembler), | |
4451 load_map(assembler); | |
4452 Variable map_index(assembler, MachineRepresentation::kTagged); | |
4453 | |
4454 Node* is_not_normal = assembler->Word32And( | |
4455 compiler_hints, | |
4456 assembler->Int32Constant(SharedFunctionInfo::FunctionKindBits::kMask)); | |
4457 assembler->GotoUnless(is_not_normal, &if_normal); | |
4458 | |
4459 Node* is_generator = assembler->Word32And( | |
4460 compiler_hints, | |
4461 assembler->Int32Constant(1 << SharedFunctionInfo::kIsGeneratorBit)); | |
4462 assembler->GotoIf(is_generator, &if_generator); | |
4463 | |
4464 Node* is_async = assembler->Word32And( | |
4465 compiler_hints, | |
4466 assembler->Int32Constant(1 << SharedFunctionInfo::kIsAsyncFunctionBit)); | |
4467 assembler->GotoIf(is_async, &if_async); | |
4468 | |
4469 Node* is_class_constructor = assembler->Word32And( | |
4470 compiler_hints, | |
4471 assembler->Int32Constant(SharedFunctionInfo::kClassConstructorBits)); | |
4472 assembler->GotoIf(is_class_constructor, &if_class_constructor); | |
4473 | |
4474 if (FLAG_debug_code) { | |
4475 // Function must be a function without a prototype. | |
4476 assembler->Assert(assembler->Word32And( | |
4477 compiler_hints, assembler->Int32Constant( | |
4478 SharedFunctionInfo::kAccessorFunctionBits | | |
4479 (1 << SharedFunctionInfo::kIsArrowBit) | | |
4480 (1 << SharedFunctionInfo::kIsConciseMethodBit)))); | |
4481 } | |
4482 assembler->Goto(&if_function_without_prototype); | |
4483 | |
4484 assembler->Bind(&if_normal); | |
4485 { | |
4486 map_index.Bind(assembler->Select( | |
4487 is_strict, assembler->Int32Constant(Context::STRICT_FUNCTION_MAP_INDEX), | |
4488 assembler->Int32Constant(Context::SLOPPY_FUNCTION_MAP_INDEX))); | |
4489 assembler->Goto(&load_map); | |
4490 } | |
4491 | |
4492 assembler->Bind(&if_generator); | |
4493 { | |
4494 map_index.Bind(assembler->Select( | |
4495 is_strict, | |
4496 assembler->Int32Constant(Context::STRICT_GENERATOR_FUNCTION_MAP_INDEX), | |
4497 assembler->Int32Constant( | |
4498 Context::SLOPPY_GENERATOR_FUNCTION_MAP_INDEX))); | |
4499 assembler->Goto(&load_map); | |
4500 } | |
4501 | |
4502 assembler->Bind(&if_async); | |
4503 { | |
4504 map_index.Bind(assembler->Select( | |
4505 is_strict, | |
4506 assembler->Int32Constant(Context::STRICT_ASYNC_FUNCTION_MAP_INDEX), | |
4507 assembler->Int32Constant(Context::SLOPPY_ASYNC_FUNCTION_MAP_INDEX))); | |
4508 assembler->Goto(&load_map); | |
4509 } | |
4510 | |
4511 assembler->Bind(&if_class_constructor); | |
4512 { | |
4513 map_index.Bind( | |
4514 assembler->Int32Constant(Context::STRICT_FUNCTION_MAP_INDEX)); | |
4515 assembler->Goto(&load_map); | |
4516 } | |
4517 | |
4518 assembler->Bind(&if_function_without_prototype); | |
4519 { | |
4520 map_index.Bind(assembler->Int32Constant( | |
4521 Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX)); | |
4522 assembler->Goto(&load_map); | |
4523 } | |
4524 | |
4525 assembler->Bind(&load_map); | |
4526 | |
4527 // Get the function map in the current native context and set that | |
4528 // as the map of the allocated object. | |
4529 Node* native_context = assembler->LoadNativeContext(context); | |
4530 Node* map_slot_value = | |
4531 assembler->LoadFixedArrayElement(native_context, map_index.value()); | |
4532 assembler->StoreMapNoWriteBarrier(result, map_slot_value); | |
4533 | |
4534 // Initialize the rest of the function. | |
4535 Node* empty_fixed_array = | |
4536 assembler->HeapConstant(factory->empty_fixed_array()); | |
4537 Node* empty_literals_array = | |
4538 assembler->HeapConstant(factory->empty_literals_array()); | |
4539 assembler->StoreObjectFieldNoWriteBarrier(result, JSObject::kPropertiesOffset, | |
4540 empty_fixed_array); | |
4541 assembler->StoreObjectFieldNoWriteBarrier(result, JSObject::kElementsOffset, | |
4542 empty_fixed_array); | |
4543 assembler->StoreObjectFieldNoWriteBarrier(result, JSFunction::kLiteralsOffset, | |
4544 empty_literals_array); | |
4545 assembler->StoreObjectFieldNoWriteBarrier( | |
4546 result, JSFunction::kPrototypeOrInitialMapOffset, | |
4547 assembler->TheHoleConstant()); | |
4548 assembler->StoreObjectFieldNoWriteBarrier( | |
4549 result, JSFunction::kSharedFunctionInfoOffset, shared_info); | |
4550 assembler->StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, | |
4551 context); | |
4552 | |
4553 // TODO(rmcilroy): Should we set the code entry from the SharedFunctionInfo | |
Benedikt Meurer, 2016/06/29 17:42:16:
We want to go to optimized code if it's already av…

rmcilroy, 2016/06/29 21:11:48:
Ahh yes, of course. Removed this TODO.
Is there a…

Benedikt Meurer, 2016/06/30 03:30:15:
CompileLazy doesn't do a round trip to the runtime…
4554 // instead? For eager compilation this would seem preferable. | |
4555 Handle<Code> lazy_builtin_handle( | |
4556 assembler->isolate()->builtins()->builtin(Builtins::kCompileLazy)); | |
4557 Node* lazy_builtin = assembler->HeapConstant(lazy_builtin_handle); | |
4558 Node* lazy_builtin_entry = assembler->IntPtrAdd( | |
4559 lazy_builtin, | |
4560 assembler->IntPtrConstant(Code::kHeaderSize - kHeapObjectTag)); | |
4561 assembler->StoreObjectFieldNoWriteBarrier( | |
4562 result, JSFunction::kCodeEntryOffset, lazy_builtin_entry); | |
4563 assembler->StoreObjectFieldNoWriteBarrier(result, | |
4564 JSFunction::kNextFunctionLinkOffset, | |
4565 assembler->UndefinedConstant()); | |
4566 | |
4567 return result; | |
4568 } | |
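
For readers following the bit tests above: the dispatch selects the same map index that the runtime computes in C++ via Context::FunctionMapIndex, which is why the code carries the "must be kept in sync" note. A minimal sketch of that selection logic, assuming the 2016-era SharedFunctionInfo bit layout used in this CL (the helper name FunctionMapIndexFor is hypothetical, and the constants come from V8's internal headers):

// Illustration only, not part of the CL. Mirrors the CSA dispatch in
// FastNewClosureStub::Generate above; assumes v8::internal headers.
int FunctionMapIndexFor(uint32_t compiler_hints) {
  bool is_strict =
      compiler_hints & (1 << SharedFunctionInfo::kStrictModeBit);
  // No function-kind bits set: a normal function.
  if ((compiler_hints & SharedFunctionInfo::FunctionKindBits::kMask) == 0) {
    return is_strict ? Context::STRICT_FUNCTION_MAP_INDEX
                     : Context::SLOPPY_FUNCTION_MAP_INDEX;
  }
  if (compiler_hints & (1 << SharedFunctionInfo::kIsGeneratorBit)) {
    return is_strict ? Context::STRICT_GENERATOR_FUNCTION_MAP_INDEX
                     : Context::SLOPPY_GENERATOR_FUNCTION_MAP_INDEX;
  }
  if (compiler_hints & (1 << SharedFunctionInfo::kIsAsyncFunctionBit)) {
    return is_strict ? Context::STRICT_ASYNC_FUNCTION_MAP_INDEX
                     : Context::SLOPPY_ASYNC_FUNCTION_MAP_INDEX;
  }
  if (compiler_hints & SharedFunctionInfo::kClassConstructorBits) {
    return Context::STRICT_FUNCTION_MAP_INDEX;
  }
  // Remaining kinds (accessors, arrows, concise methods) get no prototype.
  return Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX;
}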
4569 | |
4570 void FastNewClosureStub::GenerateAssembly(CodeStubAssembler* assembler) const { | |
4571 assembler->Return( | |
4572 Generate(assembler, assembler->Parameter(0), assembler->Parameter(1))); | |
4573 } | |
4574 | |
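A note on the code-entry store in Generate above: JSFunction::kCodeEntryOffset holds a raw, untagged instruction pointer rather than a tagged Code object, which is why the stub adds Code::kHeaderSize - kHeapObjectTag to the builtin's heap pointer. A minimal sketch of the same address arithmetic, assuming V8's standard heap-object tagging (the helper EntryOf is hypothetical):

// Illustration only: compute the raw entry address of a Code object the
// same way the stub does above.
Address EntryOf(Code* code) {
  // A Code* is a tagged heap pointer; the first instruction starts right
  // after the Code header, so strip the tag and skip past the header.
  return reinterpret_cast<Address>(code) + Code::kHeaderSize - kHeapObjectTag;
}

This is the address that the per-architecture JSFunction call sequence jumps through, so storing the CompileLazy builtin's entry here means the first call to the closure triggers compilation.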
4428 void CreateAllocationSiteStub::GenerateAheadOfTime(Isolate* isolate) { | 4575 void CreateAllocationSiteStub::GenerateAheadOfTime(Isolate* isolate) { |
4429 CreateAllocationSiteStub stub(isolate); | 4576 CreateAllocationSiteStub stub(isolate); |
4430 stub.GetCode(); | 4577 stub.GetCode(); |
4431 } | 4578 } |
4432 | 4579 |
4433 | 4580 |
4434 void CreateWeakCellStub::GenerateAheadOfTime(Isolate* isolate) { | 4581 void CreateWeakCellStub::GenerateAheadOfTime(Isolate* isolate) { |
4435 CreateWeakCellStub stub(isolate); | 4582 CreateWeakCellStub stub(isolate); |
4436 stub.GetCode(); | 4583 stub.GetCode(); |
4437 } | 4584 } |
(...skipping 292 matching lines...) | |
4730 if (type->Is(Type::UntaggedPointer())) { | 4877 if (type->Is(Type::UntaggedPointer())) { |
4731 return Representation::External(); | 4878 return Representation::External(); |
4732 } | 4879 } |
4733 | 4880 |
4734 DCHECK(!type->Is(Type::Untagged())); | 4881 DCHECK(!type->Is(Type::Untagged())); |
4735 return Representation::Tagged(); | 4882 return Representation::Tagged(); |
4736 } | 4883 } |
4737 | 4884 |
4738 } // namespace internal | 4885 } // namespace internal |
4739 } // namespace v8 | 4886 } // namespace v8 |