// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/builtins/builtins-constructor.h"
#include "src/ast/ast.h"
#include "src/builtins/builtins-utils.h"
#include "src/builtins/builtins.h"
#include "src/code-factory.h"
#include "src/code-stub-assembler.h"
#include "src/counters.h"
#include "src/interface-descriptors.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {

typedef compiler::Node Node;

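// Creates a new JSFunction ("closure") for the given SharedFunctionInfo;
// roughly what happens every time a function literal such as
// `function inner() {}` is evaluated. The closure is allocated in new space,
// receives the map matching its FunctionKind and LanguageMode, and has its
// code entry pointed at the CompileLazy builtin so that compilation is
// deferred until the first call.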
Node* ConstructorBuiltinsAssembler::EmitFastNewClosure(Node* shared_info,
                                                       Node* feedback_vector,
                                                       Node* slot,
                                                       Node* context) {
  typedef compiler::CodeAssembler::Label Label;
  typedef compiler::CodeAssembler::Variable Variable;

  Isolate* isolate = this->isolate();
  Factory* factory = isolate->factory();
  IncrementCounter(isolate->counters()->fast_new_closure_total(), 1);

  // Create a new closure from the given function info in new space.
  Node* result = Allocate(JSFunction::kSize);

  // Calculate the index of the map we should install on the function based on
  // the FunctionKind and LanguageMode of the function.
  // Note: Must be kept in sync with Context::FunctionMapIndex.
  Node* compiler_hints =
      LoadObjectField(shared_info, SharedFunctionInfo::kCompilerHintsOffset,
                      MachineType::Uint32());
  Node* is_strict = Word32And(
      compiler_hints, Int32Constant(1 << SharedFunctionInfo::kStrictModeBit));

  Label if_normal(this), if_generator(this), if_async(this),
      if_class_constructor(this), if_function_without_prototype(this),
      load_map(this);
  Variable map_index(this, MachineType::PointerRepresentation());

  STATIC_ASSERT(FunctionKind::kNormalFunction == 0);
  Node* is_not_normal =
      Word32And(compiler_hints,
                Int32Constant(SharedFunctionInfo::kAllFunctionKindBitsMask));
  GotoIfNot(is_not_normal, &if_normal);

  Node* is_generator = Word32And(
      compiler_hints, Int32Constant(FunctionKind::kGeneratorFunction
                                    << SharedFunctionInfo::kFunctionKindShift));
  GotoIf(is_generator, &if_generator);

  Node* is_async = Word32And(
      compiler_hints, Int32Constant(FunctionKind::kAsyncFunction
                                    << SharedFunctionInfo::kFunctionKindShift));
  GotoIf(is_async, &if_async);

  Node* is_class_constructor = Word32And(
      compiler_hints, Int32Constant(FunctionKind::kClassConstructor
                                    << SharedFunctionInfo::kFunctionKindShift));
  GotoIf(is_class_constructor, &if_class_constructor);

  if (FLAG_debug_code) {
    // At this point the function must be an accessor, an arrow function, or a
    // concise method, none of which has a prototype.
    CSA_ASSERT(
        this,
        Word32And(compiler_hints,
                  Int32Constant((FunctionKind::kAccessorFunction |
                                 FunctionKind::kArrowFunction |
                                 FunctionKind::kConciseMethod)
                                << SharedFunctionInfo::kFunctionKindShift)));
  }
  Goto(&if_function_without_prototype);

  Bind(&if_normal);
  {
    map_index.Bind(SelectIntPtrConstant(is_strict,
                                        Context::STRICT_FUNCTION_MAP_INDEX,
                                        Context::SLOPPY_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&if_generator);
  {
    map_index.Bind(IntPtrConstant(Context::GENERATOR_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&if_async);
  {
    map_index.Bind(IntPtrConstant(Context::ASYNC_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&if_class_constructor);
  {
    map_index.Bind(IntPtrConstant(Context::CLASS_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&if_function_without_prototype);
  {
    map_index.Bind(
        IntPtrConstant(Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&load_map);

  // Get the function map in the current native context and set that
  // as the map of the allocated object.
  Node* native_context = LoadNativeContext(context);
  Node* map_slot_value =
      LoadFixedArrayElement(native_context, map_index.value());
  StoreMapNoWriteBarrier(result, map_slot_value);

  // Initialize the rest of the function.
  Node* empty_fixed_array = HeapConstant(factory->empty_fixed_array());
  StoreObjectFieldNoWriteBarrier(result, JSObject::kPropertiesOffset,
                                 empty_fixed_array);
  StoreObjectFieldNoWriteBarrier(result, JSObject::kElementsOffset,
                                 empty_fixed_array);
  Node* literals_cell = LoadFixedArrayElement(
      feedback_vector, slot, 0, CodeStubAssembler::SMI_PARAMETERS);
  {
    // Bump the closure counter encoded in the cell's map.
    Node* cell_map = LoadMap(literals_cell);
    Label no_closures(this), one_closure(this), cell_done(this);

    GotoIf(IsNoClosuresCellMap(cell_map), &no_closures);
    GotoIf(IsOneClosureCellMap(cell_map), &one_closure);
    CSA_ASSERT(this, IsManyClosuresCellMap(cell_map));
    Goto(&cell_done);

    Bind(&no_closures);
    StoreMapNoWriteBarrier(literals_cell, Heap::kOneClosureCellMapRootIndex);
    Goto(&cell_done);

    Bind(&one_closure);
    StoreMapNoWriteBarrier(literals_cell, Heap::kManyClosuresCellMapRootIndex);
    Goto(&cell_done);

    Bind(&cell_done);
  }
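  // The cell's map now records how many closures have been created for this
  // feedback slot (none, one, or many); presumably consumers of the feedback
  // use this to decide whether it is safe to specialize code to a single
  // closure.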
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kFeedbackVectorOffset,
                                 literals_cell);
  StoreObjectFieldNoWriteBarrier(
      result, JSFunction::kPrototypeOrInitialMapOffset, TheHoleConstant());
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kSharedFunctionInfoOffset,
                                 shared_info);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, context);
  Handle<Code> lazy_builtin_handle(
      isolate->builtins()->builtin(Builtins::kCompileLazy));
  Node* lazy_builtin = HeapConstant(lazy_builtin_handle);
  Node* lazy_builtin_entry =
      IntPtrAdd(BitcastTaggedToWord(lazy_builtin),
                IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kCodeEntryOffset,
                                 lazy_builtin_entry,
                                 MachineType::PointerRepresentation());
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kNextFunctionLinkOffset,
                                 UndefinedConstant());

  return result;
}

TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
  Node* shared = Parameter(FastNewClosureDescriptor::kSharedFunctionInfo);
  Node* context = Parameter(FastNewClosureDescriptor::kContext);
  Node* vector = Parameter(FastNewClosureDescriptor::kVector);
  Node* slot = Parameter(FastNewClosureDescriptor::kSlot);
  Return(EmitFastNewClosure(shared, vector, slot, context));
}

TF_BUILTIN(FastNewObject, ConstructorBuiltinsAssembler) {
  typedef FastNewObjectDescriptor Descriptor;
  Node* context = Parameter(Descriptor::kContext);
  Node* target = Parameter(Descriptor::kTarget);
  Node* new_target = Parameter(Descriptor::kNewTarget);

  Label call_runtime(this);

  Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
  Return(result);

  Bind(&call_runtime);
  TailCallRuntime(Runtime::kNewObject, context, target, new_target);
}

Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
                                                      Node* target,
                                                      Node* new_target) {
  Variable var_obj(this, MachineRepresentation::kTagged);
  Label call_runtime(this), end(this);

  Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
  var_obj.Bind(result);
  Goto(&end);

  Bind(&call_runtime);
  var_obj.Bind(CallRuntime(Runtime::kNewObject, context, target, new_target));
  Goto(&end);

  Bind(&end);
  return var_obj.value();
}

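// Fast path for allocating the receiver of `new target(...)`: the object is
// instantiated directly from new.target's initial map, and |call_runtime| is
// taken whenever a precondition fails (non-JSFunction new.target, missing or
// non-Map initial map, or a target/new.target constructor mismatch).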
Node* ConstructorBuiltinsAssembler::EmitFastNewObject(
    Node* context, Node* target, Node* new_target,
    CodeAssemblerLabel* call_runtime) {
  CSA_ASSERT(this, HasInstanceType(target, JS_FUNCTION_TYPE));
  CSA_ASSERT(this, IsJSReceiver(new_target));

  // Verify that the new target is a JSFunction.
  Label fast(this), end(this);
  GotoIf(HasInstanceType(new_target, JS_FUNCTION_TYPE), &fast);
  Goto(call_runtime);

  Bind(&fast);

  // Load the initial map and verify that it's in fact a map.
  Node* initial_map =
      LoadObjectField(new_target, JSFunction::kPrototypeOrInitialMapOffset);
  GotoIf(TaggedIsSmi(initial_map), call_runtime);
  GotoIf(DoesntHaveInstanceType(initial_map, MAP_TYPE), call_runtime);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  Node* new_target_constructor =
      LoadObjectField(initial_map, Map::kConstructorOrBackPointerOffset);
  GotoIf(WordNotEqual(target, new_target_constructor), call_runtime);
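  // This catches, e.g., `Reflect.construct(A, [], B)`: target is A, but B's
  // initial map names B as its constructor.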

  Variable properties(this, MachineRepresentation::kTagged);

  Label instantiate_map(this), allocate_properties(this);
  GotoIf(IsDictionaryMap(initial_map), &allocate_properties);
  {
    properties.Bind(EmptyFixedArrayConstant());
    Goto(&instantiate_map);
  }
  Bind(&allocate_properties);
  {
    properties.Bind(AllocateNameDictionary(NameDictionary::kInitialCapacity));
    Goto(&instantiate_map);
  }

  Bind(&instantiate_map);

  Node* object = AllocateJSObjectFromMap(initial_map, properties.value());

  Node* instance_size_words = ChangeUint32ToWord(LoadObjectField(
      initial_map, Map::kInstanceSizeOffset, MachineType::Uint8()));
  Node* instance_size =
      WordShl(instance_size_words, IntPtrConstant(kPointerSizeLog2));

  // Perform in-object slack tracking if requested.
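  // Slack tracking: a map starts out with a generous instance size; each
  // construction decrements Map::ConstructionCounter, and once it reaches
  // zero Runtime::kFinalizeInstanceSize shrinks the map to the size actually
  // used. Until then, unused fields are filled with one-word fillers so the
  // heap stays iterable.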
  Node* bit_field3 = LoadMapBitField3(initial_map);
  Label slack_tracking(this), finalize(this, Label::kDeferred);
  GotoIf(IsSetWord32<Map::ConstructionCounter>(bit_field3), &slack_tracking);

  // Initialize remaining fields.
  {
    Comment("no slack tracking");
    InitializeFieldsWithRoot(object, IntPtrConstant(JSObject::kHeaderSize),
                             instance_size, Heap::kUndefinedValueRootIndex);
    Goto(&end);
  }

  {
    Bind(&slack_tracking);

    // Decrease generous allocation count.
    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
    Comment("update allocation count");
    Node* new_bit_field3 = Int32Sub(
        bit_field3, Int32Constant(1 << Map::ConstructionCounter::kShift));
    StoreObjectFieldNoWriteBarrier(initial_map, Map::kBitField3Offset,
                                   new_bit_field3,
                                   MachineRepresentation::kWord32);
    GotoIf(IsClearWord32<Map::ConstructionCounter>(new_bit_field3), &finalize);

    Node* unused_fields = LoadObjectField(
        initial_map, Map::kUnusedPropertyFieldsOffset, MachineType::Uint8());
    Node* used_size =
        IntPtrSub(instance_size, WordShl(ChangeUint32ToWord(unused_fields),
                                         IntPtrConstant(kPointerSizeLog2)));

    Comment("initialize filler fields (no finalize)");
    InitializeFieldsWithRoot(object, used_size, instance_size,
                             Heap::kOnePointerFillerMapRootIndex);

    Comment("initialize undefined fields (no finalize)");
    InitializeFieldsWithRoot(object, IntPtrConstant(JSObject::kHeaderSize),
                             used_size, Heap::kUndefinedValueRootIndex);
    Goto(&end);
  }

  {
    // Finalize the instance size.
    Bind(&finalize);

    Node* unused_fields = LoadObjectField(
        initial_map, Map::kUnusedPropertyFieldsOffset, MachineType::Uint8());
    Node* used_size =
        IntPtrSub(instance_size, WordShl(ChangeUint32ToWord(unused_fields),
                                         IntPtrConstant(kPointerSizeLog2)));

    Comment("initialize filler fields (finalize)");
    InitializeFieldsWithRoot(object, used_size, instance_size,
                             Heap::kOnePointerFillerMapRootIndex);

    Comment("initialize undefined fields (finalize)");
    InitializeFieldsWithRoot(object, IntPtrConstant(JSObject::kHeaderSize),
                             used_size, Heap::kUndefinedValueRootIndex);

    CallRuntime(Runtime::kFinalizeInstanceSize, context, initial_map);
    Goto(&end);
  }

  Bind(&end);
  return object;
}

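// Allocates the Context for a function (or eval) whose variables must be
// heap-allocated, e.g. `function f() { let x = 1; return () => x; }`, where
// `x` has to outlive f's stack frame.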
Node* ConstructorBuiltinsAssembler::EmitFastNewFunctionContext(
    Node* function, Node* slots, Node* context, ScopeType scope_type) {
  slots = ChangeUint32ToWord(slots);

  // TODO(ishell): Use CSA::OptimalParameterMode() here.
  CodeStubAssembler::ParameterMode mode = CodeStubAssembler::INTPTR_PARAMETERS;
  Node* min_context_slots = IntPtrConstant(Context::MIN_CONTEXT_SLOTS);
  Node* length = IntPtrAdd(slots, min_context_slots);
  Node* size = GetFixedArrayAllocationSize(length, FAST_ELEMENTS, mode);
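  // A function context is laid out like a FixedArray: MIN_CONTEXT_SLOTS fixed
  // header slots (closure, previous context, extension, native context)
  // followed by one slot per context-allocated variable.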

  // Allocate the function context in new space.
  Node* function_context = AllocateInNewSpace(size);

  Heap::RootListIndex context_type;
  switch (scope_type) {
    case EVAL_SCOPE:
      context_type = Heap::kEvalContextMapRootIndex;
      break;
    case FUNCTION_SCOPE:
      context_type = Heap::kFunctionContextMapRootIndex;
      break;
    default:
      UNREACHABLE();
  }
  StoreMapNoWriteBarrier(function_context, context_type);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kLengthOffset,
                                 SmiTag(length));

  // Set up the fixed slots.
  StoreFixedArrayElement(function_context, Context::CLOSURE_INDEX, function,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(function_context, Context::PREVIOUS_INDEX, context,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(function_context, Context::EXTENSION_INDEX,
                         TheHoleConstant(), SKIP_WRITE_BARRIER);

  // Copy the native context from the previous context.
  Node* native_context = LoadNativeContext(context);
  StoreFixedArrayElement(function_context, Context::NATIVE_CONTEXT_INDEX,
                         native_context, SKIP_WRITE_BARRIER);

  // Initialize the rest of the slots to undefined.
  Node* undefined = UndefinedConstant();
  BuildFastFixedArrayForEach(
      function_context, FAST_ELEMENTS, min_context_slots, length,
      [this, undefined](Node* context, Node* offset) {
        StoreNoWriteBarrier(MachineRepresentation::kTagged, context, offset,
                            undefined);
      },
      mode);

  return function_context;
}

// static
int ConstructorBuiltinsAssembler::MaximumFunctionContextSlots() {
  return FLAG_test_small_max_function_context_stub_size ? kSmallMaximumSlots
                                                        : kMaximumSlots;
}

TF_BUILTIN(FastNewFunctionContextEval, ConstructorBuiltinsAssembler) {
  Node* function = Parameter(FastNewFunctionContextDescriptor::kFunction);
  Node* slots = Parameter(FastNewFunctionContextDescriptor::kSlots);
  Node* context = Parameter(FastNewFunctionContextDescriptor::kContext);
  Return(EmitFastNewFunctionContext(function, slots, context,
                                    ScopeType::EVAL_SCOPE));
}

TF_BUILTIN(FastNewFunctionContextFunction, ConstructorBuiltinsAssembler) {
  Node* function = Parameter(FastNewFunctionContextDescriptor::kFunction);
  Node* slots = Parameter(FastNewFunctionContextDescriptor::kSlots);
  Node* context = Parameter(FastNewFunctionContextDescriptor::kContext);
  Return(EmitFastNewFunctionContext(function, slots, context,
                                    ScopeType::FUNCTION_SCOPE));
}

Handle<Code> Builtins::NewFunctionContext(ScopeType scope_type) {
  switch (scope_type) {
    case ScopeType::EVAL_SCOPE:
      return FastNewFunctionContextEval();
    case ScopeType::FUNCTION_SCOPE:
      return FastNewFunctionContextFunction();
    default:
      UNREACHABLE();
  }
  return Handle<Code>::null();
}

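// Clones the boilerplate JSRegExp created for a regexp literal such as
// `/ab+c/g`. The clone is a field-for-field shallow copy, so in-object state
// (e.g. lastIndex) is carried over as-is.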
Node* ConstructorBuiltinsAssembler::EmitFastCloneRegExp(Node* closure,
                                                        Node* literal_index,
                                                        Node* pattern,
                                                        Node* flags,
                                                        Node* context) {
  typedef CodeStubAssembler::Label Label;
  typedef CodeStubAssembler::Variable Variable;
  typedef compiler::Node Node;

  Label call_runtime(this, Label::kDeferred), end(this);

  Variable result(this, MachineRepresentation::kTagged);

  Node* cell = LoadObjectField(closure, JSFunction::kFeedbackVectorOffset);
  Node* feedback_vector = LoadObjectField(cell, Cell::kValueOffset);
  Node* boilerplate = LoadFixedArrayElement(feedback_vector, literal_index, 0,
                                            CodeStubAssembler::SMI_PARAMETERS);
  GotoIf(IsUndefined(boilerplate), &call_runtime);

  {
    int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
    Node* copy = Allocate(size);
    for (int offset = 0; offset < size; offset += kPointerSize) {
      Node* value = LoadObjectField(boilerplate, offset);
      StoreObjectFieldNoWriteBarrier(copy, offset, value);
    }
    result.Bind(copy);
    Goto(&end);
  }

  Bind(&call_runtime);
  {
    result.Bind(CallRuntime(Runtime::kCreateRegExpLiteral, context, closure,
                            literal_index, pattern, flags));
    Goto(&end);
  }

  Bind(&end);
  return result.value();
}

TF_BUILTIN(FastCloneRegExp, ConstructorBuiltinsAssembler) {
  Node* closure = Parameter(FastCloneRegExpDescriptor::kClosure);
  Node* literal_index = Parameter(FastCloneRegExpDescriptor::kLiteralIndex);
  Node* pattern = Parameter(FastCloneRegExpDescriptor::kPattern);
  Node* flags = Parameter(FastCloneRegExpDescriptor::kFlags);
  Node* context = Parameter(FastCloneRegExpDescriptor::kContext);

  Return(EmitFastCloneRegExp(closure, literal_index, pattern, flags, context));
}

Node* ConstructorBuiltinsAssembler::NonEmptyShallowClone(
    Node* boilerplate, Node* boilerplate_map, Node* boilerplate_elements,
    Node* allocation_site, Node* capacity, ElementsKind kind) {
  typedef CodeStubAssembler::ParameterMode ParameterMode;

  ParameterMode param_mode = OptimalParameterMode();

  Node* length = LoadJSArrayLength(boilerplate);
  capacity = TaggedToParameter(capacity, param_mode);

  Node *array, *elements;
  std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
      kind, boilerplate_map, length, allocation_site, capacity, param_mode);

  Comment("copy elements header");
  // Header consists of map and length.
  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
  StoreMap(elements, LoadMap(boilerplate_elements));
  {
    int offset = FixedArrayBase::kLengthOffset;
    StoreObjectFieldNoWriteBarrier(
        elements, offset, LoadObjectField(boilerplate_elements, offset));
  }

  length = TaggedToParameter(length, param_mode);

  Comment("copy boilerplate elements");
  CopyFixedArrayElements(kind, boilerplate_elements, elements, length,
                         SKIP_WRITE_BARRIER, param_mode);
  IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1);

  return array;
}

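// Clones the boilerplate array of an array literal such as `[1, 2, 3]`.
// Three boilerplate shapes are handled inline: zero-capacity arrays, COW
// elements (shared with the boilerplate and copied lazily on first write),
// and regular fast or double elements, which NonEmptyShallowClone copies
// eagerly.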
Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowArray(
    Node* closure, Node* literal_index, Node* context,
    CodeAssemblerLabel* call_runtime, AllocationSiteMode allocation_site_mode) {
  typedef CodeStubAssembler::Label Label;
  typedef CodeStubAssembler::Variable Variable;
  typedef compiler::Node Node;

  Label zero_capacity(this), cow_elements(this), fast_elements(this),
      return_result(this);
  Variable result(this, MachineRepresentation::kTagged);

  Node* cell = LoadObjectField(closure, JSFunction::kFeedbackVectorOffset);
  Node* feedback_vector = LoadObjectField(cell, Cell::kValueOffset);
  Node* allocation_site = LoadFixedArrayElement(
      feedback_vector, literal_index, 0, CodeStubAssembler::SMI_PARAMETERS);
  GotoIf(IsUndefined(allocation_site), call_runtime);

  Node* boilerplate =
      LoadObjectField(allocation_site, AllocationSite::kTransitionInfoOffset);
  Node* boilerplate_map = LoadMap(boilerplate);
  Node* boilerplate_elements = LoadElements(boilerplate);
  Node* capacity = LoadFixedArrayBaseLength(boilerplate_elements);
  allocation_site =
      allocation_site_mode == TRACK_ALLOCATION_SITE ? allocation_site : nullptr;

  Node* zero = SmiConstant(Smi::kZero);
  GotoIf(SmiEqual(capacity, zero), &zero_capacity);

  Node* elements_map = LoadMap(boilerplate_elements);
  GotoIf(IsFixedCOWArrayMap(elements_map), &cow_elements);

  GotoIf(IsFixedArrayMap(elements_map), &fast_elements);
  {
    Comment("fast double elements path");
    if (FLAG_debug_code) {
      Label correct_elements_map(this), abort(this, Label::kDeferred);
      Branch(IsFixedDoubleArrayMap(elements_map), &correct_elements_map,
             &abort);

      Bind(&abort);
      {
        Node* abort_id = SmiConstant(
            Smi::FromInt(BailoutReason::kExpectedFixedDoubleArrayMap));
        CallRuntime(Runtime::kAbort, context, abort_id);
        result.Bind(UndefinedConstant());
        Goto(&return_result);
      }
      Bind(&correct_elements_map);
    }

    Node* array =
        NonEmptyShallowClone(boilerplate, boilerplate_map, boilerplate_elements,
                             allocation_site, capacity, FAST_DOUBLE_ELEMENTS);
    result.Bind(array);
    Goto(&return_result);
  }

  Bind(&fast_elements);
  {
    Comment("fast elements path");
    Node* array =
        NonEmptyShallowClone(boilerplate, boilerplate_map, boilerplate_elements,
                             allocation_site, capacity, FAST_ELEMENTS);
    result.Bind(array);
    Goto(&return_result);
  }

  Variable length(this, MachineRepresentation::kTagged),
      elements(this, MachineRepresentation::kTagged);
  Label allocate_without_elements(this);

  Bind(&cow_elements);
  {
    Comment("fixed cow path");
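    // COW ("copy-on-write") elements are shared: the clone points at the same
    // backing store as the boilerplate, and the elements are only really
    // copied if one of the arrays is written to later.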
    length.Bind(LoadJSArrayLength(boilerplate));
    elements.Bind(boilerplate_elements);

    Goto(&allocate_without_elements);
  }

  Bind(&zero_capacity);
  {
    Comment("zero capacity path");
    length.Bind(zero);
    elements.Bind(LoadRoot(Heap::kEmptyFixedArrayRootIndex));

    Goto(&allocate_without_elements);
  }

  Bind(&allocate_without_elements);
  {
    Node* array = AllocateUninitializedJSArrayWithoutElements(
        FAST_ELEMENTS, boilerplate_map, length.value(), allocation_site);
    StoreObjectField(array, JSObject::kElementsOffset, elements.value());
    result.Bind(array);
    Goto(&return_result);
  }

  Bind(&return_result);
  return result.value();
}

void ConstructorBuiltinsAssembler::CreateFastCloneShallowArrayBuiltin(
    AllocationSiteMode allocation_site_mode) {
  typedef compiler::Node Node;
  typedef CodeStubAssembler::Label Label;

  Node* closure = Parameter(FastCloneShallowArrayDescriptor::kClosure);
  Node* literal_index =
      Parameter(FastCloneShallowArrayDescriptor::kLiteralIndex);
  Node* constant_elements =
      Parameter(FastCloneShallowArrayDescriptor::kConstantElements);
  Node* context = Parameter(FastCloneShallowArrayDescriptor::kContext);
  Label call_runtime(this, Label::kDeferred);
  Return(EmitFastCloneShallowArray(closure, literal_index, context,
                                   &call_runtime, allocation_site_mode));

  Bind(&call_runtime);
  {
    Comment("call runtime");
    Node* flags =
        SmiConstant(Smi::FromInt(ArrayLiteral::kShallowElements |
                                 (allocation_site_mode == TRACK_ALLOCATION_SITE
                                      ? 0
                                      : ArrayLiteral::kDisableMementos)));
    Return(CallRuntime(Runtime::kCreateArrayLiteral, context, closure,
                       literal_index, constant_elements, flags));
  }
}

TF_BUILTIN(FastCloneShallowArrayTrack, ConstructorBuiltinsAssembler) {
  CreateFastCloneShallowArrayBuiltin(TRACK_ALLOCATION_SITE);
}

TF_BUILTIN(FastCloneShallowArrayDontTrack, ConstructorBuiltinsAssembler) {
  CreateFastCloneShallowArrayBuiltin(DONT_TRACK_ALLOCATION_SITE);
}

Handle<Code> Builtins::NewCloneShallowArray(
    AllocationSiteMode allocation_mode) {
  switch (allocation_mode) {
    case TRACK_ALLOCATION_SITE:
      return FastCloneShallowArrayTrack();
    case DONT_TRACK_ALLOCATION_SITE:
      return FastCloneShallowArrayDontTrack();
    default:
      UNREACHABLE();
  }
  return Handle<Code>::null();
}

// static
int ConstructorBuiltinsAssembler::FastCloneShallowObjectPropertiesCount(
    int literal_length) {
  // Empty object literals are given room for
  // kInitialGlobalObjectUnusedPropertiesCount properties; this heuristic must
  // stay in sync with the runtime.
  // TODO(verwaest): Unify this with the heuristic in the runtime.
  return literal_length == 0
             ? JSObject::kInitialGlobalObjectUnusedPropertiesCount
             : literal_length;
}

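// Clones the boilerplate object of an object literal such as `{a: 1, b: 2}`.
// The clone is a raw word-by-word copy of the boilerplate's fields, so it
// bails out to |call_runtime| unless the boilerplate's instance size matches
// what the properties count predicts.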
Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowObject(
    CodeAssemblerLabel* call_runtime, Node* closure, Node* literals_index,
    Node* properties_count) {
  Node* cell = LoadObjectField(closure, JSFunction::kFeedbackVectorOffset);
  Node* feedback_vector = LoadObjectField(cell, Cell::kValueOffset);
  Node* allocation_site = LoadFixedArrayElement(
      feedback_vector, literals_index, 0, CodeStubAssembler::SMI_PARAMETERS);
  GotoIf(IsUndefined(allocation_site), call_runtime);

  // Calculate the object and allocation size based on the properties count.
  Node* object_size = IntPtrAdd(WordShl(properties_count, kPointerSizeLog2),
                                IntPtrConstant(JSObject::kHeaderSize));
  Node* allocation_size = object_size;
  if (FLAG_allocation_site_pretenuring) {
    allocation_size =
        IntPtrAdd(object_size, IntPtrConstant(AllocationMemento::kSize));
  }
  Node* boilerplate =
      LoadObjectField(allocation_site, AllocationSite::kTransitionInfoOffset);
  Node* boilerplate_map = LoadMap(boilerplate);
  Node* instance_size = LoadMapInstanceSize(boilerplate_map);
  Node* size_in_words = WordShr(object_size, kPointerSizeLog2);
  GotoIfNot(WordEqual(instance_size, size_in_words), call_runtime);

  Node* copy = AllocateInNewSpace(allocation_size);

  // Copy the boilerplate's fields word by word.
  Variable offset(this, MachineType::PointerRepresentation());
  offset.Bind(IntPtrConstant(-kHeapObjectTag));
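  // Starting at -kHeapObjectTag makes the first load hit offset 0 of the
  // (untagged) object, so the map word and all in-object fields are copied.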
  Node* end_offset = IntPtrAdd(object_size, offset.value());
  Label loop_body(this, &offset), loop_check(this, &offset);
  // We should always have an object size greater than zero.
  Goto(&loop_body);
  Bind(&loop_body);
  {
    // The Allocate above guarantees that the copy lies in new space. This
    // allows us to skip write barriers. This is necessary since we may also be
    // copying unboxed doubles.
    Node* field = Load(MachineType::IntPtr(), boilerplate, offset.value());
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), copy,
                        offset.value(), field);
    Goto(&loop_check);
  }
  Bind(&loop_check);
  {
    offset.Bind(IntPtrAdd(offset.value(), IntPtrConstant(kPointerSize)));
    GotoIfNot(IntPtrGreaterThanOrEqual(offset.value(), end_offset), &loop_body);
  }

  if (FLAG_allocation_site_pretenuring) {
    Node* memento = InnerAllocate(copy, object_size);
    StoreMapNoWriteBarrier(memento, Heap::kAllocationMementoMapRootIndex);
    StoreObjectFieldNoWriteBarrier(
        memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
    Node* memento_create_count = LoadObjectField(
        allocation_site, AllocationSite::kPretenureCreateCountOffset);
    memento_create_count =
        SmiAdd(memento_create_count, SmiConstant(Smi::FromInt(1)));
    StoreObjectFieldNoWriteBarrier(allocation_site,
                                   AllocationSite::kPretenureCreateCountOffset,
                                   memento_create_count);
  }

  // TODO(verwaest): Allocate and fill in double boxes.
  return copy;
}

void ConstructorBuiltinsAssembler::CreateFastCloneShallowObjectBuiltin(
    int properties_count) {
  DCHECK_GE(properties_count, 0);
  DCHECK_LE(properties_count, kMaximumClonedShallowObjectProperties);
  Label call_runtime(this);
  Node* closure = Parameter(0);
  Node* literals_index = Parameter(1);

  Node* properties_count_node =
      IntPtrConstant(FastCloneShallowObjectPropertiesCount(properties_count));
  Node* copy = EmitFastCloneShallowObject(
      &call_runtime, closure, literals_index, properties_count_node);
  Return(copy);

  Bind(&call_runtime);
  Node* constant_properties = Parameter(2);
  Node* flags = Parameter(3);
  Node* context = Parameter(4);
  TailCallRuntime(Runtime::kCreateObjectLiteral, context, closure,
                  literals_index, constant_properties, flags);
}

#define SHALLOW_OBJECT_BUILTIN(props)                                       \
  TF_BUILTIN(FastCloneShallowObject##props, ConstructorBuiltinsAssembler) { \
    CreateFastCloneShallowObjectBuiltin(props);                             \
  }

SHALLOW_OBJECT_BUILTIN(0);
SHALLOW_OBJECT_BUILTIN(1);
SHALLOW_OBJECT_BUILTIN(2);
SHALLOW_OBJECT_BUILTIN(3);
SHALLOW_OBJECT_BUILTIN(4);
SHALLOW_OBJECT_BUILTIN(5);
SHALLOW_OBJECT_BUILTIN(6);

Handle<Code> Builtins::NewCloneShallowObject(int length) {
  switch (length) {
    case 0:
      return FastCloneShallowObject0();
    case 1:
      return FastCloneShallowObject1();
    case 2:
      return FastCloneShallowObject2();
    case 3:
      return FastCloneShallowObject3();
    case 4:
      return FastCloneShallowObject4();
    case 5:
      return FastCloneShallowObject5();
    case 6:
      return FastCloneShallowObject6();
    default:
      UNREACHABLE();
  }
  return Handle<Code>::null();
}

}  // namespace internal
}  // namespace v8