Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 106 matching lines...) | |
| 117 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); | 117 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode); |
| 118 GenerateTailCallToReturnedCode(masm); | 118 GenerateTailCallToReturnedCode(masm); |
| 119 | 119 |
| 120 __ bind(&ok); | 120 __ bind(&ok); |
| 121 GenerateTailCallToSharedCode(masm); | 121 GenerateTailCallToSharedCode(masm); |
| 122 } | 122 } |
| 123 | 123 |
| 124 | 124 |
| 125 static void Generate_JSConstructStubHelper(MacroAssembler* masm, | 125 static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
| 126 bool is_api_function, | 126 bool is_api_function, |
| 127 bool count_constructions) { | 127 bool count_constructions, |
| 128 bool create_memento) { | |
| 128 // ----------- S t a t e ------------- | 129 // ----------- S t a t e ------------- |
| 129 // -- rax: number of arguments | 130 // -- rax: number of arguments |
| 130 // -- rdi: constructor function | 131 // -- rdi: constructor function |
| 132 // -- rbx: allocation site or undefined | |
| 131 // ----------------------------------- | 133 // ----------------------------------- |
| 132 | 134 |
| 133 // Should never count constructions for api objects. | 135 // Should never count constructions for api objects. |
| 134 ASSERT(!is_api_function || !count_constructions); | 136 ASSERT(!is_api_function || !count_constructions); |
| 137 | |
| 138 // Should never create mementos for api functions. | |
| 139 ASSERT(!is_api_function || !create_memento); | |
| 140 | |
| 141 // Should never create mementos before slack tracking is finished. | |
| 142 ASSERT(!count_constructions || !create_memento); | |
| 135 | 143 |
| 136 // Enter a construct frame. | 144 // Enter a construct frame. |
| 137 { | 145 { |
| 138 FrameScope scope(masm, StackFrame::CONSTRUCT); | 146 FrameScope scope(masm, StackFrame::CONSTRUCT); |
| 139 | 147 |
| 148 if (create_memento) { | |
| 149 __ AssertUndefinedOrAllocationSite(rbx); | |
| 150 __ push(rbx); | |
| 151 } | |
| 152 | |
| 140 // Store a smi-tagged arguments count on the stack. | 153 // Store a smi-tagged arguments count on the stack. |
| 141 __ Integer32ToSmi(rax, rax); | 154 __ Integer32ToSmi(rax, rax); |
| 142 __ push(rax); | 155 __ push(rax); |
| 143 | 156 |
| 144 // Push the function to invoke on the stack. | 157 // Push the function to invoke on the stack. |
| 145 __ push(rdi); | 158 __ push(rdi); |
| 146 | 159 |
| 147 // Try to allocate the object without transitioning into C code. If any of | 160 // Try to allocate the object without transitioning into C code. If any of |
| 148 // the preconditions is not met, the code bails out to the runtime call. | 161 // the preconditions is not met, the code bails out to the runtime call. |
| 149 Label rt_call, allocated; | 162 Label rt_call, allocated; |
| (...skipping 45 matching lines...) | |
| 195 | 208 |
| 196 __ pop(rdi); | 209 __ pop(rdi); |
| 197 __ pop(rax); | 210 __ pop(rax); |
| 198 | 211 |
| 199 __ bind(&allocate); | 212 __ bind(&allocate); |
| 200 } | 213 } |
| 201 | 214 |
| 202 // Now allocate the JSObject on the heap. | 215 // Now allocate the JSObject on the heap. |
| 203 __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset)); | 216 __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset)); |
| 204 __ shl(rdi, Immediate(kPointerSizeLog2)); | 217 __ shl(rdi, Immediate(kPointerSizeLog2)); |
| 218 if (create_memento) { | |
| 219 __ addq(rdi, Immediate(AllocationMemento::kSize)); | |
| 220 } | |
| 205 // rdi: size of new object | 221 // rdi: size of new object |
| 206 __ Allocate(rdi, | 222 __ Allocate(rdi, |
| 207 rbx, | 223 rbx, |
| 208 rdi, | 224 rdi, |
| 209 no_reg, | 225 no_reg, |
| 210 &rt_call, | 226 &rt_call, |
| 211 NO_ALLOCATION_FLAGS); | 227 NO_ALLOCATION_FLAGS); |
| 228 Factory* factory = masm->isolate()->factory(); | |
| 212 // Allocated the JSObject, now initialize the fields. | 229 // Allocated the JSObject, now initialize the fields. |
| 213 // rax: initial map | 230 // rax: initial map |
| 214 // rbx: JSObject (not HeapObject tagged - the actual address). | 231 // rbx: JSObject (not HeapObject tagged - the actual address). |
| 215 // rdi: start of next object | 232 // rdi: start of next object (including memento if create_memento) |
| 216 __ movp(Operand(rbx, JSObject::kMapOffset), rax); | 233 __ movp(Operand(rbx, JSObject::kMapOffset), rax); |
| 217 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); | 234 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); |
| 218 __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx); | 235 __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx); |
| 219 __ movp(Operand(rbx, JSObject::kElementsOffset), rcx); | 236 __ movp(Operand(rbx, JSObject::kElementsOffset), rcx); |
| 220 // Set extra fields in the newly allocated object. | 237 // Set extra fields in the newly allocated object. |
| 221 // rax: initial map | 238 // rax: initial map |
| 222 // rbx: JSObject | 239 // rbx: JSObject |
| 223 // rdi: start of next object | 240 // rdi: start of next object (including memento if create_memento) |
| 224 __ lea(rcx, Operand(rbx, JSObject::kHeaderSize)); | 241 __ lea(rcx, Operand(rbx, JSObject::kHeaderSize)); |
| 225 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); | 242 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); |
| 226 if (count_constructions) { | 243 if (count_constructions) { |
| 227 __ movzxbq(rsi, | 244 __ movzxbq(rsi, |
| 228 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset)); | 245 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset)); |
| 229 __ lea(rsi, | 246 __ lea(rsi, |
| 230 Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize)); | 247 Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize)); |
| 231 // rsi: offset of first field after pre-allocated fields | 248 // rsi: offset of first field after pre-allocated fields |
| 232 if (FLAG_debug_code) { | 249 if (FLAG_debug_code) { |
| 233 __ cmpq(rsi, rdi); | 250 __ cmpq(rsi, rdi); |
| 234 __ Assert(less_equal, | 251 __ Assert(less_equal, |
| 235 kUnexpectedNumberOfPreAllocatedPropertyFields); | 252 kUnexpectedNumberOfPreAllocatedPropertyFields); |
| 236 } | 253 } |
| 237 __ InitializeFieldsWithFiller(rcx, rsi, rdx); | 254 __ InitializeFieldsWithFiller(rcx, rsi, rdx); |
| 238 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex); | 255 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex); |
| 256 __ InitializeFieldsWithFiller(rcx, rdi, rdx); | |
| 257 } else if (create_memento) { | |
| 258 __ lea(rsi, Operand(rdi, -AllocationMemento::kSize)); | |
| 259 __ InitializeFieldsWithFiller(rcx, rsi, rdx); | |
| 260 | |
| 261 // Fill in memento fields if necessary. | |
| 262 // rsi: points to the allocated but uninitialized memento. | |
| 263 Handle<Map> allocation_memento_map = factory->allocation_memento_map(); | |
| 264 __ Move(Operand(rsi, AllocationMemento::kMapOffset), | |
| 265 allocation_memento_map); | |
| 266 // Get the cell or undefined. | |
| 267 __ movp(rdx, Operand(rsp, kPointerSize*2)); | |
| 268 __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), | |
| 269 rdx); | |
| 270 } else { | |
| 271 __ InitializeFieldsWithFiller(rcx, rdi, rdx); | |
| 239 } | 272 } |
Hannes Payer (out of office) 2014/02/18 16:24:26
__ InitializeFieldsWithFiller(rcx, rdi, rdx); outside of if?

mvstanton 2014/02/19 08:40:26
It's awkward to do it because in the create_memento …
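For reference, the reason the filler call is hard to hoist is visible in the boundary registers: the count_constructions and create_memento branches stop filling at rsi (end of the pre-allocated fields, or end of the object proper, excluding the trailing memento), while the plain branch fills all the way to rdi (end of the allocation). Below is a minimal standalone sketch of that size/boundary arithmetic — plain C++ with hypothetical sizes, assuming the memento is the two pointer-sized fields stored above (map and allocation site), not actual V8 code.

```cpp
// Illustrative only -- plain C++, not V8 MacroAssembler code.
#include <cstdio>

int main() {
  const int kPointerSize = 8;                            // x64
  const int kAllocationMementoSize = 2 * kPointerSize;   // map + allocation site (assumed)

  const int instance_size_words = 5;                     // hypothetical Map instance size
  const bool create_memento = true;

  // Models: shl(rdi, kPointerSizeLog2) then addq(rdi, AllocationMemento::kSize).
  int object_size     = instance_size_words * kPointerSize;
  int allocation_size = object_size + (create_memento ? kAllocationMementoSize : 0);

  // Fields are filled with undefined only up to the end of the JSObject proper;
  // the memento occupies the trailing bytes and is initialized separately.
  int fill_end = create_memento ? allocation_size - kAllocationMementoSize  // rsi
                                : allocation_size;                          // rdi

  std::printf("allocate %d bytes, fill object fields up to offset %d\n",
              allocation_size, fill_end);
  return 0;
}
```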
| 240 __ InitializeFieldsWithFiller(rcx, rdi, rdx); | |
| 241 | 273 |
| 242 // Add the object tag to make the JSObject real, so that we can continue | 274 // Add the object tag to make the JSObject real, so that we can continue |
| 243 // and jump into the continuation code at any time from now on. Any | 275 // and jump into the continuation code at any time from now on. Any |
| 244 // failures need to undo the allocation, so that the heap is in a | 276 // failures need to undo the allocation, so that the heap is in a |
| 245 // consistent state and verifiable. | 277 // consistent state and verifiable. |
| 246 // rax: initial map | 278 // rax: initial map |
| 247 // rbx: JSObject | 279 // rbx: JSObject |
| 248 // rdi: start of next object | 280 // rdi: start of next object |
| 249 __ or_(rbx, Immediate(kHeapObjectTag)); | 281 __ or_(rbx, Immediate(kHeapObjectTag)); |
| 250 | 282 |
| (...skipping 71 matching lines...) | |
| 322 // example, the map's unused properties potentially do not match the | 354 // example, the map's unused properties potentially do not match the |
| 323 // allocated objects unused properties. | 355 // allocated objects unused properties. |
| 324 // rbx: JSObject (previous new top) | 356 // rbx: JSObject (previous new top) |
| 325 __ bind(&undo_allocation); | 357 __ bind(&undo_allocation); |
| 326 __ UndoAllocationInNewSpace(rbx); | 358 __ UndoAllocationInNewSpace(rbx); |
| 327 } | 359 } |
| 328 | 360 |
| 329 // Allocate the new receiver object using the runtime call. | 361 // Allocate the new receiver object using the runtime call. |
| 330 // rdi: function (constructor) | 362 // rdi: function (constructor) |
| 331 __ bind(&rt_call); | 363 __ bind(&rt_call); |
| 364 int offset = 0; | |
| 365 if (create_memento) { | |
| 366 // Get the cell or allocation site. | |
| 367 __ movp(rdi, Operand(rsp, kPointerSize*2)); | |
| 368 __ push(rdi); | |
| 369 offset = kPointerSize; | |
| 370 } | |
| 371 | |
| 332 // Must restore rdi (constructor) before calling runtime. | 372 // Must restore rdi (constructor) before calling runtime. |
| 333 __ movp(rdi, Operand(rsp, 0)); | 373 __ movp(rdi, Operand(rsp, offset)); |
| 334 __ push(rdi); | 374 __ push(rdi); |
| 335 __ CallRuntime(Runtime::kNewObject, 1); | 375 if (create_memento) { |
| 376 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2); | |
| 377 } else { | |
| 378 __ CallRuntime(Runtime::kNewObject, 1); | |
| 379 } | |
| 336 __ movp(rbx, rax); // store result in rbx | 380 __ movp(rbx, rax); // store result in rbx |
| 337 | 381 |
| 382 // If we ended up using the runtime, and we want a memento, then the | |
| 383 // runtime call made it for us, and we shouldn't do create count | |
| 384 // increment. | |
| 385 Label count_incremented; | |
| 386 if (create_memento) { | |
| 387 __ jmp(&count_incremented); | |
| 388 } | |
| 389 | |
| 338 // New object allocated. | 390 // New object allocated. |
| 339 // rbx: newly allocated object | 391 // rbx: newly allocated object |
| 340 __ bind(&allocated); | 392 __ bind(&allocated); |
| 393 | |
| 394 if (create_memento) { | |
| 395 __ movp(rcx, Operand(rsp, kPointerSize*2)); | |
| 396 __ Cmp(rcx, masm->isolate()->factory()->undefined_value()); | |
| 397 __ j(equal, &count_incremented); | |
| 398 // rcx is an AllocationSite. We are creating a memento from it, so we | |
| 399 // need to increment the memento create count. | |
| 400 __ SmiAddConstant( | |
| 401 FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset), | |
| 402 Smi::FromInt(1)); | |
| 403 __ bind(&count_incremented); | |
| 404 } | |
Hannes Payer (out of office) 2014/02/18 16:24:26
__ bind(&count_incremented); outside of if?

mvstanton 2014/02/19 08:40:26
I thought about that a lot, and decided to put it …
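To make the control flow under discussion easier to follow: the create count is bumped only when the stub itself allocated the memento and the slot at rsp + 2 * kPointerSize holds an AllocationSite rather than undefined; the runtime path (kNewObjectWithAllocationSite) jumps straight to &count_incremented because the runtime already did the increment. A rough equivalent in plain C++ — illustrative stand-ins only, not V8 types:

```cpp
// Illustrative only -- plain C++, not V8 code. AllocationSite here is a stand-in.
#include <cstdint>

struct AllocationSite {
  int64_t pretenure_create_count = 0;  // models AllocationSite::kPretenureCreateCountOffset
};

// site_or_null models the value saved at rsp + 2 * kPointerSize
// (an AllocationSite, or undefined represented as nullptr).
void after_allocation(bool used_runtime_path, AllocationSite* site_or_null) {
  if (!used_runtime_path) {            // stub allocated the object and memento itself
    if (site_or_null != nullptr) {     // not undefined: a memento was created from this site
      site_or_null->pretenure_create_count += 1;  // SmiAddConstant(..., Smi::FromInt(1))
    }
  }
  // &count_incremented: the runtime path lands here directly, since
  // kNewObjectWithAllocationSite already incremented the count.
}
```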
| 405 | |
| 341 // Retrieve the function from the stack. | 406 // Retrieve the function from the stack. |
| 342 __ pop(rdi); | 407 __ pop(rdi); |
| 343 | 408 |
| 344 // Retrieve smi-tagged arguments count from the stack. | 409 // Retrieve smi-tagged arguments count from the stack. |
| 345 __ movp(rax, Operand(rsp, 0)); | 410 __ movp(rax, Operand(rsp, 0)); |
| 346 __ SmiToInteger32(rax, rax); | 411 __ SmiToInteger32(rax, rax); |
| 347 | 412 |
| 348 // Push the allocated receiver to the stack. We need two copies | 413 // Push the allocated receiver to the stack. We need two copies |
| 349 // because we may have to return the original one and the calling | 414 // because we may have to return the original one and the calling |
| 350 // conventions dictate that the called function pops the receiver. | 415 // conventions dictate that the called function pops the receiver. |
| (...skipping 62 matching lines...) | |
| 413 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); | 478 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); |
| 414 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); | 479 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); |
| 415 __ PushReturnAddressFrom(rcx); | 480 __ PushReturnAddressFrom(rcx); |
| 416 Counters* counters = masm->isolate()->counters(); | 481 Counters* counters = masm->isolate()->counters(); |
| 417 __ IncrementCounter(counters->constructed_objects(), 1); | 482 __ IncrementCounter(counters->constructed_objects(), 1); |
| 418 __ ret(0); | 483 __ ret(0); |
| 419 } | 484 } |
| 420 | 485 |
| 421 | 486 |
| 422 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { | 487 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { |
| 423 Generate_JSConstructStubHelper(masm, false, true); | 488 Generate_JSConstructStubHelper(masm, false, true, false); |
| 424 } | 489 } |
| 425 | 490 |
| 426 | 491 |
| 427 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { | 492 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { |
| 428 Generate_JSConstructStubHelper(masm, false, false); | 493 Generate_JSConstructStubHelper(masm, false, false, true); |
| 429 } | 494 } |
| 430 | 495 |
| 431 | 496 |
| 432 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { | 497 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { |
| 433 Generate_JSConstructStubHelper(masm, true, false); | 498 Generate_JSConstructStubHelper(masm, true, false, false); |
| 434 } | 499 } |
| 435 | 500 |
| 436 | 501 |
| 437 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, | 502 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
| 438 bool is_construct) { | 503 bool is_construct) { |
| 439 ProfileEntryHookStub::MaybeCallEntryHook(masm); | 504 ProfileEntryHookStub::MaybeCallEntryHook(masm); |
| 440 | 505 |
| 441 // Expects five C++ function parameters. | 506 // Expects five C++ function parameters. |
| 442 // - Address entry (ignored) | 507 // - Address entry (ignored) |
| 443 // - JSFunction* function ( | 508 // - JSFunction* function ( |
| (...skipping 990 matching lines...) | |
| 1434 __ bind(&ok); | 1499 __ bind(&ok); |
| 1435 __ ret(0); | 1500 __ ret(0); |
| 1436 } | 1501 } |
| 1437 | 1502 |
| 1438 | 1503 |
| 1439 #undef __ | 1504 #undef __ |
| 1440 | 1505 |
| 1441 } } // namespace v8::internal | 1506 } } // namespace v8::internal |
| 1442 | 1507 |
| 1443 #endif // V8_TARGET_ARCH_X64 | 1508 #endif // V8_TARGET_ARCH_X64 |
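
As a quick summary of how the new flag is wired by the three entry points above, and of the mutual-exclusion invariants asserted at the top of Generate_JSConstructStubHelper, here is a small standalone sketch (plain C++, illustrative only):

```cpp
// Illustrative only -- mirrors the ASSERTs and the three call sites above, not V8 code.
#include <cassert>

static void check_construct_stub_flags(bool is_api_function,
                                       bool count_constructions,
                                       bool create_memento) {
  assert(!is_api_function || !count_constructions);  // no slack tracking for api objects
  assert(!is_api_function || !create_memento);       // no mementos for api functions
  assert(!count_constructions || !create_memento);   // no mementos while slack tracking runs
}

int main() {
  check_construct_stub_flags(false, true,  false);   // Generate_JSConstructStubCountdown
  check_construct_stub_flags(false, false, true);    // Generate_JSConstructStubGeneric
  check_construct_stub_flags(true,  false, false);   // Generate_JSConstructStubApi
  return 0;
}
```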