| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 299 matching lines...) |
| 310 // Create a new closure from the given function info in new | 310 // Create a new closure from the given function info in new |
| 311 // space. Set the context to the current context in rsi. | 311 // space. Set the context to the current context in rsi. |
| 312 Counters* counters = masm->isolate()->counters(); | 312 Counters* counters = masm->isolate()->counters(); |
| 313 | 313 |
| 314 Label gc; | 314 Label gc; |
| 315 __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT); | 315 __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT); |
| 316 | 316 |
| 317 __ IncrementCounter(counters->fast_new_closure_total(), 1); | 317 __ IncrementCounter(counters->fast_new_closure_total(), 1); |
| 318 | 318 |
| 319 // Get the function info from the stack. | 319 // Get the function info from the stack. |
| 320 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); | 320 __ movq(rdx, StackOperandForArgument(1 * kPointerSize)); |
| 321 | 321 |
| 322 int map_index = Context::FunctionMapIndex(language_mode_, is_generator_); | 322 int map_index = Context::FunctionMapIndex(language_mode_, is_generator_); |
| 323 | 323 |
| 324 // Compute the function map in the current native context and set that | 324 // Compute the function map in the current native context and set that |
| 325 // as the map of the allocated object. | 325 // as the map of the allocated object. |
| 326 __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 326 __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| 327 __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset)); | 327 __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset)); |
| 328 __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index))); | 328 __ movq(rbx, Operand(rcx, Context::SlotOffset(map_index))); |
| 329 __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx); | 329 __ movq(FieldOperand(rax, JSObject::kMapOffset), rbx); |
| 330 | 330 |
| (...skipping 87 matching lines...) |
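The substantive change in this hunk (and throughout the file) is mechanical: every stack-argument access of the form `Operand(rsp, n * kPointerSize)` becomes `StackOperandForArgument(n * kPointerSize)`. The helper's definition is not visible in this diff; a minimal sketch of the presumed shape, assuming its only job is to centralize the displacement above the return-address slot so a port whose return-address slot is wider than `kPointerSize` can adjust it in one place:

```cpp
// Hypothetical sketch only -- the real definition is outside this diff.
// kReturnAddressSlotAdjust would be 0 on plain x64; a port with an
// 8-byte return-address slot but a 4-byte kPointerSize would set it
// to the difference.
static const int kReturnAddressSlotAdjust = 0;  // assumed

static inline Operand StackOperandForArgument(int offset) {
  // Callers pass n * kPointerSize for the n-th argument above the
  // return address, the same displacement the old form used.
  return Operand(rsp, offset + kReturnAddressSlotAdjust);
}
```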
| 418 rcx, | 418 rcx, |
| 419 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST), | 419 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST), |
| 420 rdx, | 420 rdx, |
| 421 rbx, | 421 rbx, |
| 422 kDontSaveFPRegs); | 422 kDontSaveFPRegs); |
| 423 | 423 |
| 424 // Return and remove the on-stack parameter. | 424 // Return and remove the on-stack parameter. |
| 425 __ ret(1 * kPointerSize); | 425 __ ret(1 * kPointerSize); |
| 426 | 426 |
| 427 __ bind(&restore); | 427 __ bind(&restore); |
| 428 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); | 428 __ movq(rdx, StackOperandForArgument(1 * kPointerSize)); |
| 429 __ jmp(&install_unoptimized); | 429 __ jmp(&install_unoptimized); |
| 430 | 430 |
| 431 // Create a new closure through the slower runtime call. | 431 // Create a new closure through the slower runtime call. |
| 432 __ bind(&gc); | 432 __ bind(&gc); |
| 433 __ pop(rcx); // Temporarily remove return address. | 433 __ pop(rcx); // Temporarily remove return address. |
| 434 __ pop(rdx); | 434 __ pop(rdx); |
| 435 __ push(rsi); | 435 __ push(rsi); |
| 436 __ push(rdx); | 436 __ push(rdx); |
| 437 __ PushRoot(Heap::kFalseValueRootIndex); | 437 __ PushRoot(Heap::kFalseValueRootIndex); |
| 438 __ push(rcx); // Restore return address. | 438 __ push(rcx); // Restore return address. |
| 439 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); | 439 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); |
| 440 } | 440 } |
| 441 | 441 |
| 442 | 442 |
| 443 void FastNewContextStub::Generate(MacroAssembler* masm) { | 443 void FastNewContextStub::Generate(MacroAssembler* masm) { |
| 444 // Try to allocate the context in new space. | 444 // Try to allocate the context in new space. |
| 445 Label gc; | 445 Label gc; |
| 446 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 446 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
| 447 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, | 447 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, |
| 448 rax, rbx, rcx, &gc, TAG_OBJECT); | 448 rax, rbx, rcx, &gc, TAG_OBJECT); |
| 449 | 449 |
| 450 // Get the function from the stack. | 450 // Get the function from the stack. |
| 451 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 451 __ movq(rcx, StackOperandForArgument(1 * kPointerSize)); |
| 452 | 452 |
| 453 // Set up the object header. | 453 // Set up the object header. |
| 454 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); | 454 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); |
| 455 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 455 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
| 456 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | 456 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); |
| 457 | 457 |
| 458 // Set up the fixed slots. | 458 // Set up the fixed slots. |
| 459 __ Set(rbx, 0); // Set to NULL. | 459 __ Set(rbx, 0); // Set to NULL. |
| 460 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); | 460 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); |
| 461 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi); | 461 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi); |
| (...skipping 25 matching lines...) |
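Note that the allocation size in this stub, `(length * kPointerSize) + FixedArray::kHeaderSize`, is the same quantity the block-context stub below writes as `FixedArray::SizeFor(length)`. A sketch of that identity, with the x64 header assumed to be a map pointer plus a length field:

```cpp
const int kPointerSize = 8;                          // assumed: x64
const int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length

// Stand-in for FixedArray::SizeFor(length).
int FixedArraySizeFor(int length) {
  return kFixedArrayHeaderSize + length * kPointerSize;
}
```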
| 487 // [rsp + (1 * kPointerSize)] : function | 487 // [rsp + (1 * kPointerSize)] : function |
| 488 // [rsp + (2 * kPointerSize)] : serialized scope info | 488 // [rsp + (2 * kPointerSize)] : serialized scope info |
| 489 | 489 |
| 490 // Try to allocate the context in new space. | 490 // Try to allocate the context in new space. |
| 491 Label gc; | 491 Label gc; |
| 492 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 492 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
| 493 __ Allocate(FixedArray::SizeFor(length), | 493 __ Allocate(FixedArray::SizeFor(length), |
| 494 rax, rbx, rcx, &gc, TAG_OBJECT); | 494 rax, rbx, rcx, &gc, TAG_OBJECT); |
| 495 | 495 |
| 496 // Get the function from the stack. | 496 // Get the function from the stack. |
| 497 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 497 __ movq(rcx, StackOperandForArgument(1 * kPointerSize)); |
| 498 | 498 |
| 499 // Get the serialized scope info from the stack. | 499 // Get the serialized scope info from the stack. |
| 500 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); | 500 __ movq(rbx, StackOperandForArgument(2 * kPointerSize)); |
| 501 | 501 |
| 502 // Set up the object header. | 502 // Set up the object header. |
| 503 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); | 503 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); |
| 504 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 504 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
| 505 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | 505 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); |
| 506 | 506 |
| 507 // If this block context is nested in the native context we get a smi | 507 // If this block context is nested in the native context we get a smi |
| 508 // sentinel instead of a function. The block context should get the | 508 // sentinel instead of a function. The block context should get the |
| 509 // canonical empty function of the native context as its closure which | 509 // canonical empty function of the native context as its closure which |
| 510 // we still have to look up. | 510 // we still have to look up. |
| (...skipping 758 matching lines...) |
| 1269 // Output: | 1269 // Output: |
| 1270 // xmm1 : untagged double result. | 1270 // xmm1 : untagged double result. |
| 1271 | 1271 |
| 1272 Label runtime_call; | 1272 Label runtime_call; |
| 1273 Label runtime_call_clear_stack; | 1273 Label runtime_call_clear_stack; |
| 1274 Label skip_cache; | 1274 Label skip_cache; |
| 1275 const bool tagged = (argument_type_ == TAGGED); | 1275 const bool tagged = (argument_type_ == TAGGED); |
| 1276 if (tagged) { | 1276 if (tagged) { |
| 1277 Label input_not_smi, loaded; | 1277 Label input_not_smi, loaded; |
| 1278 // Test that rax is a number. | 1278 // Test that rax is a number. |
| 1279 __ movq(rax, Operand(rsp, kPointerSize)); | 1279 __ movq(rax, StackOperandForArgument(1 * kPointerSize)); |
| 1280 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); | 1280 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); |
| 1281 // Input is a smi. Untag and load it onto the FPU stack. | 1281 // Input is a smi. Untag and load it onto the FPU stack. |
| 1282 // Then load the bits of the double into rbx. | 1282 // Then load the bits of the double into rbx. |
| 1283 __ SmiToInteger32(rax, rax); | 1283 __ SmiToInteger32(rax, rax); |
| 1284 __ subq(rsp, Immediate(kDoubleSize)); | 1284 __ subq(rsp, Immediate(kDoubleSize)); |
| 1285 __ cvtlsi2sd(xmm1, rax); | 1285 __ cvtlsi2sd(xmm1, rax); |
| 1286 __ movsd(Operand(rsp, 0), xmm1); | 1286 __ movsd(Operand(rsp, 0), xmm1); |
| 1287 __ movq(rbx, xmm1); | 1287 __ movq(rbx, xmm1); |
| 1288 __ movq(rdx, xmm1); | 1288 __ movq(rdx, xmm1); |
| 1289 __ fld_d(Operand(rsp, 0)); | 1289 __ fld_d(Operand(rsp, 0)); |
| (...skipping 512 matching lines...) |
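In the tagged path above, the stub materializes the input as a double in `xmm1` and then copies its raw 64 bits into `rbx`/`rdx` (`movq(rbx, xmm1)`) to key the transcendental cache. A portable C++ analogue of that bit extraction:

```cpp
#include <cstdint>
#include <cstring>

// Portable analogue of movq(rbx, xmm1): reinterpret the 64 bits of a
// double as an integer cache key without altering them.
uint64_t DoubleBits(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);
  return bits;
}
```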
| 1802 | 1802 |
| 1803 // Save 1 in double_result - we need this several times later on. | 1803 // Save 1 in double_result - we need this several times later on. |
| 1804 __ movq(scratch, Immediate(1)); | 1804 __ movq(scratch, Immediate(1)); |
| 1805 __ cvtlsi2sd(double_result, scratch); | 1805 __ cvtlsi2sd(double_result, scratch); |
| 1806 | 1806 |
| 1807 if (exponent_type_ == ON_STACK) { | 1807 if (exponent_type_ == ON_STACK) { |
| 1808 Label base_is_smi, unpack_exponent; | 1808 Label base_is_smi, unpack_exponent; |
| 1809 // The exponent and base are supplied as arguments on the stack. | 1809 // The exponent and base are supplied as arguments on the stack. |
| 1810 // This can only happen if the stub is called from non-optimized code. | 1810 // This can only happen if the stub is called from non-optimized code. |
| 1811 // Load input parameters from stack. | 1811 // Load input parameters from stack. |
| 1812 __ movq(base, Operand(rsp, 2 * kPointerSize)); | 1812 __ movq(base, StackOperandForArgument(2 * kPointerSize)); |
| 1813 __ movq(exponent, Operand(rsp, 1 * kPointerSize)); | 1813 __ movq(exponent, StackOperandForArgument(1 * kPointerSize)); |
| 1814 __ JumpIfSmi(base, &base_is_smi, Label::kNear); | 1814 __ JumpIfSmi(base, &base_is_smi, Label::kNear); |
| 1815 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset), | 1815 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset), |
| 1816 Heap::kHeapNumberMapRootIndex); | 1816 Heap::kHeapNumberMapRootIndex); |
| 1817 __ j(not_equal, &call_runtime); | 1817 __ j(not_equal, &call_runtime); |
| 1818 | 1818 |
| 1819 __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset)); | 1819 __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset)); |
| 1820 __ jmp(&unpack_exponent, Label::kNear); | 1820 __ jmp(&unpack_exponent, Label::kNear); |
| 1821 | 1821 |
| 1822 __ bind(&base_is_smi); | 1822 __ bind(&base_is_smi); |
| 1823 __ SmiToInteger32(base, base); | 1823 __ SmiToInteger32(base, base); |
| (...skipping 412 matching lines...) |
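In the `ON_STACK` case above, arguments were pushed left to right, so `Math.pow`'s exponent (pushed last) is the first slot above the return address and the base the second. A small sketch of that slot arithmetic, with x64's 8-byte stack slots assumed:

```cpp
#include <cassert>

const int kPointerSize = 8;  // assumed: x64 stack slots

// Offset of the n-th stack argument above the return address (slot 0).
int ArgumentOffset(int n) { return n * kPointerSize; }

int main() {
  assert(ArgumentOffset(1) == 8);   // exponent, pushed last
  assert(ArgumentOffset(2) == 16);  // base, pushed first
  return 0;
}
```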
| 2236 // rsp[0] : return address | 2236 // rsp[0] : return address |
| 2237 // rsp[8] : number of parameters (tagged) | 2237 // rsp[8] : number of parameters (tagged) |
| 2238 // rsp[16] : receiver displacement | 2238 // rsp[16] : receiver displacement |
| 2239 // rsp[24] : function | 2239 // rsp[24] : function |
| 2240 // Registers used over the whole function: | 2240 // Registers used over the whole function: |
| 2241 // rbx: the mapped parameter count (untagged) | 2241 // rbx: the mapped parameter count (untagged) |
| 2242 // rax: the allocated object (tagged). | 2242 // rax: the allocated object (tagged). |
| 2243 | 2243 |
| 2244 Factory* factory = masm->isolate()->factory(); | 2244 Factory* factory = masm->isolate()->factory(); |
| 2245 | 2245 |
| 2246 __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize)); | 2246 __ SmiToInteger64(rbx, StackOperandForArgument(1 * kPointerSize)); |
| 2247 // rbx = parameter count (untagged) | 2247 // rbx = parameter count (untagged) |
| 2248 | 2248 |
| 2249 // Check if the calling frame is an arguments adaptor frame. | 2249 // Check if the calling frame is an arguments adaptor frame. |
| 2250 Label runtime; | 2250 Label runtime; |
| 2251 Label adaptor_frame, try_allocate; | 2251 Label adaptor_frame, try_allocate; |
| 2252 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 2252 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 2253 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 2253 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
| 2254 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 2254 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 2255 __ j(equal, &adaptor_frame); | 2255 __ j(equal, &adaptor_frame); |
| 2256 | 2256 |
| 2257 // No adaptor, parameter count = argument count. | 2257 // No adaptor, parameter count = argument count. |
| 2258 __ movq(rcx, rbx); | 2258 __ movq(rcx, rbx); |
| 2259 __ jmp(&try_allocate, Label::kNear); | 2259 __ jmp(&try_allocate, Label::kNear); |
| 2260 | 2260 |
| 2261 // We have an adaptor frame. Patch the parameters pointer. | 2261 // We have an adaptor frame. Patch the parameters pointer. |
| 2262 __ bind(&adaptor_frame); | 2262 __ bind(&adaptor_frame); |
| 2263 __ SmiToInteger64(rcx, | 2263 __ SmiToInteger64(rcx, |
| 2264 Operand(rdx, | 2264 Operand(rdx, |
| 2265 ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2265 ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 2266 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, | 2266 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, |
| 2267 StandardFrameConstants::kCallerSPOffset)); | 2267 StandardFrameConstants::kCallerSPOffset)); |
| 2268 __ movq(Operand(rsp, 2 * kPointerSize), rdx); | 2268 __ movq(StackOperandForArgument(2 * kPointerSize), rdx); |
| 2269 | 2269 |
| 2270 // rbx = parameter count (untagged) | 2270 // rbx = parameter count (untagged) |
| 2271 // rcx = argument count (untagged) | 2271 // rcx = argument count (untagged) |
| 2272 // Compute the mapped parameter count = min(rbx, rcx) in rbx. | 2272 // Compute the mapped parameter count = min(rbx, rcx) in rbx. |
| 2273 __ cmpq(rbx, rcx); | 2273 __ cmpq(rbx, rcx); |
| 2274 __ j(less_equal, &try_allocate, Label::kNear); | 2274 __ j(less_equal, &try_allocate, Label::kNear); |
| 2275 __ movq(rbx, rcx); | 2275 __ movq(rbx, rcx); |
| 2276 | 2276 |
| 2277 __ bind(&try_allocate); | 2277 __ bind(&try_allocate); |
| 2278 | 2278 |
| (...skipping 40 matching lines...) |
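The `lea` in the adaptor-frame path above rebases the parameters pointer off the adaptor frame: the caller's stack pointer is the frame pointer plus `kCallerSPOffset`, plus one slot per argument. A hedged sketch of the arithmetic, with `kCallerSPOffset` assumed to be two slots (saved frame pointer plus return address):

```cpp
#include <cstdint>

const int kPointerSize = 8;
const int kCallerSPOffset = 2 * kPointerSize;  // assumed: saved rbp + ret

// Equivalent of:
//   lea(rdx, Operand(rdx, rcx, times_pointer_size, kCallerSPOffset))
uintptr_t ParametersPointer(uintptr_t adaptor_fp, int64_t argc) {
  return adaptor_fp + argc * kPointerSize + kCallerSPOffset;
}
```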
| 2319 // rcx = argument count (untagged) | 2319 // rcx = argument count (untagged) |
| 2320 // rdi = address of boilerplate object (tagged) | 2320 // rdi = address of boilerplate object (tagged) |
| 2321 // Copy the JS object part. | 2321 // Copy the JS object part. |
| 2322 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { | 2322 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { |
| 2323 __ movq(rdx, FieldOperand(rdi, i)); | 2323 __ movq(rdx, FieldOperand(rdi, i)); |
| 2324 __ movq(FieldOperand(rax, i), rdx); | 2324 __ movq(FieldOperand(rax, i), rdx); |
| 2325 } | 2325 } |
| 2326 | 2326 |
| 2327 // Set up the callee in-object property. | 2327 // Set up the callee in-object property. |
| 2328 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); | 2328 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); |
| 2329 __ movq(rdx, Operand(rsp, 3 * kPointerSize)); | 2329 __ movq(rdx, StackOperandForArgument(3 * kPointerSize)); |
| 2330 __ movq(FieldOperand(rax, JSObject::kHeaderSize + | 2330 __ movq(FieldOperand(rax, JSObject::kHeaderSize + |
| 2331 Heap::kArgumentsCalleeIndex * kPointerSize), | 2331 Heap::kArgumentsCalleeIndex * kPointerSize), |
| 2332 rdx); | 2332 rdx); |
| 2333 | 2333 |
| 2334 // Use the length (smi tagged) and set that as an in-object property too. | 2334 // Use the length (smi tagged) and set that as an in-object property too. |
| 2335 // Note: rcx is tagged from here on. | 2335 // Note: rcx is tagged from here on. |
| 2336 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 2336 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
| 2337 __ Integer32ToSmi(rcx, rcx); | 2337 __ Integer32ToSmi(rcx, rcx); |
| 2338 __ movq(FieldOperand(rax, JSObject::kHeaderSize + | 2338 __ movq(FieldOperand(rax, JSObject::kHeaderSize + |
| 2339 Heap::kArgumentsLengthIndex * kPointerSize), | 2339 Heap::kArgumentsLengthIndex * kPointerSize), |
| (...skipping 30 matching lines...) |
| 2370 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 | 2370 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 |
| 2371 // The mapped parameters thus need to get indices | 2371 // The mapped parameters thus need to get indices |
| 2372 // MIN_CONTEXT_SLOTS+parameter_count-1 .. | 2372 // MIN_CONTEXT_SLOTS+parameter_count-1 .. |
| 2373 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count | 2373 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count |
| 2374 // We loop from right to left. | 2374 // We loop from right to left. |
| 2375 Label parameters_loop, parameters_test; | 2375 Label parameters_loop, parameters_test; |
| 2376 | 2376 |
| 2377 // Load tagged parameter count into r9. | 2377 // Load tagged parameter count into r9. |
| 2378 __ Integer32ToSmi(r9, rbx); | 2378 __ Integer32ToSmi(r9, rbx); |
| 2379 __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS)); | 2379 __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS)); |
| 2380 __ addq(r8, Operand(rsp, 1 * kPointerSize)); | 2380 __ addq(r8, StackOperandForArgument(1 * kPointerSize)); |
| 2381 __ subq(r8, r9); | 2381 __ subq(r8, r9); |
| 2382 __ Move(r11, factory->the_hole_value()); | 2382 __ Move(r11, factory->the_hole_value()); |
| 2383 __ movq(rdx, rdi); | 2383 __ movq(rdx, rdi); |
| 2384 __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); | 2384 __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); |
| 2385 // r9 = loop variable (tagged) | 2385 // r9 = loop variable (tagged) |
| 2386 // r8 = mapping index (tagged) | 2386 // r8 = mapping index (tagged) |
| 2387 // r11 = the hole value | 2387 // r11 = the hole value |
| 2388 // rdx = address of parameter map (tagged) | 2388 // rdx = address of parameter map (tagged) |
| 2389 // rdi = address of backing store (tagged) | 2389 // rdi = address of backing store (tagged) |
| 2390 __ jmp(¶meters_test, Label::kNear); | 2390 __ jmp(¶meters_test, Label::kNear); |
| (...skipping 18 matching lines...) |
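A worked instance of the index scheme described above (mapped parameters fill context slots from the top down), with `MIN_CONTEXT_SLOTS` assumed to be 4 for illustration:

```cpp
#include <cstdio>

int main() {
  const int kMinContextSlots = 4;  // assumed MIN_CONTEXT_SLOTS
  const int parameter_count = 4;
  const int mapped_count = 2;
  // Right to left, as in parameters_loop: the last mapped_count
  // parameters take the highest slots, 7 and 6 here.
  for (int i = 0; i < mapped_count; i++) {
    std::printf("parameter %d -> context slot %d\n",
                parameter_count - 1 - i,
                kMinContextSlots + parameter_count - 1 - i);
  }
  return 0;
}
```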
| 2409 | 2409 |
| 2410 // rcx = argument count (tagged) | 2410 // rcx = argument count (tagged) |
| 2411 // rdi = address of backing store (tagged) | 2411 // rdi = address of backing store (tagged) |
| 2412 // Copy arguments header and remaining slots (if there are any). | 2412 // Copy arguments header and remaining slots (if there are any). |
| 2413 __ Move(FieldOperand(rdi, FixedArray::kMapOffset), | 2413 __ Move(FieldOperand(rdi, FixedArray::kMapOffset), |
| 2414 factory->fixed_array_map()); | 2414 factory->fixed_array_map()); |
| 2415 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); | 2415 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); |
| 2416 | 2416 |
| 2417 Label arguments_loop, arguments_test; | 2417 Label arguments_loop, arguments_test; |
| 2418 __ movq(r8, rbx); | 2418 __ movq(r8, rbx); |
| 2419 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); | 2419 __ movq(rdx, StackOperandForArgument(2 * kPointerSize)); |
| 2420 // Untag rcx for the loop below. | 2420 // Untag rcx for the loop below. |
| 2421 __ SmiToInteger64(rcx, rcx); | 2421 __ SmiToInteger64(rcx, rcx); |
| 2422 __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0)); | 2422 __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0)); |
| 2423 __ subq(rdx, kScratchRegister); | 2423 __ subq(rdx, kScratchRegister); |
| 2424 __ jmp(&arguments_test, Label::kNear); | 2424 __ jmp(&arguments_test, Label::kNear); |
| 2425 | 2425 |
| 2426 __ bind(&arguments_loop); | 2426 __ bind(&arguments_loop); |
| 2427 __ subq(rdx, Immediate(kPointerSize)); | 2427 __ subq(rdx, Immediate(kPointerSize)); |
| 2428 __ movq(r9, Operand(rdx, 0)); | 2428 __ movq(r9, Operand(rdx, 0)); |
| 2429 __ movq(FieldOperand(rdi, r8, | 2429 __ movq(FieldOperand(rdi, r8, |
| 2430 times_pointer_size, | 2430 times_pointer_size, |
| 2431 FixedArray::kHeaderSize), | 2431 FixedArray::kHeaderSize), |
| 2432 r9); | 2432 r9); |
| 2433 __ addq(r8, Immediate(1)); | 2433 __ addq(r8, Immediate(1)); |
| 2434 | 2434 |
| 2435 __ bind(&arguments_test); | 2435 __ bind(&arguments_test); |
| 2436 __ cmpq(r8, rcx); | 2436 __ cmpq(r8, rcx); |
| 2437 __ j(less, &arguments_loop, Label::kNear); | 2437 __ j(less, &arguments_loop, Label::kNear); |
| 2438 | 2438 |
| 2439 // Return and remove the on-stack parameters. | 2439 // Return and remove the on-stack parameters. |
| 2440 __ ret(3 * kPointerSize); | 2440 __ ret(3 * kPointerSize); |
| 2441 | 2441 |
| 2442 // Do the runtime call to allocate the arguments object. | 2442 // Do the runtime call to allocate the arguments object. |
| 2443 // rcx = argument count (untagged) | 2443 // rcx = argument count (untagged) |
| 2444 __ bind(&runtime); | 2444 __ bind(&runtime); |
| 2445 __ Integer32ToSmi(rcx, rcx); | 2445 __ Integer32ToSmi(rcx, rcx); |
| 2446 __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count. | 2446 // Patch argument count. |
| 2447 __ movq(StackOperandForArgument(1 * kPointerSize), rcx); |
| 2447 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); | 2448 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); |
| 2448 } | 2449 } |
| 2449 | 2450 |
| 2450 | 2451 |
| 2451 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { | 2452 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { |
| 2452 // rsp[0] : return address | 2453 // rsp[0] : return address |
| 2453 // rsp[8] : number of parameters | 2454 // rsp[8] : number of parameters |
| 2454 // rsp[16] : receiver displacement | 2455 // rsp[16] : receiver displacement |
| 2455 // rsp[24] : function | 2456 // rsp[24] : function |
| 2456 | 2457 |
| 2457 // Check if the calling frame is an arguments adaptor frame. | 2458 // Check if the calling frame is an arguments adaptor frame. |
| 2458 Label runtime; | 2459 Label runtime; |
| 2459 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 2460 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 2460 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 2461 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
| 2461 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 2462 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 2462 __ j(not_equal, &runtime); | 2463 __ j(not_equal, &runtime); |
| 2463 | 2464 |
| 2464 // Patch the arguments.length and the parameters pointer. | 2465 // Patch the arguments.length and the parameters pointer. |
| 2465 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2466 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 2466 __ movq(Operand(rsp, 1 * kPointerSize), rcx); | 2467 __ movq(StackOperandForArgument(1 * kPointerSize), rcx); |
| 2467 __ SmiToInteger64(rcx, rcx); | 2468 __ SmiToInteger64(rcx, rcx); |
| 2468 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, | 2469 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, |
| 2469 StandardFrameConstants::kCallerSPOffset)); | 2470 StandardFrameConstants::kCallerSPOffset)); |
| 2470 __ movq(Operand(rsp, 2 * kPointerSize), rdx); | 2471 __ movq(StackOperandForArgument(2 * kPointerSize), rdx); |
| 2471 | 2472 |
| 2472 __ bind(&runtime); | 2473 __ bind(&runtime); |
| 2473 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); | 2474 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); |
| 2474 } | 2475 } |
| 2475 | 2476 |
| 2476 | 2477 |
| 2477 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | 2478 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { |
| 2478 // rsp[0] : return address | 2479 // rsp[0] : return address |
| 2479 // rsp[8] : number of parameters | 2480 // rsp[8] : number of parameters |
| 2480 // rsp[16] : receiver displacement | 2481 // rsp[16] : receiver displacement |
| 2481 // rsp[24] : function | 2482 // rsp[24] : function |
| 2482 | 2483 |
| 2483 // Check if the calling frame is an arguments adaptor frame. | 2484 // Check if the calling frame is an arguments adaptor frame. |
| 2484 Label adaptor_frame, try_allocate, runtime; | 2485 Label adaptor_frame, try_allocate, runtime; |
| 2485 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 2486 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 2486 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); | 2487 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); |
| 2487 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 2488 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 2488 __ j(equal, &adaptor_frame); | 2489 __ j(equal, &adaptor_frame); |
| 2489 | 2490 |
| 2490 // Get the length from the frame. | 2491 // Get the length from the frame. |
| 2491 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 2492 __ movq(rcx, StackOperandForArgument(1 * kPointerSize)); |
| 2492 __ SmiToInteger64(rcx, rcx); | 2493 __ SmiToInteger64(rcx, rcx); |
| 2493 __ jmp(&try_allocate); | 2494 __ jmp(&try_allocate); |
| 2494 | 2495 |
| 2495 // Patch the arguments.length and the parameters pointer. | 2496 // Patch the arguments.length and the parameters pointer. |
| 2496 __ bind(&adaptor_frame); | 2497 __ bind(&adaptor_frame); |
| 2497 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2498 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 2498 __ movq(Operand(rsp, 1 * kPointerSize), rcx); | 2499 __ movq(StackOperandForArgument(1 * kPointerSize), rcx); |
| 2499 __ SmiToInteger64(rcx, rcx); | 2500 __ SmiToInteger64(rcx, rcx); |
| 2500 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, | 2501 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, |
| 2501 StandardFrameConstants::kCallerSPOffset)); | 2502 StandardFrameConstants::kCallerSPOffset)); |
| 2502 __ movq(Operand(rsp, 2 * kPointerSize), rdx); | 2503 __ movq(StackOperandForArgument(2 * kPointerSize), rdx); |
| 2503 | 2504 |
| 2504 // Try the new space allocation. Start out with computing the size of | 2505 // Try the new space allocation. Start out with computing the size of |
| 2505 // the arguments object and the elements array. | 2506 // the arguments object and the elements array. |
| 2506 Label add_arguments_object; | 2507 Label add_arguments_object; |
| 2507 __ bind(&try_allocate); | 2508 __ bind(&try_allocate); |
| 2508 __ testq(rcx, rcx); | 2509 __ testq(rcx, rcx); |
| 2509 __ j(zero, &add_arguments_object, Label::kNear); | 2510 __ j(zero, &add_arguments_object, Label::kNear); |
| 2510 __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize)); | 2511 __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize)); |
| 2511 __ bind(&add_arguments_object); | 2512 __ bind(&add_arguments_object); |
| 2512 __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict)); | 2513 __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict)); |
| 2513 | 2514 |
| 2514 // Do the allocation of both objects in one go. | 2515 // Do the allocation of both objects in one go. |
| 2515 __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT); | 2516 __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT); |
| 2516 | 2517 |
| 2517 // Get the arguments boilerplate from the current native context. | 2518 // Get the arguments boilerplate from the current native context. |
| 2518 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 2519 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| 2519 __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset)); | 2520 __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset)); |
| 2520 const int offset = | 2521 const int offset = |
| 2521 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX); | 2522 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX); |
| 2522 __ movq(rdi, Operand(rdi, offset)); | 2523 __ movq(rdi, Operand(rdi, offset)); |
| 2523 | 2524 |
| 2524 // Copy the JS object part. | 2525 // Copy the JS object part. |
| 2525 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { | 2526 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { |
| 2526 __ movq(rbx, FieldOperand(rdi, i)); | 2527 __ movq(rbx, FieldOperand(rdi, i)); |
| 2527 __ movq(FieldOperand(rax, i), rbx); | 2528 __ movq(FieldOperand(rax, i), rbx); |
| 2528 } | 2529 } |
| 2529 | 2530 |
| 2530 // Get the length (smi tagged) and set that as an in-object property too. | 2531 // Get the length (smi tagged) and set that as an in-object property too. |
| 2531 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 2532 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
| 2532 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 2533 __ movq(rcx, StackOperandForArgument(1 * kPointerSize)); |
| 2533 __ movq(FieldOperand(rax, JSObject::kHeaderSize + | 2534 __ movq(FieldOperand(rax, JSObject::kHeaderSize + |
| 2534 Heap::kArgumentsLengthIndex * kPointerSize), | 2535 Heap::kArgumentsLengthIndex * kPointerSize), |
| 2535 rcx); | 2536 rcx); |
| 2536 | 2537 |
| 2537 // If there are no actual arguments, we're done. | 2538 // If there are no actual arguments, we're done. |
| 2538 Label done; | 2539 Label done; |
| 2539 __ testq(rcx, rcx); | 2540 __ testq(rcx, rcx); |
| 2540 __ j(zero, &done); | 2541 __ j(zero, &done); |
| 2541 | 2542 |
| 2542 // Get the parameters pointer from the stack. | 2543 // Get the parameters pointer from the stack. |
| 2543 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); | 2544 __ movq(rdx, StackOperandForArgument(2 * kPointerSize)); |
| 2544 | 2545 |
| 2545 // Set up the elements pointer in the allocated arguments object and | 2546 // Set up the elements pointer in the allocated arguments object and |
| 2546 // initialize the header in the elements fixed array. | 2547 // initialize the header in the elements fixed array. |
| 2547 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict)); | 2548 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict)); |
| 2548 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); | 2549 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); |
| 2549 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); | 2550 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); |
| 2550 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); | 2551 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); |
| 2551 | 2552 |
| 2552 | 2553 |
| 2553 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); | 2554 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); |
| (...skipping 462 matching lines...) |
| 3016 __ movq(rdi, FieldOperand(rdi, SlicedString::kParentOffset)); | 3017 __ movq(rdi, FieldOperand(rdi, SlicedString::kParentOffset)); |
| 3017 __ jmp(&check_underlying); | 3018 __ jmp(&check_underlying); |
| 3018 #endif // V8_INTERPRETED_REGEXP | 3019 #endif // V8_INTERPRETED_REGEXP |
| 3019 } | 3020 } |
| 3020 | 3021 |
| 3021 | 3022 |
| 3022 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { | 3023 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { |
| 3023 const int kMaxInlineLength = 100; | 3024 const int kMaxInlineLength = 100; |
| 3024 Label slowcase; | 3025 Label slowcase; |
| 3025 Label done; | 3026 Label done; |
| 3026 __ movq(r8, Operand(rsp, kPointerSize * 3)); | 3027 __ movq(r8, StackOperandForArgument(3 * kPointerSize)); |
| 3027 __ JumpIfNotSmi(r8, &slowcase); | 3028 __ JumpIfNotSmi(r8, &slowcase); |
| 3028 __ SmiToInteger32(rbx, r8); | 3029 __ SmiToInteger32(rbx, r8); |
| 3029 __ cmpl(rbx, Immediate(kMaxInlineLength)); | 3030 __ cmpl(rbx, Immediate(kMaxInlineLength)); |
| 3030 __ j(above, &slowcase); | 3031 __ j(above, &slowcase); |
| 3031 // Smi-tagging is equivalent to multiplying by 2. | 3032 // Smi-tagging is equivalent to multiplying by 2. |
| 3032 STATIC_ASSERT(kSmiTag == 0); | 3033 STATIC_ASSERT(kSmiTag == 0); |
| 3033 STATIC_ASSERT(kSmiTagSize == 1); | 3034 STATIC_ASSERT(kSmiTagSize == 1); |
| 3034 // Allocate RegExpResult followed by FixedArray with size in rbx. | 3035 // Allocate RegExpResult followed by FixedArray with size in rbx. |
| 3035 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] | 3036 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] |
| 3036 // Elements: [Map][Length][..elements..] | 3037 // Elements: [Map][Length][..elements..] |
| (...skipping 17 matching lines...) |
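The `STATIC_ASSERT`s above pin down the smi encoding this size computation relies on (`kSmiTag == 0`, `kSmiTagSize == 1`): tagging a small integer is a left shift by one, i.e. multiplication by 2, leaving the low bit clear. A standalone sketch of that 1-bit tag scheme:

```cpp
#include <cassert>
#include <cstdint>

int64_t SmiTag(int64_t value) { return value << 1; }  // value * 2
int64_t SmiUntag(int64_t smi) { return smi >> 1; }

int main() {
  assert(SmiTag(100) == 200);
  assert(SmiUntag(SmiTag(-7)) == -7);
  assert((SmiTag(123) & 1) == 0);  // low bit 0 marks a smi
  return 0;
}
```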
| 3054 | 3055 |
| 3055 // Set empty properties FixedArray. | 3056 // Set empty properties FixedArray. |
| 3056 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); | 3057 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); |
| 3057 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); | 3058 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); |
| 3058 | 3059 |
| 3059 // Set elements to point to FixedArray allocated right after the JSArray. | 3060 // Set elements to point to FixedArray allocated right after the JSArray. |
| 3060 __ lea(rcx, Operand(rax, JSRegExpResult::kSize)); | 3061 __ lea(rcx, Operand(rax, JSRegExpResult::kSize)); |
| 3061 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx); | 3062 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx); |
| 3062 | 3063 |
| 3063 // Set input, index and length fields from arguments. | 3064 // Set input, index and length fields from arguments. |
| 3064 __ movq(r8, Operand(rsp, kPointerSize * 1)); | 3065 __ movq(r8, StackOperandForArgument(1 * kPointerSize)); |
| 3065 __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8); | 3066 __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8); |
| 3066 __ movq(r8, Operand(rsp, kPointerSize * 2)); | 3067 __ movq(r8, StackOperandForArgument(2 * kPointerSize)); |
| 3067 __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8); | 3068 __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8); |
| 3068 __ movq(r8, Operand(rsp, kPointerSize * 3)); | 3069 __ movq(r8, StackOperandForArgument(3 * kPointerSize)); |
| 3069 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8); | 3070 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8); |
| 3070 | 3071 |
| 3071 // Fill out the elements FixedArray. | 3072 // Fill out the elements FixedArray. |
| 3072 // rax: JSArray. | 3073 // rax: JSArray. |
| 3073 // rcx: FixedArray. | 3074 // rcx: FixedArray. |
| 3074 // rbx: Number of elements in array as int32. | 3075 // rbx: Number of elements in array as int32. |
| 3075 | 3076 |
| 3076 // Set map. | 3077 // Set map. |
| 3077 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); | 3078 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); |
| 3078 __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister); | 3079 __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister); |
| (...skipping 110 matching lines...) |
| 3189 // but times_twice_pointer_size (multiplication by 16) scale factor | 3190 // but times_twice_pointer_size (multiplication by 16) scale factor |
| 3190 // is not supported by addrmode on x64 platform. | 3191 // is not supported by addrmode on x64 platform. |
| 3191 // So we have to premultiply entry index before lookup. | 3192 // So we have to premultiply entry index before lookup. |
| 3192 __ shl(hash, Immediate(kPointerSizeLog2 + 1)); | 3193 __ shl(hash, Immediate(kPointerSizeLog2 + 1)); |
| 3193 } | 3194 } |
| 3194 | 3195 |
| 3195 | 3196 |
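The shift above premultiplies the entry index because x64 addressing modes scale an index register by at most 8 (`times_8`), so a two-pointer (16-byte) entry stride must be folded into the index first. A quick check of the arithmetic:

```cpp
#include <cassert>

int main() {
  const int kPointerSizeLog2 = 3;  // 8-byte pointers
  const int kPointerSize = 1 << kPointerSizeLog2;
  for (int index = 0; index < 1024; index++) {
    // shl(hash, kPointerSizeLog2 + 1) multiplies by 2 * kPointerSize.
    assert((index << (kPointerSizeLog2 + 1)) == index * 2 * kPointerSize);
  }
  return 0;
}
```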
| 3196 void NumberToStringStub::Generate(MacroAssembler* masm) { | 3197 void NumberToStringStub::Generate(MacroAssembler* masm) { |
| 3197 Label runtime; | 3198 Label runtime; |
| 3198 | 3199 |
| 3199 __ movq(rbx, Operand(rsp, kPointerSize)); | 3200 __ movq(rbx, StackOperandForArgument(1 * kPointerSize)); |
| 3200 | 3201 |
| 3201 // Generate code to lookup number in the number string cache. | 3202 // Generate code to lookup number in the number string cache. |
| 3202 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, &runtime); | 3203 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, &runtime); |
| 3203 __ ret(1 * kPointerSize); | 3204 __ ret(1 * kPointerSize); |
| 3204 | 3205 |
| 3205 __ bind(&runtime); | 3206 __ bind(&runtime); |
| 3206 // Handle number to string in the runtime system if not found in the cache. | 3207 // Handle number to string in the runtime system if not found in the cache. |
| 3207 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); | 3208 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); |
| 3208 } | 3209 } |
| 3209 | 3210 |
| (...skipping 399 matching lines...) |
| 3609 Isolate* isolate = masm->isolate(); | 3610 Isolate* isolate = masm->isolate(); |
| 3610 Label slow, non_function; | 3611 Label slow, non_function; |
| 3611 | 3612 |
| 3612 // The receiver might implicitly be the global object. This is | 3613 // The receiver might implicitly be the global object. This is |
| 3613 // indicated by passing the hole as the receiver to the call | 3614 // indicated by passing the hole as the receiver to the call |
| 3614 // function stub. | 3615 // function stub. |
| 3615 if (ReceiverMightBeImplicit()) { | 3616 if (ReceiverMightBeImplicit()) { |
| 3616 Label call; | 3617 Label call; |
| 3617 // Get the receiver from the stack. | 3618 // Get the receiver from the stack. |
| 3618 // +1 ~ return address | 3619 // +1 ~ return address |
| 3619 __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize)); | 3620 __ movq(rax, StackOperandForArgument((argc_ + 1) * kPointerSize)); |
| 3620 // Call as function is indicated with the hole. | 3621 // Call as function is indicated with the hole. |
| 3621 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); | 3622 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); |
| 3622 __ j(not_equal, &call, Label::kNear); | 3623 __ j(not_equal, &call, Label::kNear); |
| 3623 // Patch the receiver on the stack with the global receiver object. | 3624 // Patch the receiver on the stack with the global receiver object. |
| 3624 __ movq(rcx, GlobalObjectOperand()); | 3625 __ movq(rcx, GlobalObjectOperand()); |
| 3625 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset)); | 3626 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset)); |
| 3626 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rcx); | 3627 __ movq(StackOperandForArgument((argc_ + 1) * kPointerSize), rcx); |
| 3627 __ bind(&call); | 3628 __ bind(&call); |
| 3628 } | 3629 } |
| 3629 | 3630 |
| 3630 // Check that the function really is a JavaScript function. | 3631 // Check that the function really is a JavaScript function. |
| 3631 __ JumpIfSmi(rdi, &non_function); | 3632 __ JumpIfSmi(rdi, &non_function); |
| 3632 // Goto slow case if we do not have a function. | 3633 // Goto slow case if we do not have a function. |
| 3633 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 3634 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
| 3634 __ j(not_equal, &slow); | 3635 __ j(not_equal, &slow); |
| 3635 | 3636 |
| 3636 if (RecordCallTarget()) { | 3637 if (RecordCallTarget()) { |
| (...skipping 41 matching lines...) |
| 3678 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); | 3679 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); |
| 3679 { | 3680 { |
| 3680 Handle<Code> adaptor = | 3681 Handle<Code> adaptor = |
| 3681 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 3682 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
| 3682 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 3683 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
| 3683 } | 3684 } |
| 3684 | 3685 |
| 3685 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | 3686 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
| 3686 // of the original receiver from the call site). | 3687 // of the original receiver from the call site). |
| 3687 __ bind(&non_function); | 3688 __ bind(&non_function); |
| 3688 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi); | 3689 __ movq(StackOperandForArgument((argc_ + 1) * kPointerSize), rdi); |
| 3689 __ Set(rax, argc_); | 3690 __ Set(rax, argc_); |
| 3690 __ Set(rbx, 0); | 3691 __ Set(rbx, 0); |
| 3691 __ SetCallKind(rcx, CALL_AS_METHOD); | 3692 __ SetCallKind(rcx, CALL_AS_METHOD); |
| 3692 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); | 3693 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); |
| 3693 Handle<Code> adaptor = | 3694 Handle<Code> adaptor = |
| 3694 Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline(); | 3695 Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline(); |
| 3695 __ Jump(adaptor, RelocInfo::CODE_TARGET); | 3696 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
| 3696 } | 3697 } |
| 3697 | 3698 |
| 3698 | 3699 |
| (...skipping 529 matching lines...) |
| 4228 __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space)); | 4229 __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space)); |
| 4229 __ JumpIfSmi(rax, &slow); | 4230 __ JumpIfSmi(rax, &slow); |
| 4230 | 4231 |
| 4231 // Check that the left hand is a JS object. Leave its map in rax. | 4232 // Check that the left hand is a JS object. Leave its map in rax. |
| 4232 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax); | 4233 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax); |
| 4233 __ j(below, &slow); | 4234 __ j(below, &slow); |
| 4234 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE); | 4235 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE); |
| 4235 __ j(above, &slow); | 4236 __ j(above, &slow); |
| 4236 | 4237 |
| 4237 // Get the prototype of the function. | 4238 // Get the prototype of the function. |
| 4238 __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space)); | 4239 __ movq(rdx, StackOperandForArgument(1 * kPointerSize + extra_stack_space)); |
| 4239 // rdx is function, rax is map. | 4240 // rdx is function, rax is map. |
| 4240 | 4241 |
| 4241 // If there is a call site cache don't look in the global cache, but do the | 4242 // If there is a call site cache don't look in the global cache, but do the |
| 4242 // real lookup and update the call site cache. | 4243 // real lookup and update the call site cache. |
| 4243 if (!HasCallSiteInlineCheck()) { | 4244 if (!HasCallSiteInlineCheck()) { |
| 4244 // Look up the function and the map in the instanceof cache. | 4245 // Look up the function and the map in the instanceof cache. |
| 4245 Label miss; | 4246 Label miss; |
| 4246 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); | 4247 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); |
| 4247 __ j(not_equal, &miss, Label::kNear); | 4248 __ j(not_equal, &miss, Label::kNear); |
| 4248 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); | 4249 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); |
| (...skipping 15 matching lines...) |
| 4264 // Register mapping: | 4265 // Register mapping: |
| 4265 // rax is object map. | 4266 // rax is object map. |
| 4266 // rdx is function. | 4267 // rdx is function. |
| 4267 // rbx is function prototype. | 4268 // rbx is function prototype. |
| 4268 if (!HasCallSiteInlineCheck()) { | 4269 if (!HasCallSiteInlineCheck()) { |
| 4269 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); | 4270 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); |
| 4270 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); | 4271 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); |
| 4271 } else { | 4272 } else { |
| 4272 // Get return address and delta to inlined map check. | 4273 // Get return address and delta to inlined map check. |
| 4273 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); | 4274 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); |
| 4274 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); | 4275 __ subq(kScratchRegister, StackOperandForArgument(1 * kPointerSize)); |
| 4275 if (FLAG_debug_code) { | 4276 if (FLAG_debug_code) { |
| 4276 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); | 4277 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); |
| 4277 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); | 4278 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); |
| 4278 __ Assert(equal, "InstanceofStub unexpected call site cache (check)."); | 4279 __ Assert(equal, "InstanceofStub unexpected call site cache (check)."); |
| 4279 } | 4280 } |
| 4280 __ movq(kScratchRegister, | 4281 __ movq(kScratchRegister, |
| 4281 Operand(kScratchRegister, kOffsetToMapCheckValue)); | 4282 Operand(kScratchRegister, kOffsetToMapCheckValue)); |
| 4282 __ movq(Operand(kScratchRegister, 0), rax); | 4283 __ movq(Operand(kScratchRegister, 0), rax); |
| 4283 } | 4284 } |
| 4284 | 4285 |
| (...skipping 20 matching lines...) |
| 4305 STATIC_ASSERT(kSmiTag == 0); | 4306 STATIC_ASSERT(kSmiTag == 0); |
| 4306 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); | 4307 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); |
| 4307 } else { | 4308 } else { |
| 4308 // Store offset of true in the root array at the inline check site. | 4309 // Store offset of true in the root array at the inline check site. |
| 4309 int true_offset = 0x100 + | 4310 int true_offset = 0x100 + |
| 4310 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; | 4311 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; |
| 4311 // Assert it is a 1-byte signed value. | 4312 // Assert it is a 1-byte signed value. |
| 4312 ASSERT(true_offset >= 0 && true_offset < 0x100); | 4313 ASSERT(true_offset >= 0 && true_offset < 0x100); |
| 4313 __ movl(rax, Immediate(true_offset)); | 4314 __ movl(rax, Immediate(true_offset)); |
| 4314 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); | 4315 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); |
| 4315 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); | 4316 __ subq(kScratchRegister, StackOperandForArgument(1 * kPointerSize)); |
| 4316 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); | 4317 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); |
| 4317 if (FLAG_debug_code) { | 4318 if (FLAG_debug_code) { |
| 4318 __ movl(rax, Immediate(kWordBeforeResultValue)); | 4319 __ movl(rax, Immediate(kWordBeforeResultValue)); |
| 4319 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); | 4320 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); |
| 4320 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)."); | 4321 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)."); |
| 4321 } | 4322 } |
| 4322 __ Set(rax, 0); | 4323 __ Set(rax, 0); |
| 4323 } | 4324 } |
| 4324 __ ret(2 * kPointerSize + extra_stack_space); | 4325 __ ret(2 * kPointerSize + extra_stack_space); |
| 4325 | 4326 |
| 4326 __ bind(&is_not_instance); | 4327 __ bind(&is_not_instance); |
| 4327 if (!HasCallSiteInlineCheck()) { | 4328 if (!HasCallSiteInlineCheck()) { |
| 4328 // We have to store a non-zero value in the cache. | 4329 // We have to store a non-zero value in the cache. |
| 4329 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); | 4330 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); |
| 4330 } else { | 4331 } else { |
| 4331 // Store offset of false in the root array at the inline check site. | 4332 // Store offset of false in the root array at the inline check site. |
| 4332 int false_offset = 0x100 + | 4333 int false_offset = 0x100 + |
| 4333 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; | 4334 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; |
| 4334 // Assert it is a 1-byte signed value. | 4335 // Assert it is a 1-byte signed value. |
| 4335 ASSERT(false_offset >= 0 && false_offset < 0x100); | 4336 ASSERT(false_offset >= 0 && false_offset < 0x100); |
| 4336 __ movl(rax, Immediate(false_offset)); | 4337 __ movl(rax, Immediate(false_offset)); |
| 4337 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); | 4338 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); |
| 4338 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); | 4339 __ subq(kScratchRegister, StackOperandForArgument(1 * kPointerSize)); |
| 4339 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); | 4340 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); |
| 4340 if (FLAG_debug_code) { | 4341 if (FLAG_debug_code) { |
| 4341 __ movl(rax, Immediate(kWordBeforeResultValue)); | 4342 __ movl(rax, Immediate(kWordBeforeResultValue)); |
| 4342 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); | 4343 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); |
| 4343 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); | 4344 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); |
| 4344 } | 4345 } |
| 4345 } | 4346 } |
| 4346 __ ret(2 * kPointerSize + extra_stack_space); | 4347 __ ret(2 * kPointerSize + extra_stack_space); |
| 4347 | 4348 |
| 4348 // Slow-case: Go through the JavaScript implementation. | 4349 // Slow-case: Go through the JavaScript implementation. |
| (...skipping 145 matching lines...) |
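In the inline-check paths of the instanceof hunks above, the patched `movb` can only carry a single byte, hence the assertions that `true_offset`/`false_offset` fit in `0..0xFF`. A worked instance of that range check, with the root index and register bias assumed for illustration:

```cpp
#include <cassert>

int main() {
  const int kPointerSizeLog2 = 3;
  const int kRootRegisterBias = 128;   // assumed bias
  const int kTrueValueRootIndex = 7;   // assumed enum value
  int true_offset = 0x100 +
      (kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
  // Must fit in the one byte the patched mov writes.
  assert(true_offset >= 0 && true_offset < 0x100);
  return 0;
}
```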
| 4494 | 4495 |
| 4495 __ Abort("Unexpected fallthrough from CharFromCode slow case"); | 4496 __ Abort("Unexpected fallthrough from CharFromCode slow case"); |
| 4496 } | 4497 } |
| 4497 | 4498 |
| 4498 | 4499 |
| 4499 void StringAddStub::Generate(MacroAssembler* masm) { | 4500 void StringAddStub::Generate(MacroAssembler* masm) { |
| 4500 Label call_runtime, call_builtin; | 4501 Label call_runtime, call_builtin; |
| 4501 Builtins::JavaScript builtin_id = Builtins::ADD; | 4502 Builtins::JavaScript builtin_id = Builtins::ADD; |
| 4502 | 4503 |
| 4503 // Load the two arguments. | 4504 // Load the two arguments. |
| 4504 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left). | 4505 __ movq(rax, StackOperandForArgument(2 * kPointerSize)); // First (left). |
| 4505 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right). | 4506 __ movq(rdx, StackOperandForArgument(1 * kPointerSize)); // Second (right). |
| 4506 | 4507 |
| 4507 // Make sure that both arguments are strings if not known in advance. | 4508 // Make sure that both arguments are strings if not known in advance. |
| 4508 // Otherwise, at least one of the arguments is definitely a string, | 4509 // Otherwise, at least one of the arguments is definitely a string, |
| 4509 // and we convert the one that is not known to be a string. | 4510 // and we convert the one that is not known to be a string. |
| 4510 if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) { | 4511 if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) { |
| 4511 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT); | 4512 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT); |
| 4512 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT); | 4513 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT); |
| 4513 __ JumpIfSmi(rax, &call_runtime); | 4514 __ JumpIfSmi(rax, &call_runtime); |
| 4514 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8); | 4515 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8); |
| 4515 __ j(above_equal, &call_runtime); | 4516 __ j(above_equal, &call_runtime); |
| (...skipping 986 matching lines...) |
| 5502 | 5503 |
| 5503 | 5504 |
| 5504 void StringCompareStub::Generate(MacroAssembler* masm) { | 5505 void StringCompareStub::Generate(MacroAssembler* masm) { |
| 5505 Label runtime; | 5506 Label runtime; |
| 5506 | 5507 |
| 5507 // Stack frame on entry. | 5508 // Stack frame on entry. |
| 5508 // rsp[0] : return address | 5509 // rsp[0] : return address |
| 5509 // rsp[8] : right string | 5510 // rsp[8] : right string |
| 5510 // rsp[16] : left string | 5511 // rsp[16] : left string |
| 5511 | 5512 |
| 5512 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left | 5513 __ movq(rdx, StackOperandForArgument(2 * kPointerSize)); // left |
| 5513 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right | 5514 __ movq(rax, StackOperandForArgument(1 * kPointerSize)); // right |
| 5514 | 5515 |
| 5515 // Check for identity. | 5516 // Check for identity. |
| 5516 Label not_same; | 5517 Label not_same; |
| 5517 __ cmpq(rdx, rax); | 5518 __ cmpq(rdx, rax); |
| 5518 __ j(not_equal, ¬_same, Label::kNear); | 5519 __ j(not_equal, ¬_same, Label::kNear); |
| 5519 __ Move(rax, Smi::FromInt(EQUAL)); | 5520 __ Move(rax, Smi::FromInt(EQUAL)); |
| 5520 Counters* counters = masm->isolate()->counters(); | 5521 Counters* counters = masm->isolate()->counters(); |
| 5521 __ IncrementCounter(counters->string_compare_native(), 1); | 5522 __ IncrementCounter(counters->string_compare_native(), 1); |
| 5522 __ ret(2 * kPointerSize); | 5523 __ ret(2 * kPointerSize); |
| 5523 | 5524 |
| (...skipping 494 matching lines...) |
| 6018 __ decl(scratch); | 6019 __ decl(scratch); |
| 6019 __ push(scratch); | 6020 __ push(scratch); |
| 6020 | 6021 |
| 6021 // If the names of the slots in the range 1 to kProbes - 1 for the hash | 6022 // If the names of the slots in the range 1 to kProbes - 1 for the hash |
| 6022 // value are not equal to the name, and the kProbes-th slot is unused (its | 6023 // value are not equal to the name, and the kProbes-th slot is unused (its |
| 6023 // name is the undefined value), the hash table cannot contain the | 6024 // name is the undefined value), the hash table cannot contain the |
| 6024 // property. This holds even if some slots represent deleted properties | 6025 // property. This holds even if some slots represent deleted properties |
| 6025 // (their names are the null value). | 6026 // (their names are the null value). |
| 6026 for (int i = kInlinedProbes; i < kTotalProbes; i++) { | 6027 for (int i = kInlinedProbes; i < kTotalProbes; i++) { |
| 6027 // Compute the masked index: (hash + i + i * i) & mask. | 6028 // Compute the masked index: (hash + i + i * i) & mask. |
| 6028 __ movq(scratch, Operand(rsp, 2 * kPointerSize)); | 6029 __ movq(scratch, StackOperandForArgument(2 * kPointerSize)); |
| 6029 if (i > 0) { | 6030 if (i > 0) { |
| 6030 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i))); | 6031 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i))); |
| 6031 } | 6032 } |
| 6032 __ and_(scratch, Operand(rsp, 0)); | 6033 __ and_(scratch, Operand(rsp, 0)); |
| 6033 | 6034 |
| 6034 // Scale the index by multiplying by the entry size. | 6035 // Scale the index by multiplying by the entry size. |
| 6035 ASSERT(NameDictionary::kEntrySize == 3); | 6036 ASSERT(NameDictionary::kEntrySize == 3); |
| 6036 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. | 6037 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. |
| 6037 | 6038 |
| 6038 // An undefined entry here means the dictionary does not contain the name. | 6039 // An undefined entry here means the dictionary does not contain the name. |
| 6039 __ movq(scratch, Operand(dictionary_, | 6040 __ movq(scratch, Operand(dictionary_, |
| 6040 index_, | 6041 index_, |
| 6041 times_pointer_size, | 6042 times_pointer_size, |
| 6042 kElementsStartOffset - kHeapObjectTag)); | 6043 kElementsStartOffset - kHeapObjectTag)); |
| 6043 | 6044 |
| 6044 __ Cmp(scratch, masm->isolate()->factory()->undefined_value()); | 6045 __ Cmp(scratch, masm->isolate()->factory()->undefined_value()); |
| 6045 __ j(equal, &not_in_dictionary); | 6046 __ j(equal, &not_in_dictionary); |
| 6046 | 6047 |
| 6047 // Stop if we found the property. | 6048 // Stop if we found the property. |
| 6048 __ cmpq(scratch, Operand(rsp, 3 * kPointerSize)); | 6049 __ cmpq(scratch, StackOperandForArgument(3 * kPointerSize)); |
| 6049 __ j(equal, &in_dictionary); | 6050 __ j(equal, &in_dictionary); |
| 6050 | 6051 |
| 6051 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { | 6052 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { |
| 6052 // If we hit a key that is not a unique name during negative | 6053 // If we hit a key that is not a unique name during negative |
| 6053 // lookup, we have to bail out as this key might be equal to the | 6054 // lookup, we have to bail out as this key might be equal to the |
| 6054 // key we are looking for. | 6055 // key we are looking for. |
| 6055 | 6056 |
| 6056 // Check if the entry name is not a unique name. | 6057 // Check if the entry name is not a unique name. |
| 6057 __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 6058 __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
| 6058 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset), | 6059 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset), |
| (...skipping 331 matching lines...) |
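The loop above is ordinary quadratic probing: the capacity mask was pushed at rsp[0], the hash lives at rsp[2 * kPointerSize], and each entry is three words wide. The stub only runs probes kInlinedProbes through kTotalProbes - 1 and falls through to a "maybe" result when undecided; the idealized, unbounded version of one lookup might look like this (a sketch with placeholder names, not V8's actual helper):

    #include <cstdint>

    struct Object;                    // placeholder for a pointer-comparable key
    static const int kEntrySize = 3;  // matches the ASSERT in the stub

    // Returns the entry index holding 'name', or -1 once an undefined key is
    // found, since undefined terminates a probe sequence. Deleted entries
    // (null keys) are neither a match nor a terminator, so probing continues.
    int Probe(Object** elements, uint32_t mask, uint32_t hash, Object* name,
              Object* undefined_value) {
      for (uint32_t i = 0; ; i++) {
        uint32_t index = (hash + i + i * i) & mask;       // masked quadratic probe
        Object* key = elements[index * kEntrySize];       // key slot of entry
        if (key == undefined_value) return -1;            // not in dictionary
        if (key == name) return static_cast<int>(index);  // found it
      }
    }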
| 6390 // clobbers rbx, rdx, rdi | 6391 // clobbers rbx, rdx, rdi |
| 6391 // ----------------------------------- | 6392 // ----------------------------------- |
| 6392 | 6393 |
| 6393 Label element_done; | 6394 Label element_done; |
| 6394 Label double_elements; | 6395 Label double_elements; |
| 6395 Label smi_element; | 6396 Label smi_element; |
| 6396 Label slow_elements; | 6397 Label slow_elements; |
| 6397 Label fast_elements; | 6398 Label fast_elements; |
| 6398 | 6399 |
| 6399 // Get array literal index, array literal and its map. | 6400 // Get array literal index, array literal and its map. |
| 6400 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); | 6401 __ movq(rdx, StackOperandForArgument(1 * kPointerSize)); |
| 6401 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); | 6402 __ movq(rbx, StackOperandForArgument(2 * kPointerSize)); |
| 6402 __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset)); | 6403 __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset)); |
| 6403 | 6404 |
| 6404 __ CheckFastElements(rdi, &double_elements); | 6405 __ CheckFastElements(rdi, &double_elements); |
| 6405 | 6406 |
| 6406 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS | 6407 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS |
| 6407 __ JumpIfSmi(rax, &smi_element); | 6408 __ JumpIfSmi(rax, &smi_element); |
| 6408 __ CheckFastSmiElements(rdi, &fast_elements); | 6409 __ CheckFastSmiElements(rdi, &fast_elements); |
| 6409 | 6410 |
| 6410 // Storing into the array literal requires an elements transition. Call | 6411 // Storing into the array literal requires an elements transition. Call |
| 6411 // into the runtime. | 6412 // into the runtime. |
| (...skipping 148 matching lines...) |
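The dispatch above reduces to a four-way choice keyed on the array's elements kind and on whether the incoming value is a Smi. As plain pseudologic (a sketch; the predicate names are hypothetical stand-ins for the CheckFastElements / CheckFastSmiElements checks):

    enum StorePath { kDoubleStore, kSmiStore, kBarrieredStore, kRuntimeTransition };

    StorePath ChooseStorePath(bool has_double_elements,
                              bool has_smi_only_elements,
                              bool value_is_smi) {
      if (has_double_elements) return kDoubleStore;        // FAST_*_DOUBLE_ELEMENTS
      if (value_is_smi) return kSmiStore;                  // a Smi fits any fast kind
      if (!has_smi_only_elements) return kBarrieredStore;  // FAST_*_ELEMENTS
      // A heap object stored into FAST_*_SMI_ELEMENTS needs an elements kind
      // transition, which is left to the runtime.
      return kRuntimeTransition;
    }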
| 6560 Handle<Object> undefined_sentinel( | 6561 Handle<Object> undefined_sentinel( |
| 6561 masm->isolate()->heap()->undefined_value(), | 6562 masm->isolate()->heap()->undefined_value(), |
| 6562 masm->isolate()); | 6563 masm->isolate()); |
| 6563 | 6564 |
| 6564 // Is the low bit set? If so, the kind is already holey, which is good. | 6565 // Is the low bit set? If so, the kind is already holey, which is good. |
| 6565 __ testb(rdx, Immediate(1)); | 6566 __ testb(rdx, Immediate(1)); |
| 6566 Label normal_sequence; | 6567 Label normal_sequence; |
| 6567 __ j(not_zero, &normal_sequence); | 6568 __ j(not_zero, &normal_sequence); |
| 6568 | 6569 |
| 6569 // Look at the first argument. | 6570 // Look at the first argument. |
| 6570 __ movq(rcx, Operand(rsp, kPointerSize)); | 6571 __ movq(rcx, StackOperandForArgument(1 * kPointerSize)); |
| 6571 __ testq(rcx, rcx); | 6572 __ testq(rcx, rcx); |
| 6572 __ j(zero, &normal_sequence); | 6573 __ j(zero, &normal_sequence); |
| 6573 | 6574 |
| 6574 // We are going to create a holey array, but our kind is non-holey. | 6575 // We are going to create a holey array, but our kind is non-holey. |
| 6575 // Fix kind and retry (only if we have an allocation site in the cell). | 6576 // Fix kind and retry (only if we have an allocation site in the cell). |
| 6576 __ incl(rdx); | 6577 __ incl(rdx); |
| 6577 __ Cmp(rbx, undefined_sentinel); | 6578 __ Cmp(rbx, undefined_sentinel); |
| 6578 __ j(equal, &normal_sequence); | 6579 __ j(equal, &normal_sequence); |
| 6579 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); | 6580 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); |
| 6580 Handle<Map> allocation_site_map( | 6581 Handle<Map> allocation_site_map( |
| (...skipping 158 matching lines...) |
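The testb/incl pair above depends on the fast ElementsKind numbering: each packed kind is even and its holey variant is the next enum value, so the low bit distinguishes holey from packed and a single increment converts one into the other. As a sketch (assuming that numbering, which is exactly what the stub relies on):

    // testb(rdx, Immediate(1)) is the (kind & 1) test; incl(rdx) is the + 1.
    int ToHoleyKind(int kind) {
      return (kind & 1) ? kind       // already holey, nothing to do
                        : kind + 1;  // packed kind -> matching holey kind
    }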
| 6739 InternalArrayNoArgumentConstructorStub stub0(kind); | 6740 InternalArrayNoArgumentConstructorStub stub0(kind); |
| 6740 __ TailCallStub(&stub0); | 6741 __ TailCallStub(&stub0); |
| 6741 | 6742 |
| 6742 __ bind(&not_zero_case); | 6743 __ bind(&not_zero_case); |
| 6743 __ cmpl(rax, Immediate(1)); | 6744 __ cmpl(rax, Immediate(1)); |
| 6744 __ j(greater, &not_one_case); | 6745 __ j(greater, &not_one_case); |
| 6745 | 6746 |
| 6746 if (IsFastPackedElementsKind(kind)) { | 6747 if (IsFastPackedElementsKind(kind)) { |
| 6747 // We might need to create a holey array. | 6748 // We might need to create a holey array. |
| 6748 // Look at the first argument. | 6749 // Look at the first argument. |
| 6749 __ movq(rcx, Operand(rsp, kPointerSize)); | 6750 __ movq(rcx, StackOperandForArgument(1 * kPointerSize)); |
| 6750 __ testq(rcx, rcx); | 6751 __ testq(rcx, rcx); |
| 6751 __ j(zero, &normal_sequence); | 6752 __ j(zero, &normal_sequence); |
| 6752 | 6753 |
| 6753 InternalArraySingleArgumentConstructorStub | 6754 InternalArraySingleArgumentConstructorStub |
| 6754 stub1_holey(GetHoleyElementsKind(kind)); | 6755 stub1_holey(GetHoleyElementsKind(kind)); |
| 6755 __ TailCallStub(&stub1_holey); | 6756 __ TailCallStub(&stub1_holey); |
| 6756 } | 6757 } |
| 6757 | 6758 |
| 6758 __ bind(&normal_sequence); | 6759 __ bind(&normal_sequence); |
| 6759 InternalArraySingleArgumentConstructorStub stub1(kind); | 6760 InternalArraySingleArgumentConstructorStub stub1(kind); |
| (...skipping 56 matching lines...) |
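The visible part of this constructor selects a stub by argument count, and the one-argument case above may first upgrade a packed kind to its holey variant, since new Array(n) with n != 0 must produce a holey backing store. The shape of the dispatch (a sketch; names are hypothetical):

    enum StubChoice { kNoArgs, kSingleArgHoley, kSingleArg, kNArgs };

    StubChoice ChooseStub(int argc, bool is_packed_kind, bool first_arg_nonzero) {
      if (argc == 0) return kNoArgs;
      if (argc == 1) {
        // A nonzero length with a packed kind forces the holey stub, because
        // the freshly allocated backing store starts out full of holes.
        if (is_packed_kind && first_arg_nonzero) return kSingleArgHoley;
        return kSingleArg;
      }
      return kNArgs;  // the N-arguments stub handles everything else
    }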
| 6816 __ bind(&fast_elements_case); | 6817 __ bind(&fast_elements_case); |
| 6817 GenerateCase(masm, FAST_ELEMENTS); | 6818 GenerateCase(masm, FAST_ELEMENTS); |
| 6818 } | 6819 } |
| 6819 | 6820 |
| 6820 | 6821 |
| 6821 #undef __ | 6822 #undef __ |
| 6822 | 6823 |
| 6823 } } // namespace v8::internal | 6824 } } // namespace v8::internal |
| 6824 | 6825 |
| 6825 #endif // V8_TARGET_ARCH_X64 | 6826 #endif // V8_TARGET_ARCH_X64 |