OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
42 | 42 |
43 void ToNumberStub::Generate(MacroAssembler* masm) { | 43 void ToNumberStub::Generate(MacroAssembler* masm) { |
44 // The ToNumber stub takes one argument in eax. | 44 // The ToNumber stub takes one argument in eax. |
45 Label check_heap_number, call_builtin; | 45 Label check_heap_number, call_builtin; |
46 __ JumpIfNotSmi(eax, &check_heap_number, Label::kNear); | 46 __ JumpIfNotSmi(eax, &check_heap_number, Label::kNear); |
47 __ ret(0); | 47 __ ret(0); |
48 | 48 |
49 __ bind(&check_heap_number); | 49 __ bind(&check_heap_number); |
50 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); | 50 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); |
51 Factory* factory = masm->isolate()->factory(); | 51 Factory* factory = masm->isolate()->factory(); |
52 __ cmp(Operand(ebx), Immediate(factory->heap_number_map())); | 52 __ cmp(ebx, Immediate(factory->heap_number_map())); |
53 __ j(not_equal, &call_builtin, Label::kNear); | 53 __ j(not_equal, &call_builtin, Label::kNear); |
54 __ ret(0); | 54 __ ret(0); |
55 | 55 |
56 __ bind(&call_builtin); | 56 __ bind(&call_builtin); |
57 __ pop(ecx); // Pop return address. | 57 __ pop(ecx); // Pop return address. |
58 __ push(eax); | 58 __ push(eax); |
59 __ push(ecx); // Push return address. | 59 __ push(ecx); // Push return address. |
60 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); | 60 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); |
61 } | 61 } |
62 | 62 |
(...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
143 __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 143 __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
144 __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx); | 144 __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx); |
145 | 145 |
146 // Initialize the rest of the slots to undefined. | 146 // Initialize the rest of the slots to undefined. |
147 __ mov(ebx, factory->undefined_value()); | 147 __ mov(ebx, factory->undefined_value()); |
148 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { | 148 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { |
149 __ mov(Operand(eax, Context::SlotOffset(i)), ebx); | 149 __ mov(Operand(eax, Context::SlotOffset(i)), ebx); |
150 } | 150 } |
151 | 151 |
152 // Return and remove the on-stack parameter. | 152 // Return and remove the on-stack parameter. |
153 __ mov(esi, Operand(eax)); | 153 __ mov(esi, eax); |
154 __ ret(1 * kPointerSize); | 154 __ ret(1 * kPointerSize); |
155 | 155 |
156 // Need to collect. Call into runtime system. | 156 // Need to collect. Call into runtime system. |
157 __ bind(&gc); | 157 __ bind(&gc); |
158 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); | 158 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); |
159 } | 159 } |
160 | 160 |
161 | 161 |
162 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | 162 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { |
163 // Stack layout on entry: | 163 // Stack layout on entry: |
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
232 __ ret(3 * kPointerSize); | 232 __ ret(3 * kPointerSize); |
233 | 233 |
234 __ bind(&slow_case); | 234 __ bind(&slow_case); |
235 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 235 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
236 } | 236 } |
237 | 237 |
238 | 238 |
239 // The stub expects its argument on the stack and returns its result in tos_: | 239 // The stub expects its argument on the stack and returns its result in tos_: |
240 // zero for false, and a non-zero value for true. | 240 // zero for false, and a non-zero value for true. |
241 void ToBooleanStub::Generate(MacroAssembler* masm) { | 241 void ToBooleanStub::Generate(MacroAssembler* masm) { |
| 242 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
| 243 // we cannot call anything that could cause a GC from this stub. |
242 Label patch; | 244 Label patch; |
243 Factory* factory = masm->isolate()->factory(); | 245 Factory* factory = masm->isolate()->factory(); |
244 const Register argument = eax; | 246 const Register argument = eax; |
245 const Register map = edx; | 247 const Register map = edx; |
246 | 248 |
247 if (!types_.IsEmpty()) { | 249 if (!types_.IsEmpty()) { |
248 __ mov(argument, Operand(esp, 1 * kPointerSize)); | 250 __ mov(argument, Operand(esp, 1 * kPointerSize)); |
249 } | 251 } |
250 | 252 |
251 // undefined -> false | 253 // undefined -> false |
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
329 __ Set(tos_, Immediate(0)); | 331 __ Set(tos_, Immediate(0)); |
330 __ ret(1 * kPointerSize); | 332 __ ret(1 * kPointerSize); |
331 __ bind(¬_heap_number); | 333 __ bind(¬_heap_number); |
332 } | 334 } |
333 | 335 |
334 __ bind(&patch); | 336 __ bind(&patch); |
335 GenerateTypeTransition(masm); | 337 GenerateTypeTransition(masm); |
336 } | 338 } |
337 | 339 |
338 | 340 |
| 341 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
| 342 // We don't allow a GC during a store buffer overflow so there is no need to |
| 343 // store the registers in any particular way, but we do have to store and |
| 344 // restore them. |
| 345 __ pushad(); |
| 346 if (save_doubles_ == kSaveFPRegs) { |
| 347 CpuFeatures::Scope scope(SSE2); |
| 348 __ sub(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters)); |
| 349 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { |
| 350 XMMRegister reg = XMMRegister::from_code(i); |
| 351 __ movdbl(Operand(esp, i * kDoubleSize), reg); |
| 352 } |
| 353 } |
| 354 const int argument_count = 1; |
| 355 |
| 356 AllowExternalCallThatCantCauseGC scope(masm); |
| 357 __ PrepareCallCFunction(argument_count, ecx); |
| 358 __ mov(Operand(esp, 0 * kPointerSize), |
| 359 Immediate(ExternalReference::isolate_address())); |
| 360 __ CallCFunction( |
| 361 ExternalReference::store_buffer_overflow_function(masm->isolate()), |
| 362 argument_count); |
| 363 if (save_doubles_ == kSaveFPRegs) { |
| 364 CpuFeatures::Scope scope(SSE2); |
| 365 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { |
| 366 XMMRegister reg = XMMRegister::from_code(i); |
| 367 __ movdbl(reg, Operand(esp, i * kDoubleSize)); |
| 368 } |
| 369 __ add(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters)); |
| 370 } |
| 371 __ popad(); |
| 372 __ ret(0); |
| 373 } |
| 374 |
| 375 |
339 void ToBooleanStub::CheckOddball(MacroAssembler* masm, | 376 void ToBooleanStub::CheckOddball(MacroAssembler* masm, |
340 Type type, | 377 Type type, |
341 Heap::RootListIndex value, | 378 Heap::RootListIndex value, |
342 bool result) { | 379 bool result) { |
343 const Register argument = eax; | 380 const Register argument = eax; |
344 if (types_.Contains(type)) { | 381 if (types_.Contains(type)) { |
345 // If we see an expected oddball, return its ToBoolean value tos_. | 382 // If we see an expected oddball, return its ToBoolean value tos_. |
346 Label different_value; | 383 Label different_value; |
347 __ CompareRoot(argument, value); | 384 __ CompareRoot(argument, value); |
348 __ j(not_equal, &different_value, Label::kNear); | 385 __ j(not_equal, &different_value, Label::kNear); |
(...skipping 114 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
463 // Get exponent word. | 500 // Get exponent word. |
464 __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset)); | 501 __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset)); |
465 // Get exponent alone in scratch2. | 502 // Get exponent alone in scratch2. |
466 __ mov(scratch2, scratch); | 503 __ mov(scratch2, scratch); |
467 __ and_(scratch2, HeapNumber::kExponentMask); | 504 __ and_(scratch2, HeapNumber::kExponentMask); |
468 if (use_sse3) { | 505 if (use_sse3) { |
469 CpuFeatures::Scope scope(SSE3); | 506 CpuFeatures::Scope scope(SSE3); |
470 // Check whether the exponent is too big for a 64 bit signed integer. | 507 // Check whether the exponent is too big for a 64 bit signed integer. |
471 static const uint32_t kTooBigExponent = | 508 static const uint32_t kTooBigExponent = |
472 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift; | 509 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift; |
473 __ cmp(Operand(scratch2), Immediate(kTooBigExponent)); | 510 __ cmp(scratch2, Immediate(kTooBigExponent)); |
474 __ j(greater_equal, conversion_failure); | 511 __ j(greater_equal, conversion_failure); |
475 // Load x87 register with heap number. | 512 // Load x87 register with heap number. |
476 __ fld_d(FieldOperand(source, HeapNumber::kValueOffset)); | 513 __ fld_d(FieldOperand(source, HeapNumber::kValueOffset)); |
477 // Reserve space for 64 bit answer. | 514 // Reserve space for 64 bit answer. |
478 __ sub(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint. | 515 __ sub(esp, Immediate(sizeof(uint64_t))); // Nolint. |
479 // Do conversion, which cannot fail because we checked the exponent. | 516 // Do conversion, which cannot fail because we checked the exponent. |
480 __ fisttp_d(Operand(esp, 0)); | 517 __ fisttp_d(Operand(esp, 0)); |
481 __ mov(ecx, Operand(esp, 0)); // Load low word of answer into ecx. | 518 __ mov(ecx, Operand(esp, 0)); // Load low word of answer into ecx. |
482 __ add(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint. | 519 __ add(esp, Immediate(sizeof(uint64_t))); // Nolint. |
483 } else { | 520 } else { |
484 // Load ecx with zero. We use this either for the final shift or | 521 // Load ecx with zero. We use this either for the final shift or |
485 // for the answer. | 522 // for the answer. |
486 __ xor_(ecx, Operand(ecx)); | 523 __ xor_(ecx, ecx); |
487 // Check whether the exponent matches a 32 bit signed int that cannot be | 524 // Check whether the exponent matches a 32 bit signed int that cannot be |
488 // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the | 525 // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the |
489 // exponent is 30 (biased). This is the exponent that we are fastest at and | 526 // exponent is 30 (biased). This is the exponent that we are fastest at and |
490 // also the highest exponent we can handle here. | 527 // also the highest exponent we can handle here. |
491 const uint32_t non_smi_exponent = | 528 const uint32_t non_smi_exponent = |
492 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift; | 529 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift; |
493 __ cmp(Operand(scratch2), Immediate(non_smi_exponent)); | 530 __ cmp(scratch2, Immediate(non_smi_exponent)); |
494 // If we have a match of the int32-but-not-Smi exponent then skip some | 531 // If we have a match of the int32-but-not-Smi exponent then skip some |
495 // logic. | 532 // logic. |
496 __ j(equal, &right_exponent, Label::kNear); | 533 __ j(equal, &right_exponent, Label::kNear); |
497 // If the exponent is higher than that then go to slow case. This catches | 534 // If the exponent is higher than that then go to slow case. This catches |
498 // numbers that don't fit in a signed int32, infinities and NaNs. | 535 // numbers that don't fit in a signed int32, infinities and NaNs. |
499 __ j(less, &normal_exponent, Label::kNear); | 536 __ j(less, &normal_exponent, Label::kNear); |
500 | 537 |
501 { | 538 { |
502 // Handle a big exponent. The only reason we have this code is that the | 539 // Handle a big exponent. The only reason we have this code is that the |
503 // >>> operator has a tendency to generate numbers with an exponent of 31. | 540 // >>> operator has a tendency to generate numbers with an exponent of 31. |
504 const uint32_t big_non_smi_exponent = | 541 const uint32_t big_non_smi_exponent = |
505 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift; | 542 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift; |
506 __ cmp(Operand(scratch2), Immediate(big_non_smi_exponent)); | 543 __ cmp(scratch2, Immediate(big_non_smi_exponent)); |
507 __ j(not_equal, conversion_failure); | 544 __ j(not_equal, conversion_failure); |
508 // We have the big exponent, typically from >>>. This means the number is | 545 // We have the big exponent, typically from >>>. This means the number is |
509 // in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa. | 546 // in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa. |
510 __ mov(scratch2, scratch); | 547 __ mov(scratch2, scratch); |
511 __ and_(scratch2, HeapNumber::kMantissaMask); | 548 __ and_(scratch2, HeapNumber::kMantissaMask); |
512 // Put back the implicit 1. | 549 // Put back the implicit 1. |
513 __ or_(scratch2, 1 << HeapNumber::kExponentShift); | 550 __ or_(scratch2, 1 << HeapNumber::kExponentShift); |
514 // Shift up the mantissa bits to take up the space the exponent used to | 551 // Shift up the mantissa bits to take up the space the exponent used to |
515 // take. We just orred in the implicit bit so that took care of one and | 552 // take. We just orred in the implicit bit so that took care of one and |
516 // we want to use the full unsigned range so we subtract 1 bit from the | 553 // we want to use the full unsigned range so we subtract 1 bit from the |
517 // shift distance. | 554 // shift distance. |
518 const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1; | 555 const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1; |
519 __ shl(scratch2, big_shift_distance); | 556 __ shl(scratch2, big_shift_distance); |
520 // Get the second half of the double. | 557 // Get the second half of the double. |
521 __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset)); | 558 __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset)); |
522 // Shift down 21 bits to get the most significant 11 bits or the low | 559 // Shift down 21 bits to get the most significant 11 bits or the low |
523 // mantissa word. | 560 // mantissa word. |
524 __ shr(ecx, 32 - big_shift_distance); | 561 __ shr(ecx, 32 - big_shift_distance); |
525 __ or_(ecx, Operand(scratch2)); | 562 __ or_(ecx, scratch2); |
526 // We have the answer in ecx, but we may need to negate it. | 563 // We have the answer in ecx, but we may need to negate it. |
527 __ test(scratch, Operand(scratch)); | 564 __ test(scratch, scratch); |
528 __ j(positive, &done, Label::kNear); | 565 __ j(positive, &done, Label::kNear); |
529 __ neg(ecx); | 566 __ neg(ecx); |
530 __ jmp(&done, Label::kNear); | 567 __ jmp(&done, Label::kNear); |
531 } | 568 } |
532 | 569 |
533 __ bind(&normal_exponent); | 570 __ bind(&normal_exponent); |
534 // Exponent word in scratch, exponent part of exponent word in scratch2. | 571 // Exponent word in scratch, exponent part of exponent word in scratch2. |
535 // Zero in ecx. | 572 // Zero in ecx. |
536 // We know the exponent is smaller than 30 (biased). If it is less than | 573 // We know the exponent is smaller than 30 (biased). If it is less than |
537 // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie | 574 // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie |
538 // it rounds to zero. | 575 // it rounds to zero. |
539 const uint32_t zero_exponent = | 576 const uint32_t zero_exponent = |
540 (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift; | 577 (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift; |
541 __ sub(Operand(scratch2), Immediate(zero_exponent)); | 578 __ sub(scratch2, Immediate(zero_exponent)); |
542 // ecx already has a Smi zero. | 579 // ecx already has a Smi zero. |
543 __ j(less, &done, Label::kNear); | 580 __ j(less, &done, Label::kNear); |
544 | 581 |
545 // We have a shifted exponent between 0 and 30 in scratch2. | 582 // We have a shifted exponent between 0 and 30 in scratch2. |
546 __ shr(scratch2, HeapNumber::kExponentShift); | 583 __ shr(scratch2, HeapNumber::kExponentShift); |
547 __ mov(ecx, Immediate(30)); | 584 __ mov(ecx, Immediate(30)); |
548 __ sub(ecx, Operand(scratch2)); | 585 __ sub(ecx, scratch2); |
549 | 586 |
550 __ bind(&right_exponent); | 587 __ bind(&right_exponent); |
551 // Here ecx is the shift, scratch is the exponent word. | 588 // Here ecx is the shift, scratch is the exponent word. |
552 // Get the top bits of the mantissa. | 589 // Get the top bits of the mantissa. |
553 __ and_(scratch, HeapNumber::kMantissaMask); | 590 __ and_(scratch, HeapNumber::kMantissaMask); |
554 // Put back the implicit 1. | 591 // Put back the implicit 1. |
555 __ or_(scratch, 1 << HeapNumber::kExponentShift); | 592 __ or_(scratch, 1 << HeapNumber::kExponentShift); |
556 // Shift up the mantissa bits to take up the space the exponent used to | 593 // Shift up the mantissa bits to take up the space the exponent used to |
557 // take. We have kExponentShift + 1 significant bits int he low end of the | 594 // take. We have kExponentShift + 1 significant bits int he low end of the |
558 // word. Shift them to the top bits. | 595 // word. Shift them to the top bits. |
559 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2; | 596 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2; |
560 __ shl(scratch, shift_distance); | 597 __ shl(scratch, shift_distance); |
561 // Get the second half of the double. For some exponents we don't | 598 // Get the second half of the double. For some exponents we don't |
562 // actually need this because the bits get shifted out again, but | 599 // actually need this because the bits get shifted out again, but |
563 // it's probably slower to test than just to do it. | 600 // it's probably slower to test than just to do it. |
564 __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset)); | 601 __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset)); |
565 // Shift down 22 bits to get the most significant 10 bits or the low | 602 // Shift down 22 bits to get the most significant 10 bits or the low |
566 // mantissa word. | 603 // mantissa word. |
567 __ shr(scratch2, 32 - shift_distance); | 604 __ shr(scratch2, 32 - shift_distance); |
568 __ or_(scratch2, Operand(scratch)); | 605 __ or_(scratch2, scratch); |
569 // Move down according to the exponent. | 606 // Move down according to the exponent. |
570 __ shr_cl(scratch2); | 607 __ shr_cl(scratch2); |
571 // Now the unsigned answer is in scratch2. We need to move it to ecx and | 608 // Now the unsigned answer is in scratch2. We need to move it to ecx and |
572 // we may need to fix the sign. | 609 // we may need to fix the sign. |
573 Label negative; | 610 Label negative; |
574 __ xor_(ecx, Operand(ecx)); | 611 __ xor_(ecx, ecx); |
575 __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset)); | 612 __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset)); |
576 __ j(greater, &negative, Label::kNear); | 613 __ j(greater, &negative, Label::kNear); |
577 __ mov(ecx, scratch2); | 614 __ mov(ecx, scratch2); |
578 __ jmp(&done, Label::kNear); | 615 __ jmp(&done, Label::kNear); |
579 __ bind(&negative); | 616 __ bind(&negative); |
580 __ sub(ecx, Operand(scratch2)); | 617 __ sub(ecx, scratch2); |
581 __ bind(&done); | 618 __ bind(&done); |
582 } | 619 } |
583 } | 620 } |
584 | 621 |
585 | 622 |
586 void UnaryOpStub::PrintName(StringStream* stream) { | 623 void UnaryOpStub::PrintName(StringStream* stream) { |
587 const char* op_name = Token::Name(op_); | 624 const char* op_name = Token::Name(op_); |
588 const char* overwrite_name = NULL; // Make g++ happy. | 625 const char* overwrite_name = NULL; // Make g++ happy. |
589 switch (mode_) { | 626 switch (mode_) { |
590 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; | 627 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; |
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
672 Label* non_smi, | 709 Label* non_smi, |
673 Label* undo, | 710 Label* undo, |
674 Label* slow, | 711 Label* slow, |
675 Label::Distance non_smi_near, | 712 Label::Distance non_smi_near, |
676 Label::Distance undo_near, | 713 Label::Distance undo_near, |
677 Label::Distance slow_near) { | 714 Label::Distance slow_near) { |
678 // Check whether the value is a smi. | 715 // Check whether the value is a smi. |
679 __ JumpIfNotSmi(eax, non_smi, non_smi_near); | 716 __ JumpIfNotSmi(eax, non_smi, non_smi_near); |
680 | 717 |
681 // We can't handle -0 with smis, so use a type transition for that case. | 718 // We can't handle -0 with smis, so use a type transition for that case. |
682 __ test(eax, Operand(eax)); | 719 __ test(eax, eax); |
683 __ j(zero, slow, slow_near); | 720 __ j(zero, slow, slow_near); |
684 | 721 |
685 // Try optimistic subtraction '0 - value', saving operand in eax for undo. | 722 // Try optimistic subtraction '0 - value', saving operand in eax for undo. |
686 __ mov(edx, Operand(eax)); | 723 __ mov(edx, eax); |
687 __ Set(eax, Immediate(0)); | 724 __ Set(eax, Immediate(0)); |
688 __ sub(eax, Operand(edx)); | 725 __ sub(eax, edx); |
689 __ j(overflow, undo, undo_near); | 726 __ j(overflow, undo, undo_near); |
690 __ ret(0); | 727 __ ret(0); |
691 } | 728 } |
692 | 729 |
693 | 730 |
694 void UnaryOpStub::GenerateSmiCodeBitNot( | 731 void UnaryOpStub::GenerateSmiCodeBitNot( |
695 MacroAssembler* masm, | 732 MacroAssembler* masm, |
696 Label* non_smi, | 733 Label* non_smi, |
697 Label::Distance non_smi_near) { | 734 Label::Distance non_smi_near) { |
698 // Check whether the value is a smi. | 735 // Check whether the value is a smi. |
699 __ JumpIfNotSmi(eax, non_smi, non_smi_near); | 736 __ JumpIfNotSmi(eax, non_smi, non_smi_near); |
700 | 737 |
701 // Flip bits and revert inverted smi-tag. | 738 // Flip bits and revert inverted smi-tag. |
702 __ not_(eax); | 739 __ not_(eax); |
703 __ and_(eax, ~kSmiTagMask); | 740 __ and_(eax, ~kSmiTagMask); |
704 __ ret(0); | 741 __ ret(0); |
705 } | 742 } |
706 | 743 |
707 | 744 |
708 void UnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) { | 745 void UnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) { |
709 __ mov(eax, Operand(edx)); | 746 __ mov(eax, edx); |
710 } | 747 } |
711 | 748 |
712 | 749 |
713 // TODO(svenpanne): Use virtual functions instead of switch. | 750 // TODO(svenpanne): Use virtual functions instead of switch. |
714 void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | 751 void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
715 switch (op_) { | 752 switch (op_) { |
716 case Token::SUB: | 753 case Token::SUB: |
717 GenerateHeapNumberStubSub(masm); | 754 GenerateHeapNumberStubSub(masm); |
718 break; | 755 break; |
719 case Token::BIT_NOT: | 756 case Token::BIT_NOT: |
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
753 void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, | 790 void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
754 Label* slow) { | 791 Label* slow) { |
755 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); | 792 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); |
756 __ cmp(edx, masm->isolate()->factory()->heap_number_map()); | 793 __ cmp(edx, masm->isolate()->factory()->heap_number_map()); |
757 __ j(not_equal, slow); | 794 __ j(not_equal, slow); |
758 | 795 |
759 if (mode_ == UNARY_OVERWRITE) { | 796 if (mode_ == UNARY_OVERWRITE) { |
760 __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset), | 797 __ xor_(FieldOperand(eax, HeapNumber::kExponentOffset), |
761 Immediate(HeapNumber::kSignMask)); // Flip sign. | 798 Immediate(HeapNumber::kSignMask)); // Flip sign. |
762 } else { | 799 } else { |
763 __ mov(edx, Operand(eax)); | 800 __ mov(edx, eax); |
764 // edx: operand | 801 // edx: operand |
765 | 802 |
766 Label slow_allocate_heapnumber, heapnumber_allocated; | 803 Label slow_allocate_heapnumber, heapnumber_allocated; |
767 __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber); | 804 __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber); |
768 __ jmp(&heapnumber_allocated, Label::kNear); | 805 __ jmp(&heapnumber_allocated, Label::kNear); |
769 | 806 |
770 __ bind(&slow_allocate_heapnumber); | 807 __ bind(&slow_allocate_heapnumber); |
771 __ EnterInternalFrame(); | 808 { |
772 __ push(edx); | 809 FrameScope scope(masm, StackFrame::INTERNAL); |
773 __ CallRuntime(Runtime::kNumberAlloc, 0); | 810 __ push(edx); |
774 __ pop(edx); | 811 __ CallRuntime(Runtime::kNumberAlloc, 0); |
775 __ LeaveInternalFrame(); | 812 __ pop(edx); |
| 813 } |
776 | 814 |
777 __ bind(&heapnumber_allocated); | 815 __ bind(&heapnumber_allocated); |
778 // eax: allocated 'empty' number | 816 // eax: allocated 'empty' number |
779 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset)); | 817 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset)); |
780 __ xor_(ecx, HeapNumber::kSignMask); // Flip sign. | 818 __ xor_(ecx, HeapNumber::kSignMask); // Flip sign. |
781 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx); | 819 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx); |
782 __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset)); | 820 __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset)); |
783 __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx); | 821 __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx); |
784 } | 822 } |
785 __ ret(0); | 823 __ ret(0); |
(...skipping 22 matching lines...) Expand all Loading... |
808 | 846 |
809 // Try to store the result in a heap number. | 847 // Try to store the result in a heap number. |
810 __ bind(&try_float); | 848 __ bind(&try_float); |
811 if (mode_ == UNARY_NO_OVERWRITE) { | 849 if (mode_ == UNARY_NO_OVERWRITE) { |
812 Label slow_allocate_heapnumber, heapnumber_allocated; | 850 Label slow_allocate_heapnumber, heapnumber_allocated; |
813 __ mov(ebx, eax); | 851 __ mov(ebx, eax); |
814 __ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber); | 852 __ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber); |
815 __ jmp(&heapnumber_allocated); | 853 __ jmp(&heapnumber_allocated); |
816 | 854 |
817 __ bind(&slow_allocate_heapnumber); | 855 __ bind(&slow_allocate_heapnumber); |
818 __ EnterInternalFrame(); | 856 { |
819 // Push the original HeapNumber on the stack. The integer value can't | 857 FrameScope scope(masm, StackFrame::INTERNAL); |
820 // be stored since it's untagged and not in the smi range (so we can't | 858 // Push the original HeapNumber on the stack. The integer value can't |
821 // smi-tag it). We'll recalculate the value after the GC instead. | 859 // be stored since it's untagged and not in the smi range (so we can't |
822 __ push(ebx); | 860 // smi-tag it). We'll recalculate the value after the GC instead. |
823 __ CallRuntime(Runtime::kNumberAlloc, 0); | 861 __ push(ebx); |
824 // New HeapNumber is in eax. | 862 __ CallRuntime(Runtime::kNumberAlloc, 0); |
825 __ pop(edx); | 863 // New HeapNumber is in eax. |
826 __ LeaveInternalFrame(); | 864 __ pop(edx); |
| 865 } |
827 // IntegerConvert uses ebx and edi as scratch registers. | 866 // IntegerConvert uses ebx and edi as scratch registers. |
828 // This conversion won't go slow-case. | 867 // This conversion won't go slow-case. |
829 IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow); | 868 IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow); |
830 __ not_(ecx); | 869 __ not_(ecx); |
831 | 870 |
832 __ bind(&heapnumber_allocated); | 871 __ bind(&heapnumber_allocated); |
833 } | 872 } |
834 if (CpuFeatures::IsSupported(SSE2)) { | 873 if (CpuFeatures::IsSupported(SSE2)) { |
835 CpuFeatures::Scope use_sse2(SSE2); | 874 CpuFeatures::Scope use_sse2(SSE2); |
836 __ cvtsi2sd(xmm0, Operand(ecx)); | 875 __ cvtsi2sd(xmm0, ecx); |
837 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); | 876 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); |
838 } else { | 877 } else { |
839 __ push(ecx); | 878 __ push(ecx); |
840 __ fild_s(Operand(esp, 0)); | 879 __ fild_s(Operand(esp, 0)); |
841 __ pop(ecx); | 880 __ pop(ecx); |
842 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); | 881 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); |
843 } | 882 } |
844 __ ret(0); | 883 __ ret(0); |
845 } | 884 } |
846 | 885 |
(...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
940 // operation result to the caller of the stub. | 979 // operation result to the caller of the stub. |
941 __ TailCallExternalReference( | 980 __ TailCallExternalReference( |
942 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), | 981 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), |
943 masm->isolate()), | 982 masm->isolate()), |
944 5, | 983 5, |
945 1); | 984 1); |
946 } | 985 } |
947 | 986 |
948 | 987 |
949 void BinaryOpStub::Generate(MacroAssembler* masm) { | 988 void BinaryOpStub::Generate(MacroAssembler* masm) { |
| 989 // Explicitly allow generation of nested stubs. It is safe here because |
| 990 // generation code does not use any raw pointers. |
| 991 AllowStubCallsScope allow_stub_calls(masm, true); |
| 992 |
950 switch (operands_type_) { | 993 switch (operands_type_) { |
951 case BinaryOpIC::UNINITIALIZED: | 994 case BinaryOpIC::UNINITIALIZED: |
952 GenerateTypeTransition(masm); | 995 GenerateTypeTransition(masm); |
953 break; | 996 break; |
954 case BinaryOpIC::SMI: | 997 case BinaryOpIC::SMI: |
955 GenerateSmiStub(masm); | 998 GenerateSmiStub(masm); |
956 break; | 999 break; |
957 case BinaryOpIC::INT32: | 1000 case BinaryOpIC::INT32: |
958 GenerateInt32Stub(masm); | 1001 GenerateInt32Stub(masm); |
959 break; | 1002 break; |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1015 Comment smi_check_comment(masm, "-- Smi check arguments"); | 1058 Comment smi_check_comment(masm, "-- Smi check arguments"); |
1016 Label not_smis; | 1059 Label not_smis; |
1017 Register combined = ecx; | 1060 Register combined = ecx; |
1018 ASSERT(!left.is(combined) && !right.is(combined)); | 1061 ASSERT(!left.is(combined) && !right.is(combined)); |
1019 switch (op_) { | 1062 switch (op_) { |
1020 case Token::BIT_OR: | 1063 case Token::BIT_OR: |
1021 // Perform the operation into eax and smi check the result. Preserve | 1064 // Perform the operation into eax and smi check the result. Preserve |
1022 // eax in case the result is not a smi. | 1065 // eax in case the result is not a smi. |
1023 ASSERT(!left.is(ecx) && !right.is(ecx)); | 1066 ASSERT(!left.is(ecx) && !right.is(ecx)); |
1024 __ mov(ecx, right); | 1067 __ mov(ecx, right); |
1025 __ or_(right, Operand(left)); // Bitwise or is commutative. | 1068 __ or_(right, left); // Bitwise or is commutative. |
1026 combined = right; | 1069 combined = right; |
1027 break; | 1070 break; |
1028 | 1071 |
1029 case Token::BIT_XOR: | 1072 case Token::BIT_XOR: |
1030 case Token::BIT_AND: | 1073 case Token::BIT_AND: |
1031 case Token::ADD: | 1074 case Token::ADD: |
1032 case Token::SUB: | 1075 case Token::SUB: |
1033 case Token::MUL: | 1076 case Token::MUL: |
1034 case Token::DIV: | 1077 case Token::DIV: |
1035 case Token::MOD: | 1078 case Token::MOD: |
1036 __ mov(combined, right); | 1079 __ mov(combined, right); |
1037 __ or_(combined, Operand(left)); | 1080 __ or_(combined, left); |
1038 break; | 1081 break; |
1039 | 1082 |
1040 case Token::SHL: | 1083 case Token::SHL: |
1041 case Token::SAR: | 1084 case Token::SAR: |
1042 case Token::SHR: | 1085 case Token::SHR: |
1043 // Move the right operand into ecx for the shift operation, use eax | 1086 // Move the right operand into ecx for the shift operation, use eax |
1044 // for the smi check register. | 1087 // for the smi check register. |
1045 ASSERT(!left.is(ecx) && !right.is(ecx)); | 1088 ASSERT(!left.is(ecx) && !right.is(ecx)); |
1046 __ mov(ecx, right); | 1089 __ mov(ecx, right); |
1047 __ or_(right, Operand(left)); | 1090 __ or_(right, left); |
1048 combined = right; | 1091 combined = right; |
1049 break; | 1092 break; |
1050 | 1093 |
1051 default: | 1094 default: |
1052 break; | 1095 break; |
1053 } | 1096 } |
1054 | 1097 |
1055 // 3. Perform the smi check of the operands. | 1098 // 3. Perform the smi check of the operands. |
1056 STATIC_ASSERT(kSmiTag == 0); // Adjust zero check if not the case. | 1099 STATIC_ASSERT(kSmiTag == 0); // Adjust zero check if not the case. |
1057 __ JumpIfNotSmi(combined, ¬_smis); | 1100 __ JumpIfNotSmi(combined, ¬_smis); |
1058 | 1101 |
1059 // 4. Operands are both smis, perform the operation leaving the result in | 1102 // 4. Operands are both smis, perform the operation leaving the result in |
1060 // eax and check the result if necessary. | 1103 // eax and check the result if necessary. |
1061 Comment perform_smi(masm, "-- Perform smi operation"); | 1104 Comment perform_smi(masm, "-- Perform smi operation"); |
1062 Label use_fp_on_smis; | 1105 Label use_fp_on_smis; |
1063 switch (op_) { | 1106 switch (op_) { |
1064 case Token::BIT_OR: | 1107 case Token::BIT_OR: |
1065 // Nothing to do. | 1108 // Nothing to do. |
1066 break; | 1109 break; |
1067 | 1110 |
1068 case Token::BIT_XOR: | 1111 case Token::BIT_XOR: |
1069 ASSERT(right.is(eax)); | 1112 ASSERT(right.is(eax)); |
1070 __ xor_(right, Operand(left)); // Bitwise xor is commutative. | 1113 __ xor_(right, left); // Bitwise xor is commutative. |
1071 break; | 1114 break; |
1072 | 1115 |
1073 case Token::BIT_AND: | 1116 case Token::BIT_AND: |
1074 ASSERT(right.is(eax)); | 1117 ASSERT(right.is(eax)); |
1075 __ and_(right, Operand(left)); // Bitwise and is commutative. | 1118 __ and_(right, left); // Bitwise and is commutative. |
1076 break; | 1119 break; |
1077 | 1120 |
1078 case Token::SHL: | 1121 case Token::SHL: |
1079 // Remove tags from operands (but keep sign). | 1122 // Remove tags from operands (but keep sign). |
1080 __ SmiUntag(left); | 1123 __ SmiUntag(left); |
1081 __ SmiUntag(ecx); | 1124 __ SmiUntag(ecx); |
1082 // Perform the operation. | 1125 // Perform the operation. |
1083 __ shl_cl(left); | 1126 __ shl_cl(left); |
1084 // Check that the *signed* result fits in a smi. | 1127 // Check that the *signed* result fits in a smi. |
1085 __ cmp(left, 0xc0000000); | 1128 __ cmp(left, 0xc0000000); |
(...skipping 28 matching lines...) Expand all Loading... |
1114 // by 0 or 1 when handed a valid smi. | 1157 // by 0 or 1 when handed a valid smi. |
1115 __ test(left, Immediate(0xc0000000)); | 1158 __ test(left, Immediate(0xc0000000)); |
1116 __ j(not_zero, &use_fp_on_smis); | 1159 __ j(not_zero, &use_fp_on_smis); |
1117 // Tag the result and store it in register eax. | 1160 // Tag the result and store it in register eax. |
1118 __ SmiTag(left); | 1161 __ SmiTag(left); |
1119 __ mov(eax, left); | 1162 __ mov(eax, left); |
1120 break; | 1163 break; |
1121 | 1164 |
1122 case Token::ADD: | 1165 case Token::ADD: |
1123 ASSERT(right.is(eax)); | 1166 ASSERT(right.is(eax)); |
1124 __ add(right, Operand(left)); // Addition is commutative. | 1167 __ add(right, left); // Addition is commutative. |
1125 __ j(overflow, &use_fp_on_smis); | 1168 __ j(overflow, &use_fp_on_smis); |
1126 break; | 1169 break; |
1127 | 1170 |
1128 case Token::SUB: | 1171 case Token::SUB: |
1129 __ sub(left, Operand(right)); | 1172 __ sub(left, right); |
1130 __ j(overflow, &use_fp_on_smis); | 1173 __ j(overflow, &use_fp_on_smis); |
1131 __ mov(eax, left); | 1174 __ mov(eax, left); |
1132 break; | 1175 break; |
1133 | 1176 |
1134 case Token::MUL: | 1177 case Token::MUL: |
1135 // If the smi tag is 0 we can just leave the tag on one operand. | 1178 // If the smi tag is 0 we can just leave the tag on one operand. |
1136 STATIC_ASSERT(kSmiTag == 0); // Adjust code below if not the case. | 1179 STATIC_ASSERT(kSmiTag == 0); // Adjust code below if not the case. |
1137 // We can't revert the multiplication if the result is not a smi | 1180 // We can't revert the multiplication if the result is not a smi |
1138 // so save the right operand. | 1181 // so save the right operand. |
1139 __ mov(ebx, right); | 1182 __ mov(ebx, right); |
1140 // Remove tag from one of the operands (but keep sign). | 1183 // Remove tag from one of the operands (but keep sign). |
1141 __ SmiUntag(right); | 1184 __ SmiUntag(right); |
1142 // Do multiplication. | 1185 // Do multiplication. |
1143 __ imul(right, Operand(left)); // Multiplication is commutative. | 1186 __ imul(right, left); // Multiplication is commutative. |
1144 __ j(overflow, &use_fp_on_smis); | 1187 __ j(overflow, &use_fp_on_smis); |
1145 // Check for negative zero result. Use combined = left | right. | 1188 // Check for negative zero result. Use combined = left | right. |
1146 __ NegativeZeroTest(right, combined, &use_fp_on_smis); | 1189 __ NegativeZeroTest(right, combined, &use_fp_on_smis); |
1147 break; | 1190 break; |
1148 | 1191 |
1149 case Token::DIV: | 1192 case Token::DIV: |
1150 // We can't revert the division if the result is not a smi so | 1193 // We can't revert the division if the result is not a smi so |
1151 // save the left operand. | 1194 // save the left operand. |
1152 __ mov(edi, left); | 1195 __ mov(edi, left); |
1153 // Check for 0 divisor. | 1196 // Check for 0 divisor. |
1154 __ test(right, Operand(right)); | 1197 __ test(right, right); |
1155 __ j(zero, &use_fp_on_smis); | 1198 __ j(zero, &use_fp_on_smis); |
1156 // Sign extend left into edx:eax. | 1199 // Sign extend left into edx:eax. |
1157 ASSERT(left.is(eax)); | 1200 ASSERT(left.is(eax)); |
1158 __ cdq(); | 1201 __ cdq(); |
1159 // Divide edx:eax by right. | 1202 // Divide edx:eax by right. |
1160 __ idiv(right); | 1203 __ idiv(right); |
1161 // Check for the corner case of dividing the most negative smi by | 1204 // Check for the corner case of dividing the most negative smi by |
1162 // -1. We cannot use the overflow flag, since it is not set by idiv | 1205 // -1. We cannot use the overflow flag, since it is not set by idiv |
1163 // instruction. | 1206 // instruction. |
1164 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); | 1207 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
1165 __ cmp(eax, 0x40000000); | 1208 __ cmp(eax, 0x40000000); |
1166 __ j(equal, &use_fp_on_smis); | 1209 __ j(equal, &use_fp_on_smis); |
1167 // Check for negative zero result. Use combined = left | right. | 1210 // Check for negative zero result. Use combined = left | right. |
1168 __ NegativeZeroTest(eax, combined, &use_fp_on_smis); | 1211 __ NegativeZeroTest(eax, combined, &use_fp_on_smis); |
1169 // Check that the remainder is zero. | 1212 // Check that the remainder is zero. |
1170 __ test(edx, Operand(edx)); | 1213 __ test(edx, edx); |
1171 __ j(not_zero, &use_fp_on_smis); | 1214 __ j(not_zero, &use_fp_on_smis); |
1172 // Tag the result and store it in register eax. | 1215 // Tag the result and store it in register eax. |
1173 __ SmiTag(eax); | 1216 __ SmiTag(eax); |
1174 break; | 1217 break; |
1175 | 1218 |
1176 case Token::MOD: | 1219 case Token::MOD: |
1177 // Check for 0 divisor. | 1220 // Check for 0 divisor. |
1178 __ test(right, Operand(right)); | 1221 __ test(right, right); |
1179 __ j(zero, ¬_smis); | 1222 __ j(zero, ¬_smis); |
1180 | 1223 |
1181 // Sign extend left into edx:eax. | 1224 // Sign extend left into edx:eax. |
1182 ASSERT(left.is(eax)); | 1225 ASSERT(left.is(eax)); |
1183 __ cdq(); | 1226 __ cdq(); |
1184 // Divide edx:eax by right. | 1227 // Divide edx:eax by right. |
1185 __ idiv(right); | 1228 __ idiv(right); |
1186 // Check for negative zero result. Use combined = left | right. | 1229 // Check for negative zero result. Use combined = left | right. |
1187 __ NegativeZeroTest(edx, combined, slow); | 1230 __ NegativeZeroTest(edx, combined, slow); |
1188 // Move remainder to register eax. | 1231 // Move remainder to register eax. |
(...skipping 30 matching lines...) Expand all Loading... |
1219 // overflowed the smi range). | 1262 // overflowed the smi range). |
1220 if (allow_heapnumber_results == NO_HEAPNUMBER_RESULTS) { | 1263 if (allow_heapnumber_results == NO_HEAPNUMBER_RESULTS) { |
1221 __ bind(&use_fp_on_smis); | 1264 __ bind(&use_fp_on_smis); |
1222 switch (op_) { | 1265 switch (op_) { |
1223 // Undo the effects of some operations, and some register moves. | 1266 // Undo the effects of some operations, and some register moves. |
1224 case Token::SHL: | 1267 case Token::SHL: |
1225 // The arguments are saved on the stack, and only used from there. | 1268 // The arguments are saved on the stack, and only used from there. |
1226 break; | 1269 break; |
1227 case Token::ADD: | 1270 case Token::ADD: |
1228 // Revert right = right + left. | 1271 // Revert right = right + left. |
1229 __ sub(right, Operand(left)); | 1272 __ sub(right, left); |
1230 break; | 1273 break; |
1231 case Token::SUB: | 1274 case Token::SUB: |
1232 // Revert left = left - right. | 1275 // Revert left = left - right. |
1233 __ add(left, Operand(right)); | 1276 __ add(left, right); |
1234 break; | 1277 break; |
1235 case Token::MUL: | 1278 case Token::MUL: |
1236 // Right was clobbered but a copy is in ebx. | 1279 // Right was clobbered but a copy is in ebx. |
1237 __ mov(right, ebx); | 1280 __ mov(right, ebx); |
1238 break; | 1281 break; |
1239 case Token::DIV: | 1282 case Token::DIV: |
1240 // Left was clobbered but a copy is in edi. Right is in ebx for | 1283 // Left was clobbered but a copy is in edi. Right is in ebx for |
1241 // division. They should be in eax, ebx for jump to not_smi. | 1284 // division. They should be in eax, ebx for jump to not_smi. |
1242 __ mov(eax, edi); | 1285 __ mov(eax, edi); |
1243 break; | 1286 break; |
(...skipping 17 matching lines...) Expand all Loading... |
1261 // are about to return. | 1304 // are about to return. |
1262 if (op_ == Token::SHR) { | 1305 if (op_ == Token::SHR) { |
1263 __ mov(Operand(esp, 1 * kPointerSize), left); | 1306 __ mov(Operand(esp, 1 * kPointerSize), left); |
1264 __ mov(Operand(esp, 2 * kPointerSize), Immediate(0)); | 1307 __ mov(Operand(esp, 2 * kPointerSize), Immediate(0)); |
1265 __ fild_d(Operand(esp, 1 * kPointerSize)); | 1308 __ fild_d(Operand(esp, 1 * kPointerSize)); |
1266 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); | 1309 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); |
1267 } else { | 1310 } else { |
1268 ASSERT_EQ(Token::SHL, op_); | 1311 ASSERT_EQ(Token::SHL, op_); |
1269 if (CpuFeatures::IsSupported(SSE2)) { | 1312 if (CpuFeatures::IsSupported(SSE2)) { |
1270 CpuFeatures::Scope use_sse2(SSE2); | 1313 CpuFeatures::Scope use_sse2(SSE2); |
1271 __ cvtsi2sd(xmm0, Operand(left)); | 1314 __ cvtsi2sd(xmm0, left); |
1272 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); | 1315 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); |
1273 } else { | 1316 } else { |
1274 __ mov(Operand(esp, 1 * kPointerSize), left); | 1317 __ mov(Operand(esp, 1 * kPointerSize), left); |
1275 __ fild_s(Operand(esp, 1 * kPointerSize)); | 1318 __ fild_s(Operand(esp, 1 * kPointerSize)); |
1276 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); | 1319 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); |
1277 } | 1320 } |
1278 } | 1321 } |
1279 __ ret(2 * kPointerSize); | 1322 __ ret(2 * kPointerSize); |
1280 break; | 1323 break; |
1281 } | 1324 } |
1282 | 1325 |
1283 case Token::ADD: | 1326 case Token::ADD: |
1284 case Token::SUB: | 1327 case Token::SUB: |
1285 case Token::MUL: | 1328 case Token::MUL: |
1286 case Token::DIV: { | 1329 case Token::DIV: { |
1287 Comment perform_float(masm, "-- Perform float operation on smis"); | 1330 Comment perform_float(masm, "-- Perform float operation on smis"); |
1288 __ bind(&use_fp_on_smis); | 1331 __ bind(&use_fp_on_smis); |
1289 // Restore arguments to edx, eax. | 1332 // Restore arguments to edx, eax. |
1290 switch (op_) { | 1333 switch (op_) { |
1291 case Token::ADD: | 1334 case Token::ADD: |
1292 // Revert right = right + left. | 1335 // Revert right = right + left. |
1293 __ sub(right, Operand(left)); | 1336 __ sub(right, left); |
1294 break; | 1337 break; |
1295 case Token::SUB: | 1338 case Token::SUB: |
1296 // Revert left = left - right. | 1339 // Revert left = left - right. |
1297 __ add(left, Operand(right)); | 1340 __ add(left, right); |
1298 break; | 1341 break; |
1299 case Token::MUL: | 1342 case Token::MUL: |
1300 // Right was clobbered but a copy is in ebx. | 1343 // Right was clobbered but a copy is in ebx. |
1301 __ mov(right, ebx); | 1344 __ mov(right, ebx); |
1302 break; | 1345 break; |
1303 case Token::DIV: | 1346 case Token::DIV: |
1304 // Left was clobbered but a copy is in edi. Right is in ebx for | 1347 // Left was clobbered but a copy is in edi. Right is in ebx for |
1305 // division. | 1348 // division. |
1306 __ mov(edx, edi); | 1349 __ mov(edx, edi); |
1307 __ mov(eax, right); | 1350 __ mov(eax, right); |
(...skipping 171 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1479 switch (op_) { | 1522 switch (op_) { |
1480 case Token::ADD: __ addsd(xmm0, xmm1); break; | 1523 case Token::ADD: __ addsd(xmm0, xmm1); break; |
1481 case Token::SUB: __ subsd(xmm0, xmm1); break; | 1524 case Token::SUB: __ subsd(xmm0, xmm1); break; |
1482 case Token::MUL: __ mulsd(xmm0, xmm1); break; | 1525 case Token::MUL: __ mulsd(xmm0, xmm1); break; |
1483 case Token::DIV: __ divsd(xmm0, xmm1); break; | 1526 case Token::DIV: __ divsd(xmm0, xmm1); break; |
1484 default: UNREACHABLE(); | 1527 default: UNREACHABLE(); |
1485 } | 1528 } |
1486 // Check result type if it is currently Int32. | 1529 // Check result type if it is currently Int32. |
1487 if (result_type_ <= BinaryOpIC::INT32) { | 1530 if (result_type_ <= BinaryOpIC::INT32) { |
1488 __ cvttsd2si(ecx, Operand(xmm0)); | 1531 __ cvttsd2si(ecx, Operand(xmm0)); |
1489 __ cvtsi2sd(xmm2, Operand(ecx)); | 1532 __ cvtsi2sd(xmm2, ecx); |
1490 __ ucomisd(xmm0, xmm2); | 1533 __ ucomisd(xmm0, xmm2); |
1491 __ j(not_zero, ¬_int32); | 1534 __ j(not_zero, ¬_int32); |
1492 __ j(carry, ¬_int32); | 1535 __ j(carry, ¬_int32); |
1493 } | 1536 } |
1494 GenerateHeapResultAllocation(masm, &call_runtime); | 1537 GenerateHeapResultAllocation(masm, &call_runtime); |
1495 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); | 1538 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); |
1496 __ ret(0); | 1539 __ ret(0); |
1497 } else { // SSE2 not available, use FPU. | 1540 } else { // SSE2 not available, use FPU. |
1498 FloatingPointHelper::CheckFloatOperands(masm, ¬_floats, ebx); | 1541 FloatingPointHelper::CheckFloatOperands(masm, ¬_floats, ebx); |
1499 FloatingPointHelper::LoadFloatOperands( | 1542 FloatingPointHelper::LoadFloatOperands( |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1541 CpuFeatures::Scope use_sse2(SSE2); | 1584 CpuFeatures::Scope use_sse2(SSE2); |
1542 FloatingPointHelper::LoadSSE2Operands(masm, ¬_floats); | 1585 FloatingPointHelper::LoadSSE2Operands(masm, ¬_floats); |
1543 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, ¬_int32, ecx); | 1586 FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, ¬_int32, ecx); |
1544 }*/ | 1587 }*/ |
1545 FloatingPointHelper::LoadUnknownsAsIntegers(masm, | 1588 FloatingPointHelper::LoadUnknownsAsIntegers(masm, |
1546 use_sse3_, | 1589 use_sse3_, |
1547 ¬_floats); | 1590 ¬_floats); |
1548 FloatingPointHelper::CheckLoadedIntegersWereInt32(masm, use_sse3_, | 1591 FloatingPointHelper::CheckLoadedIntegersWereInt32(masm, use_sse3_, |
1549 ¬_int32); | 1592 ¬_int32); |
1550 switch (op_) { | 1593 switch (op_) { |
1551 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break; | 1594 case Token::BIT_OR: __ or_(eax, ecx); break; |
1552 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break; | 1595 case Token::BIT_AND: __ and_(eax, ecx); break; |
1553 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break; | 1596 case Token::BIT_XOR: __ xor_(eax, ecx); break; |
1554 case Token::SAR: __ sar_cl(eax); break; | 1597 case Token::SAR: __ sar_cl(eax); break; |
1555 case Token::SHL: __ shl_cl(eax); break; | 1598 case Token::SHL: __ shl_cl(eax); break; |
1556 case Token::SHR: __ shr_cl(eax); break; | 1599 case Token::SHR: __ shr_cl(eax); break; |
1557 default: UNREACHABLE(); | 1600 default: UNREACHABLE(); |
1558 } | 1601 } |
1559 if (op_ == Token::SHR) { | 1602 if (op_ == Token::SHR) { |
1560 // Check if result is non-negative and fits in a smi. | 1603 // Check if result is non-negative and fits in a smi. |
1561 __ test(eax, Immediate(0xc0000000)); | 1604 __ test(eax, Immediate(0xc0000000)); |
1562 __ j(not_zero, &call_runtime); | 1605 __ j(not_zero, &call_runtime); |
1563 } else { | 1606 } else { |
1564 // Check if result fits in a smi. | 1607 // Check if result fits in a smi. |
1565 __ cmp(eax, 0xc0000000); | 1608 __ cmp(eax, 0xc0000000); |
1566 __ j(negative, &non_smi_result, Label::kNear); | 1609 __ j(negative, &non_smi_result, Label::kNear); |
1567 } | 1610 } |
1568 // Tag smi result and return. | 1611 // Tag smi result and return. |
1569 __ SmiTag(eax); | 1612 __ SmiTag(eax); |
1570 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. | 1613 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. |
1571 | 1614 |
1572 // All ops except SHR return a signed int32 that we load in | 1615 // All ops except SHR return a signed int32 that we load in |
1573 // a HeapNumber. | 1616 // a HeapNumber. |
1574 if (op_ != Token::SHR) { | 1617 if (op_ != Token::SHR) { |
1575 __ bind(&non_smi_result); | 1618 __ bind(&non_smi_result); |
1576 // Allocate a heap number if needed. | 1619 // Allocate a heap number if needed. |
1577 __ mov(ebx, Operand(eax)); // ebx: result | 1620 __ mov(ebx, eax); // ebx: result |
1578 Label skip_allocation; | 1621 Label skip_allocation; |
1579 switch (mode_) { | 1622 switch (mode_) { |
1580 case OVERWRITE_LEFT: | 1623 case OVERWRITE_LEFT: |
1581 case OVERWRITE_RIGHT: | 1624 case OVERWRITE_RIGHT: |
1582 // If the operand was an object, we skip the | 1625 // If the operand was an object, we skip the |
1583 // allocation of a heap number. | 1626 // allocation of a heap number. |
1584 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ? | 1627 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ? |
1585 1 * kPointerSize : 2 * kPointerSize)); | 1628 1 * kPointerSize : 2 * kPointerSize)); |
1586 __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear); | 1629 __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear); |
1587 // Fall through! | 1630 // Fall through! |
1588 case NO_OVERWRITE: | 1631 case NO_OVERWRITE: |
1589 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); | 1632 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); |
1590 __ bind(&skip_allocation); | 1633 __ bind(&skip_allocation); |
1591 break; | 1634 break; |
1592 default: UNREACHABLE(); | 1635 default: UNREACHABLE(); |
1593 } | 1636 } |
1594 // Store the result in the HeapNumber and return. | 1637 // Store the result in the HeapNumber and return. |
1595 if (CpuFeatures::IsSupported(SSE2)) { | 1638 if (CpuFeatures::IsSupported(SSE2)) { |
1596 CpuFeatures::Scope use_sse2(SSE2); | 1639 CpuFeatures::Scope use_sse2(SSE2); |
1597 __ cvtsi2sd(xmm0, Operand(ebx)); | 1640 __ cvtsi2sd(xmm0, ebx); |
1598 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); | 1641 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); |
1599 } else { | 1642 } else { |
1600 __ mov(Operand(esp, 1 * kPointerSize), ebx); | 1643 __ mov(Operand(esp, 1 * kPointerSize), ebx); |
1601 __ fild_s(Operand(esp, 1 * kPointerSize)); | 1644 __ fild_s(Operand(esp, 1 * kPointerSize)); |
1602 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); | 1645 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); |
1603 } | 1646 } |
1604 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. | 1647 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. |
1605 } | 1648 } |
1606 | 1649 |
1607 __ bind(¬_floats); | 1650 __ bind(¬_floats); |
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1668 GenerateAddStrings(masm); | 1711 GenerateAddStrings(masm); |
1669 } | 1712 } |
1670 | 1713 |
1671 Factory* factory = masm->isolate()->factory(); | 1714 Factory* factory = masm->isolate()->factory(); |
1672 | 1715 |
1673 // Convert odd ball arguments to numbers. | 1716 // Convert odd ball arguments to numbers. |
1674 Label check, done; | 1717 Label check, done; |
1675 __ cmp(edx, factory->undefined_value()); | 1718 __ cmp(edx, factory->undefined_value()); |
1676 __ j(not_equal, &check, Label::kNear); | 1719 __ j(not_equal, &check, Label::kNear); |
1677 if (Token::IsBitOp(op_)) { | 1720 if (Token::IsBitOp(op_)) { |
1678 __ xor_(edx, Operand(edx)); | 1721 __ xor_(edx, edx); |
1679 } else { | 1722 } else { |
1680 __ mov(edx, Immediate(factory->nan_value())); | 1723 __ mov(edx, Immediate(factory->nan_value())); |
1681 } | 1724 } |
1682 __ jmp(&done, Label::kNear); | 1725 __ jmp(&done, Label::kNear); |
1683 __ bind(&check); | 1726 __ bind(&check); |
1684 __ cmp(eax, factory->undefined_value()); | 1727 __ cmp(eax, factory->undefined_value()); |
1685 __ j(not_equal, &done, Label::kNear); | 1728 __ j(not_equal, &done, Label::kNear); |
1686 if (Token::IsBitOp(op_)) { | 1729 if (Token::IsBitOp(op_)) { |
1687 __ xor_(eax, Operand(eax)); | 1730 __ xor_(eax, eax); |
1688 } else { | 1731 } else { |
1689 __ mov(eax, Immediate(factory->nan_value())); | 1732 __ mov(eax, Immediate(factory->nan_value())); |
1690 } | 1733 } |
1691 __ bind(&done); | 1734 __ bind(&done); |
1692 | 1735 |
1693 GenerateHeapNumberStub(masm); | 1736 GenerateHeapNumberStub(masm); |
1694 } | 1737 } |
1695 | 1738 |
1696 | 1739 |
1697 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | 1740 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1755 case Token::SAR: | 1798 case Token::SAR: |
1756 case Token::SHL: | 1799 case Token::SHL: |
1757 case Token::SHR: { | 1800 case Token::SHR: { |
1758 GenerateRegisterArgsPush(masm); | 1801 GenerateRegisterArgsPush(masm); |
1759 Label not_floats; | 1802 Label not_floats; |
1760 Label non_smi_result; | 1803 Label non_smi_result; |
1761 FloatingPointHelper::LoadUnknownsAsIntegers(masm, | 1804 FloatingPointHelper::LoadUnknownsAsIntegers(masm, |
1762 use_sse3_, | 1805 use_sse3_, |
1763 ¬_floats); | 1806 ¬_floats); |
1764 switch (op_) { | 1807 switch (op_) { |
1765 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break; | 1808 case Token::BIT_OR: __ or_(eax, ecx); break; |
1766 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break; | 1809 case Token::BIT_AND: __ and_(eax, ecx); break; |
1767 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break; | 1810 case Token::BIT_XOR: __ xor_(eax, ecx); break; |
1768 case Token::SAR: __ sar_cl(eax); break; | 1811 case Token::SAR: __ sar_cl(eax); break; |
1769 case Token::SHL: __ shl_cl(eax); break; | 1812 case Token::SHL: __ shl_cl(eax); break; |
1770 case Token::SHR: __ shr_cl(eax); break; | 1813 case Token::SHR: __ shr_cl(eax); break; |
1771 default: UNREACHABLE(); | 1814 default: UNREACHABLE(); |
1772 } | 1815 } |
1773 if (op_ == Token::SHR) { | 1816 if (op_ == Token::SHR) { |
1774 // Check if result is non-negative and fits in a smi. | 1817 // Check if result is non-negative and fits in a smi. |
1775 __ test(eax, Immediate(0xc0000000)); | 1818 __ test(eax, Immediate(0xc0000000)); |
1776 __ j(not_zero, &call_runtime); | 1819 __ j(not_zero, &call_runtime); |
1777 } else { | 1820 } else { |
1778 // Check if result fits in a smi. | 1821 // Check if result fits in a smi. |
1779 __ cmp(eax, 0xc0000000); | 1822 __ cmp(eax, 0xc0000000); |
1780 __ j(negative, &non_smi_result, Label::kNear); | 1823 __ j(negative, &non_smi_result, Label::kNear); |
1781 } | 1824 } |
1782 // Tag smi result and return. | 1825 // Tag smi result and return. |
1783 __ SmiTag(eax); | 1826 __ SmiTag(eax); |
1784 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. | 1827 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. |
1785 | 1828 |
1786 // All ops except SHR return a signed int32 that we load in | 1829 // All ops except SHR return a signed int32 that we load in |
1787 // a HeapNumber. | 1830 // a HeapNumber. |
1788 if (op_ != Token::SHR) { | 1831 if (op_ != Token::SHR) { |
1789 __ bind(&non_smi_result); | 1832 __ bind(&non_smi_result); |
1790 // Allocate a heap number if needed. | 1833 // Allocate a heap number if needed. |
1791 __ mov(ebx, Operand(eax)); // ebx: result | 1834 __ mov(ebx, eax); // ebx: result |
1792 Label skip_allocation; | 1835 Label skip_allocation; |
1793 switch (mode_) { | 1836 switch (mode_) { |
1794 case OVERWRITE_LEFT: | 1837 case OVERWRITE_LEFT: |
1795 case OVERWRITE_RIGHT: | 1838 case OVERWRITE_RIGHT: |
1796 // If the operand was an object, we skip the | 1839 // If the operand was an object, we skip the |
1797 // allocation of a heap number. | 1840 // allocation of a heap number. |
1798 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ? | 1841 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ? |
1799 1 * kPointerSize : 2 * kPointerSize)); | 1842 1 * kPointerSize : 2 * kPointerSize)); |
1800 __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear); | 1843 __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear); |
1801 // Fall through! | 1844 // Fall through! |
1802 case NO_OVERWRITE: | 1845 case NO_OVERWRITE: |
1803 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); | 1846 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); |
1804 __ bind(&skip_allocation); | 1847 __ bind(&skip_allocation); |
1805 break; | 1848 break; |
1806 default: UNREACHABLE(); | 1849 default: UNREACHABLE(); |
1807 } | 1850 } |
1808 // Store the result in the HeapNumber and return. | 1851 // Store the result in the HeapNumber and return. |
1809 if (CpuFeatures::IsSupported(SSE2)) { | 1852 if (CpuFeatures::IsSupported(SSE2)) { |
1810 CpuFeatures::Scope use_sse2(SSE2); | 1853 CpuFeatures::Scope use_sse2(SSE2); |
1811 __ cvtsi2sd(xmm0, Operand(ebx)); | 1854 __ cvtsi2sd(xmm0, ebx); |
1812 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); | 1855 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); |
1813 } else { | 1856 } else { |
1814 __ mov(Operand(esp, 1 * kPointerSize), ebx); | 1857 __ mov(Operand(esp, 1 * kPointerSize), ebx); |
1815 __ fild_s(Operand(esp, 1 * kPointerSize)); | 1858 __ fild_s(Operand(esp, 1 * kPointerSize)); |
1816 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); | 1859 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); |
1817 } | 1860 } |
1818 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. | 1861 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. |
1819 } | 1862 } |
1820 | 1863 |
1821 __ bind(¬_floats); | 1864 __ bind(¬_floats); |
(...skipping 132 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1954 case Token::BIT_AND: | 1997 case Token::BIT_AND: |
1955 case Token::BIT_XOR: | 1998 case Token::BIT_XOR: |
1956 case Token::SAR: | 1999 case Token::SAR: |
1957 case Token::SHL: | 2000 case Token::SHL: |
1958 case Token::SHR: { | 2001 case Token::SHR: { |
1959 Label non_smi_result; | 2002 Label non_smi_result; |
1960 FloatingPointHelper::LoadUnknownsAsIntegers(masm, | 2003 FloatingPointHelper::LoadUnknownsAsIntegers(masm, |
1961 use_sse3_, | 2004 use_sse3_, |
1962 &call_runtime); | 2005 &call_runtime); |
1963 switch (op_) { | 2006 switch (op_) { |
1964 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break; | 2007 case Token::BIT_OR: __ or_(eax, ecx); break; |
1965 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break; | 2008 case Token::BIT_AND: __ and_(eax, ecx); break; |
1966 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break; | 2009 case Token::BIT_XOR: __ xor_(eax, ecx); break; |
1967 case Token::SAR: __ sar_cl(eax); break; | 2010 case Token::SAR: __ sar_cl(eax); break; |
1968 case Token::SHL: __ shl_cl(eax); break; | 2011 case Token::SHL: __ shl_cl(eax); break; |
1969 case Token::SHR: __ shr_cl(eax); break; | 2012 case Token::SHR: __ shr_cl(eax); break; |
1970 default: UNREACHABLE(); | 2013 default: UNREACHABLE(); |
1971 } | 2014 } |
1972 if (op_ == Token::SHR) { | 2015 if (op_ == Token::SHR) { |
1973 // Check if result is non-negative and fits in a smi. | 2016 // Check if result is non-negative and fits in a smi. |
1974 __ test(eax, Immediate(0xc0000000)); | 2017 __ test(eax, Immediate(0xc0000000)); |
1975 __ j(not_zero, &call_runtime); | 2018 __ j(not_zero, &call_runtime); |
1976 } else { | 2019 } else { |
1977 // Check if result fits in a smi. | 2020 // Check if result fits in a smi. |
1978 __ cmp(eax, 0xc0000000); | 2021 __ cmp(eax, 0xc0000000); |
1979 __ j(negative, &non_smi_result, Label::kNear); | 2022 __ j(negative, &non_smi_result, Label::kNear); |
1980 } | 2023 } |
1981 // Tag smi result and return. | 2024 // Tag smi result and return. |
1982 __ SmiTag(eax); | 2025 __ SmiTag(eax); |
1983 __ ret(2 * kPointerSize); // Drop the arguments from the stack. | 2026 __ ret(2 * kPointerSize); // Drop the arguments from the stack. |
1984 | 2027 |
1985 // All ops except SHR return a signed int32 that we load in | 2028 // All ops except SHR return a signed int32 that we load in |
1986 // a HeapNumber. | 2029 // a HeapNumber. |
1987 if (op_ != Token::SHR) { | 2030 if (op_ != Token::SHR) { |
1988 __ bind(&non_smi_result); | 2031 __ bind(&non_smi_result); |
1989 // Allocate a heap number if needed. | 2032 // Allocate a heap number if needed. |
1990 __ mov(ebx, Operand(eax)); // ebx: result | 2033 __ mov(ebx, eax); // ebx: result |
1991 Label skip_allocation; | 2034 Label skip_allocation; |
1992 switch (mode_) { | 2035 switch (mode_) { |
1993 case OVERWRITE_LEFT: | 2036 case OVERWRITE_LEFT: |
1994 case OVERWRITE_RIGHT: | 2037 case OVERWRITE_RIGHT: |
1995 // If the operand was an object, we skip the | 2038 // If the operand was an object, we skip the |
1996 // allocation of a heap number. | 2039 // allocation of a heap number. |
1997 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ? | 2040 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ? |
1998 1 * kPointerSize : 2 * kPointerSize)); | 2041 1 * kPointerSize : 2 * kPointerSize)); |
1999 __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear); | 2042 __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear); |
2000 // Fall through! | 2043 // Fall through! |
2001 case NO_OVERWRITE: | 2044 case NO_OVERWRITE: |
2002 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); | 2045 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); |
2003 __ bind(&skip_allocation); | 2046 __ bind(&skip_allocation); |
2004 break; | 2047 break; |
2005 default: UNREACHABLE(); | 2048 default: UNREACHABLE(); |
2006 } | 2049 } |
2007 // Store the result in the HeapNumber and return. | 2050 // Store the result in the HeapNumber and return. |
2008 if (CpuFeatures::IsSupported(SSE2)) { | 2051 if (CpuFeatures::IsSupported(SSE2)) { |
2009 CpuFeatures::Scope use_sse2(SSE2); | 2052 CpuFeatures::Scope use_sse2(SSE2); |
2010 __ cvtsi2sd(xmm0, Operand(ebx)); | 2053 __ cvtsi2sd(xmm0, ebx); |
2011 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); | 2054 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); |
2012 } else { | 2055 } else { |
2013 __ mov(Operand(esp, 1 * kPointerSize), ebx); | 2056 __ mov(Operand(esp, 1 * kPointerSize), ebx); |
2014 __ fild_s(Operand(esp, 1 * kPointerSize)); | 2057 __ fild_s(Operand(esp, 1 * kPointerSize)); |
2015 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); | 2058 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); |
2016 } | 2059 } |
2017 __ ret(2 * kPointerSize); | 2060 __ ret(2 * kPointerSize); |
2018 } | 2061 } |
2019 break; | 2062 break; |
2020 } | 2063 } |
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2110 switch (mode) { | 2153 switch (mode) { |
2111 case OVERWRITE_LEFT: { | 2154 case OVERWRITE_LEFT: { |
2112 // If the argument in edx is already an object, we skip the | 2155 // If the argument in edx is already an object, we skip the |
2113 // allocation of a heap number. | 2156 // allocation of a heap number. |
2114 __ JumpIfNotSmi(edx, &skip_allocation, Label::kNear); | 2157 __ JumpIfNotSmi(edx, &skip_allocation, Label::kNear); |
2115 // Allocate a heap number for the result. Keep eax and edx intact | 2158 // Allocate a heap number for the result. Keep eax and edx intact |
2116 // for the possible runtime call. | 2159 // for the possible runtime call. |
2117 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure); | 2160 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure); |
2118 // Now edx can be overwritten losing one of the arguments as we are | 2161 // Now edx can be overwritten losing one of the arguments as we are |
2119 // now done and will not need it any more. | 2162 // now done and will not need it any more. |
2120 __ mov(edx, Operand(ebx)); | 2163 __ mov(edx, ebx); |
2121 __ bind(&skip_allocation); | 2164 __ bind(&skip_allocation); |
2122 // Use object in edx as a result holder | 2165 // Use object in edx as a result holder |
2123 __ mov(eax, Operand(edx)); | 2166 __ mov(eax, edx); |
2124 break; | 2167 break; |
2125 } | 2168 } |
2126 case OVERWRITE_RIGHT: | 2169 case OVERWRITE_RIGHT: |
2127 // If the argument in eax is already an object, we skip the | 2170 // If the argument in eax is already an object, we skip the |
2128 // allocation of a heap number. | 2171 // allocation of a heap number. |
2129 __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear); | 2172 __ JumpIfNotSmi(eax, &skip_allocation, Label::kNear); |
2130 // Fall through! | 2173 // Fall through! |
2131 case NO_OVERWRITE: | 2174 case NO_OVERWRITE: |
2132 // Allocate a heap number for the result. Keep eax and edx intact | 2175 // Allocate a heap number for the result. Keep eax and edx intact |
2133 // for the possible runtime call. | 2176 // for the possible runtime call. |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2171 if (tagged) { | 2214 if (tagged) { |
2172 // Test that eax is a number. | 2215 // Test that eax is a number. |
2173 Label input_not_smi; | 2216 Label input_not_smi; |
2174 Label loaded; | 2217 Label loaded; |
2175 __ mov(eax, Operand(esp, kPointerSize)); | 2218 __ mov(eax, Operand(esp, kPointerSize)); |
2176 __ JumpIfNotSmi(eax, &input_not_smi, Label::kNear); | 2219 __ JumpIfNotSmi(eax, &input_not_smi, Label::kNear); |
2177 // Input is a smi. Untag and load it onto the FPU stack. | 2220 // Input is a smi. Untag and load it onto the FPU stack. |
2178 // Then load the low and high words of the double into ebx, edx. | 2221 // Then load the low and high words of the double into ebx, edx. |
2179 STATIC_ASSERT(kSmiTagSize == 1); | 2222 STATIC_ASSERT(kSmiTagSize == 1); |
2180 __ sar(eax, 1); | 2223 __ sar(eax, 1); |
2181 __ sub(Operand(esp), Immediate(2 * kPointerSize)); | 2224 __ sub(esp, Immediate(2 * kPointerSize)); |
2182 __ mov(Operand(esp, 0), eax); | 2225 __ mov(Operand(esp, 0), eax); |
2183 __ fild_s(Operand(esp, 0)); | 2226 __ fild_s(Operand(esp, 0)); |
2184 __ fst_d(Operand(esp, 0)); | 2227 __ fst_d(Operand(esp, 0)); |
2185 __ pop(edx); | 2228 __ pop(edx); |
2186 __ pop(ebx); | 2229 __ pop(ebx); |
2187 __ jmp(&loaded, Label::kNear); | 2230 __ jmp(&loaded, Label::kNear); |
2188 __ bind(&input_not_smi); | 2231 __ bind(&input_not_smi); |
2189 // Check if input is a HeapNumber. | 2232 // Check if input is a HeapNumber. |
2190 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); | 2233 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); |
2191 Factory* factory = masm->isolate()->factory(); | 2234 Factory* factory = masm->isolate()->factory(); |
2192 __ cmp(Operand(ebx), Immediate(factory->heap_number_map())); | 2235 __ cmp(ebx, Immediate(factory->heap_number_map())); |
2193 __ j(not_equal, &runtime_call); | 2236 __ j(not_equal, &runtime_call); |
2194 // Input is a HeapNumber. Push it on the FPU stack and load its | 2237 // Input is a HeapNumber. Push it on the FPU stack and load its |
2195 // low and high words into ebx, edx. | 2238 // low and high words into ebx, edx. |
2196 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset)); | 2239 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset)); |
2197 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset)); | 2240 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset)); |
2198 __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset)); | 2241 __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset)); |
2199 | 2242 |
2200 __ bind(&loaded); | 2243 __ bind(&loaded); |
2201 } else { // UNTAGGED. | 2244 } else { // UNTAGGED. |
2202 if (CpuFeatures::IsSupported(SSE4_1)) { | 2245 if (CpuFeatures::IsSupported(SSE4_1)) { |
2203 CpuFeatures::Scope sse4_scope(SSE4_1); | 2246 CpuFeatures::Scope sse4_scope(SSE4_1); |
2204 __ pextrd(Operand(edx), xmm1, 0x1); // copy xmm1[63..32] to edx. | 2247 __ pextrd(edx, xmm1, 0x1); // copy xmm1[63..32] to edx. |
2205 } else { | 2248 } else { |
2206 __ pshufd(xmm0, xmm1, 0x1); | 2249 __ pshufd(xmm0, xmm1, 0x1); |
2207 __ movd(Operand(edx), xmm0); | 2250 __ movd(edx, xmm0); |
2208 } | 2251 } |
2209 __ movd(Operand(ebx), xmm1); | 2252 __ movd(ebx, xmm1); |
2210 } | 2253 } |
2211 | 2254 |
2212 // ST[0] or xmm1 == double value | 2255 // ST[0] or xmm1 == double value |
2213 // ebx = low 32 bits of double value | 2256 // ebx = low 32 bits of double value |
2214 // edx = high 32 bits of double value | 2257 // edx = high 32 bits of double value |
2215 // Compute hash (the shifts are arithmetic): | 2258 // Compute hash (the shifts are arithmetic): |
2216 // h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1); | 2259 // h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1); |
2217 __ mov(ecx, ebx); | 2260 __ mov(ecx, ebx); |
2218 __ xor_(ecx, Operand(edx)); | 2261 __ xor_(ecx, edx); |
2219 __ mov(eax, ecx); | 2262 __ mov(eax, ecx); |
2220 __ sar(eax, 16); | 2263 __ sar(eax, 16); |
2221 __ xor_(ecx, Operand(eax)); | 2264 __ xor_(ecx, eax); |
2222 __ mov(eax, ecx); | 2265 __ mov(eax, ecx); |
2223 __ sar(eax, 8); | 2266 __ sar(eax, 8); |
2224 __ xor_(ecx, Operand(eax)); | 2267 __ xor_(ecx, eax); |
2225 ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize)); | 2268 ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize)); |
2226 __ and_(Operand(ecx), | 2269 __ and_(ecx, |
2227 Immediate(TranscendentalCache::SubCache::kCacheSize - 1)); | 2270 Immediate(TranscendentalCache::SubCache::kCacheSize - 1)); |
2228 | 2271 |
2229 // ST[0] or xmm1 == double value. | 2272 // ST[0] or xmm1 == double value. |
2230 // ebx = low 32 bits of double value. | 2273 // ebx = low 32 bits of double value. |
2231 // edx = high 32 bits of double value. | 2274 // edx = high 32 bits of double value. |
2232 // ecx = TranscendentalCache::hash(double value). | 2275 // ecx = TranscendentalCache::hash(double value). |
2233 ExternalReference cache_array = | 2276 ExternalReference cache_array = |
2234 ExternalReference::transcendental_cache_array_address(masm->isolate()); | 2277 ExternalReference::transcendental_cache_array_address(masm->isolate()); |
2235 __ mov(eax, Immediate(cache_array)); | 2278 __ mov(eax, Immediate(cache_array)); |
2236 int cache_array_index = | 2279 int cache_array_index = |
2237 type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]); | 2280 type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]); |
2238 __ mov(eax, Operand(eax, cache_array_index)); | 2281 __ mov(eax, Operand(eax, cache_array_index)); |
2239 // Eax points to the cache for the type type_. | 2282 // Eax points to the cache for the type type_. |
2240 // If NULL, the cache hasn't been initialized yet, so go through runtime. | 2283 // If NULL, the cache hasn't been initialized yet, so go through runtime. |
2241 __ test(eax, Operand(eax)); | 2284 __ test(eax, eax); |
2242 __ j(zero, &runtime_call_clear_stack); | 2285 __ j(zero, &runtime_call_clear_stack); |
2243 #ifdef DEBUG | 2286 #ifdef DEBUG |
2244 // Check that the layout of cache elements match expectations. | 2287 // Check that the layout of cache elements match expectations. |
2245 { TranscendentalCache::SubCache::Element test_elem[2]; | 2288 { TranscendentalCache::SubCache::Element test_elem[2]; |
2246 char* elem_start = reinterpret_cast<char*>(&test_elem[0]); | 2289 char* elem_start = reinterpret_cast<char*>(&test_elem[0]); |
2247 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]); | 2290 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]); |
2248 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0])); | 2291 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0])); |
2249 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1])); | 2292 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1])); |
2250 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output)); | 2293 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output)); |
2251 CHECK_EQ(12, elem2_start - elem_start); // Two uint_32's and a pointer. | 2294 CHECK_EQ(12, elem2_start - elem_start); // Two uint_32's and a pointer. |
(...skipping 22 matching lines...) Expand all Loading... |
2274 } | 2317 } |
2275 | 2318 |
2276 __ bind(&cache_miss); | 2319 __ bind(&cache_miss); |
2277 // Update cache with new value. | 2320 // Update cache with new value. |
2278 // We are short on registers, so use no_reg as scratch. | 2321 // We are short on registers, so use no_reg as scratch. |
2279 // This gives slightly larger code. | 2322 // This gives slightly larger code. |
2280 if (tagged) { | 2323 if (tagged) { |
2281 __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack); | 2324 __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack); |
2282 } else { // UNTAGGED. | 2325 } else { // UNTAGGED. |
2283 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache); | 2326 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache); |
2284 __ sub(Operand(esp), Immediate(kDoubleSize)); | 2327 __ sub(esp, Immediate(kDoubleSize)); |
2285 __ movdbl(Operand(esp, 0), xmm1); | 2328 __ movdbl(Operand(esp, 0), xmm1); |
2286 __ fld_d(Operand(esp, 0)); | 2329 __ fld_d(Operand(esp, 0)); |
2287 __ add(Operand(esp), Immediate(kDoubleSize)); | 2330 __ add(esp, Immediate(kDoubleSize)); |
2288 } | 2331 } |
2289 GenerateOperation(masm); | 2332 GenerateOperation(masm); |
2290 __ mov(Operand(ecx, 0), ebx); | 2333 __ mov(Operand(ecx, 0), ebx); |
2291 __ mov(Operand(ecx, kIntSize), edx); | 2334 __ mov(Operand(ecx, kIntSize), edx); |
2292 __ mov(Operand(ecx, 2 * kIntSize), eax); | 2335 __ mov(Operand(ecx, 2 * kIntSize), eax); |
2293 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); | 2336 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); |
2294 if (tagged) { | 2337 if (tagged) { |
2295 __ ret(kPointerSize); | 2338 __ ret(kPointerSize); |
2296 } else { // UNTAGGED. | 2339 } else { // UNTAGGED. |
2297 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); | 2340 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); |
2298 __ Ret(); | 2341 __ Ret(); |
2299 | 2342 |
2300 // Skip cache and return answer directly, only in untagged case. | 2343 // Skip cache and return answer directly, only in untagged case. |
2301 __ bind(&skip_cache); | 2344 __ bind(&skip_cache); |
2302 __ sub(Operand(esp), Immediate(kDoubleSize)); | 2345 __ sub(esp, Immediate(kDoubleSize)); |
2303 __ movdbl(Operand(esp, 0), xmm1); | 2346 __ movdbl(Operand(esp, 0), xmm1); |
2304 __ fld_d(Operand(esp, 0)); | 2347 __ fld_d(Operand(esp, 0)); |
2305 GenerateOperation(masm); | 2348 GenerateOperation(masm); |
2306 __ fstp_d(Operand(esp, 0)); | 2349 __ fstp_d(Operand(esp, 0)); |
2307 __ movdbl(xmm1, Operand(esp, 0)); | 2350 __ movdbl(xmm1, Operand(esp, 0)); |
2308 __ add(Operand(esp), Immediate(kDoubleSize)); | 2351 __ add(esp, Immediate(kDoubleSize)); |
2309 // We return the value in xmm1 without adding it to the cache, but | 2352 // We return the value in xmm1 without adding it to the cache, but |
2310 // we cause a scavenging GC so that future allocations will succeed. | 2353 // we cause a scavenging GC so that future allocations will succeed. |
2311 __ EnterInternalFrame(); | 2354 { |
2312 // Allocate an unused object bigger than a HeapNumber. | 2355 FrameScope scope(masm, StackFrame::INTERNAL); |
2313 __ push(Immediate(Smi::FromInt(2 * kDoubleSize))); | 2356 // Allocate an unused object bigger than a HeapNumber. |
2314 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); | 2357 __ push(Immediate(Smi::FromInt(2 * kDoubleSize))); |
2315 __ LeaveInternalFrame(); | 2358 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); |
| 2359 } |
2316 __ Ret(); | 2360 __ Ret(); |
2317 } | 2361 } |
2318 | 2362 |
2319 // Call runtime, doing whatever allocation and cleanup is necessary. | 2363 // Call runtime, doing whatever allocation and cleanup is necessary. |
2320 if (tagged) { | 2364 if (tagged) { |
2321 __ bind(&runtime_call_clear_stack); | 2365 __ bind(&runtime_call_clear_stack); |
2322 __ fstp(0); | 2366 __ fstp(0); |
2323 __ bind(&runtime_call); | 2367 __ bind(&runtime_call); |
2324 ExternalReference runtime = | 2368 ExternalReference runtime = |
2325 ExternalReference(RuntimeFunction(), masm->isolate()); | 2369 ExternalReference(RuntimeFunction(), masm->isolate()); |
2326 __ TailCallExternalReference(runtime, 1, 1); | 2370 __ TailCallExternalReference(runtime, 1, 1); |
2327 } else { // UNTAGGED. | 2371 } else { // UNTAGGED. |
2328 __ bind(&runtime_call_clear_stack); | 2372 __ bind(&runtime_call_clear_stack); |
2329 __ bind(&runtime_call); | 2373 __ bind(&runtime_call); |
2330 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache); | 2374 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache); |
2331 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm1); | 2375 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm1); |
2332 __ EnterInternalFrame(); | 2376 { |
2333 __ push(eax); | 2377 FrameScope scope(masm, StackFrame::INTERNAL); |
2334 __ CallRuntime(RuntimeFunction(), 1); | 2378 __ push(eax); |
2335 __ LeaveInternalFrame(); | 2379 __ CallRuntime(RuntimeFunction(), 1); |
| 2380 } |
2336 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); | 2381 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); |
2337 __ Ret(); | 2382 __ Ret(); |
2338 } | 2383 } |
2339 } | 2384 } |
2340 | 2385 |
2341 | 2386 |
2342 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { | 2387 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { |
2343 switch (type_) { | 2388 switch (type_) { |
2344 case TranscendentalCache::SIN: return Runtime::kMath_sin; | 2389 case TranscendentalCache::SIN: return Runtime::kMath_sin; |
2345 case TranscendentalCache::COS: return Runtime::kMath_cos; | 2390 case TranscendentalCache::COS: return Runtime::kMath_cos; |
(...skipping 11 matching lines...) Expand all Loading... |
2357 // Input value is possibly in xmm1. | 2402 // Input value is possibly in xmm1. |
2358 // Address of result (a newly allocated HeapNumber) may be in eax. | 2403 // Address of result (a newly allocated HeapNumber) may be in eax. |
2359 if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) { | 2404 if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) { |
2360 // Both fsin and fcos require arguments in the range +/-2^63 and | 2405 // Both fsin and fcos require arguments in the range +/-2^63 and |
2361 // return NaN for infinities and NaN. They can share all code except | 2406 // return NaN for infinities and NaN. They can share all code except |
2362 // the actual fsin/fcos operation. | 2407 // the actual fsin/fcos operation. |
2363 Label in_range, done; | 2408 Label in_range, done; |
2364 // If argument is outside the range -2^63..2^63, fsin/cos doesn't | 2409 // If argument is outside the range -2^63..2^63, fsin/cos doesn't |
2365 // work. We must reduce it to the appropriate range. | 2410 // work. We must reduce it to the appropriate range. |
2366 __ mov(edi, edx); | 2411 __ mov(edi, edx); |
2367 __ and_(Operand(edi), Immediate(0x7ff00000)); // Exponent only. | 2412 __ and_(edi, Immediate(0x7ff00000)); // Exponent only. |
2368 int supported_exponent_limit = | 2413 int supported_exponent_limit = |
2369 (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift; | 2414 (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift; |
2370 __ cmp(Operand(edi), Immediate(supported_exponent_limit)); | 2415 __ cmp(edi, Immediate(supported_exponent_limit)); |
2371 __ j(below, &in_range, Label::kNear); | 2416 __ j(below, &in_range, Label::kNear); |
2372 // Check for infinity and NaN. Both return NaN for sin. | 2417 // Check for infinity and NaN. Both return NaN for sin. |
2373 __ cmp(Operand(edi), Immediate(0x7ff00000)); | 2418 __ cmp(edi, Immediate(0x7ff00000)); |
2374 Label non_nan_result; | 2419 Label non_nan_result; |
2375 __ j(not_equal, &non_nan_result, Label::kNear); | 2420 __ j(not_equal, &non_nan_result, Label::kNear); |
2376 // Input is +/-Infinity or NaN. Result is NaN. | 2421 // Input is +/-Infinity or NaN. Result is NaN. |
2377 __ fstp(0); | 2422 __ fstp(0); |
2378 // NaN is represented by 0x7ff8000000000000. | 2423 // NaN is represented by 0x7ff8000000000000. |
2379 __ push(Immediate(0x7ff80000)); | 2424 __ push(Immediate(0x7ff80000)); |
2380 __ push(Immediate(0)); | 2425 __ push(Immediate(0)); |
2381 __ fld_d(Operand(esp, 0)); | 2426 __ fld_d(Operand(esp, 0)); |
2382 __ add(Operand(esp), Immediate(2 * kPointerSize)); | 2427 __ add(esp, Immediate(2 * kPointerSize)); |
2383 __ jmp(&done, Label::kNear); | 2428 __ jmp(&done, Label::kNear); |
2384 | 2429 |
2385 __ bind(&non_nan_result); | 2430 __ bind(&non_nan_result); |
2386 | 2431 |
2387 // Use fpmod to restrict argument to the range +/-2*PI. | 2432 // Use fpmod to restrict argument to the range +/-2*PI. |
2388 __ mov(edi, eax); // Save eax before using fnstsw_ax. | 2433 __ mov(edi, eax); // Save eax before using fnstsw_ax. |
2389 __ fldpi(); | 2434 __ fldpi(); |
2390 __ fadd(0); | 2435 __ fadd(0); |
2391 __ fld(1); | 2436 __ fld(1); |
2392 // FPU Stack: input, 2*pi, input. | 2437 // FPU Stack: input, 2*pi, input. |
2393 { | 2438 { |
2394 Label no_exceptions; | 2439 Label no_exceptions; |
2395 __ fwait(); | 2440 __ fwait(); |
2396 __ fnstsw_ax(); | 2441 __ fnstsw_ax(); |
2397 // Clear if Illegal Operand or Zero Division exceptions are set. | 2442 // Clear if Illegal Operand or Zero Division exceptions are set. |
2398 __ test(Operand(eax), Immediate(5)); | 2443 __ test(eax, Immediate(5)); |
2399 __ j(zero, &no_exceptions, Label::kNear); | 2444 __ j(zero, &no_exceptions, Label::kNear); |
2400 __ fnclex(); | 2445 __ fnclex(); |
2401 __ bind(&no_exceptions); | 2446 __ bind(&no_exceptions); |
2402 } | 2447 } |
2403 | 2448 |
2404 // Compute st(0) % st(1) | 2449 // Compute st(0) % st(1) |
2405 { | 2450 { |
2406 Label partial_remainder_loop; | 2451 Label partial_remainder_loop; |
2407 __ bind(&partial_remainder_loop); | 2452 __ bind(&partial_remainder_loop); |
2408 __ fprem1(); | 2453 __ fprem1(); |
2409 __ fwait(); | 2454 __ fwait(); |
2410 __ fnstsw_ax(); | 2455 __ fnstsw_ax(); |
2411 __ test(Operand(eax), Immediate(0x400 /* C2 */)); | 2456 __ test(eax, Immediate(0x400 /* C2 */)); |
2412 // If C2 is set, computation only has partial result. Loop to | 2457 // If C2 is set, computation only has partial result. Loop to |
2413 // continue computation. | 2458 // continue computation. |
2414 __ j(not_zero, &partial_remainder_loop); | 2459 __ j(not_zero, &partial_remainder_loop); |
2415 } | 2460 } |
2416 // FPU Stack: input, 2*pi, input % 2*pi | 2461 // FPU Stack: input, 2*pi, input % 2*pi |
2417 __ fstp(2); | 2462 __ fstp(2); |
2418 __ fstp(0); | 2463 __ fstp(0); |
2419 __ mov(eax, edi); // Restore eax (allocated HeapNumber pointer). | 2464 __ mov(eax, edi); // Restore eax (allocated HeapNumber pointer). |
2420 | 2465 |
2421 // FPU Stack: input % 2*pi | 2466 // FPU Stack: input % 2*pi |
(...skipping 112 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2534 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); | 2579 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); |
2535 | 2580 |
2536 __ bind(&load_eax); | 2581 __ bind(&load_eax); |
2537 // Load operand in eax into xmm1. | 2582 // Load operand in eax into xmm1. |
2538 __ JumpIfSmi(eax, &load_smi_eax, Label::kNear); | 2583 __ JumpIfSmi(eax, &load_smi_eax, Label::kNear); |
2539 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); | 2584 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); |
2540 __ jmp(&done, Label::kNear); | 2585 __ jmp(&done, Label::kNear); |
2541 | 2586 |
2542 __ bind(&load_smi_edx); | 2587 __ bind(&load_smi_edx); |
2543 __ SmiUntag(edx); // Untag smi before converting to float. | 2588 __ SmiUntag(edx); // Untag smi before converting to float. |
2544 __ cvtsi2sd(xmm0, Operand(edx)); | 2589 __ cvtsi2sd(xmm0, edx); |
2545 __ SmiTag(edx); // Retag smi for heap number overwriting test. | 2590 __ SmiTag(edx); // Retag smi for heap number overwriting test. |
2546 __ jmp(&load_eax); | 2591 __ jmp(&load_eax); |
2547 | 2592 |
2548 __ bind(&load_smi_eax); | 2593 __ bind(&load_smi_eax); |
2549 __ SmiUntag(eax); // Untag smi before converting to float. | 2594 __ SmiUntag(eax); // Untag smi before converting to float. |
2550 __ cvtsi2sd(xmm1, Operand(eax)); | 2595 __ cvtsi2sd(xmm1, eax); |
2551 __ SmiTag(eax); // Retag smi for heap number overwriting test. | 2596 __ SmiTag(eax); // Retag smi for heap number overwriting test. |
2552 | 2597 |
2553 __ bind(&done); | 2598 __ bind(&done); |
2554 } | 2599 } |
2555 | 2600 |
2556 | 2601 |
2557 void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm, | 2602 void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm, |
2558 Label* not_numbers) { | 2603 Label* not_numbers) { |
2559 Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done; | 2604 Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done; |
2560 // Load operand in edx into xmm0, or branch to not_numbers. | 2605 // Load operand in edx into xmm0, or branch to not_numbers. |
2561 __ JumpIfSmi(edx, &load_smi_edx, Label::kNear); | 2606 __ JumpIfSmi(edx, &load_smi_edx, Label::kNear); |
2562 Factory* factory = masm->isolate()->factory(); | 2607 Factory* factory = masm->isolate()->factory(); |
2563 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map()); | 2608 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map()); |
2564 __ j(not_equal, not_numbers); // Argument in edx is not a number. | 2609 __ j(not_equal, not_numbers); // Argument in edx is not a number. |
2565 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); | 2610 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); |
2566 __ bind(&load_eax); | 2611 __ bind(&load_eax); |
2567 // Load operand in eax into xmm1, or branch to not_numbers. | 2612 // Load operand in eax into xmm1, or branch to not_numbers. |
2568 __ JumpIfSmi(eax, &load_smi_eax, Label::kNear); | 2613 __ JumpIfSmi(eax, &load_smi_eax, Label::kNear); |
2569 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map()); | 2614 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map()); |
2570 __ j(equal, &load_float_eax, Label::kNear); | 2615 __ j(equal, &load_float_eax, Label::kNear); |
2571 __ jmp(not_numbers); // Argument in eax is not a number. | 2616 __ jmp(not_numbers); // Argument in eax is not a number. |
2572 __ bind(&load_smi_edx); | 2617 __ bind(&load_smi_edx); |
2573 __ SmiUntag(edx); // Untag smi before converting to float. | 2618 __ SmiUntag(edx); // Untag smi before converting to float. |
2574 __ cvtsi2sd(xmm0, Operand(edx)); | 2619 __ cvtsi2sd(xmm0, edx); |
2575 __ SmiTag(edx); // Retag smi for heap number overwriting test. | 2620 __ SmiTag(edx); // Retag smi for heap number overwriting test. |
2576 __ jmp(&load_eax); | 2621 __ jmp(&load_eax); |
2577 __ bind(&load_smi_eax); | 2622 __ bind(&load_smi_eax); |
2578 __ SmiUntag(eax); // Untag smi before converting to float. | 2623 __ SmiUntag(eax); // Untag smi before converting to float. |
2579 __ cvtsi2sd(xmm1, Operand(eax)); | 2624 __ cvtsi2sd(xmm1, eax); |
2580 __ SmiTag(eax); // Retag smi for heap number overwriting test. | 2625 __ SmiTag(eax); // Retag smi for heap number overwriting test. |
2581 __ jmp(&done, Label::kNear); | 2626 __ jmp(&done, Label::kNear); |
2582 __ bind(&load_float_eax); | 2627 __ bind(&load_float_eax); |
2583 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); | 2628 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); |
2584 __ bind(&done); | 2629 __ bind(&done); |
2585 } | 2630 } |
2586 | 2631 |
2587 | 2632 |
2588 void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm, | 2633 void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm, |
2589 Register scratch) { | 2634 Register scratch) { |
2590 const Register left = edx; | 2635 const Register left = edx; |
2591 const Register right = eax; | 2636 const Register right = eax; |
2592 __ mov(scratch, left); | 2637 __ mov(scratch, left); |
2593 ASSERT(!scratch.is(right)); // We're about to clobber scratch. | 2638 ASSERT(!scratch.is(right)); // We're about to clobber scratch. |
2594 __ SmiUntag(scratch); | 2639 __ SmiUntag(scratch); |
2595 __ cvtsi2sd(xmm0, Operand(scratch)); | 2640 __ cvtsi2sd(xmm0, scratch); |
2596 | 2641 |
2597 __ mov(scratch, right); | 2642 __ mov(scratch, right); |
2598 __ SmiUntag(scratch); | 2643 __ SmiUntag(scratch); |
2599 __ cvtsi2sd(xmm1, Operand(scratch)); | 2644 __ cvtsi2sd(xmm1, scratch); |
2600 } | 2645 } |
2601 | 2646 |
2602 | 2647 |
2603 void FloatingPointHelper::CheckSSE2OperandsAreInt32(MacroAssembler* masm, | 2648 void FloatingPointHelper::CheckSSE2OperandsAreInt32(MacroAssembler* masm, |
2604 Label* non_int32, | 2649 Label* non_int32, |
2605 Register scratch) { | 2650 Register scratch) { |
2606 __ cvttsd2si(scratch, Operand(xmm0)); | 2651 __ cvttsd2si(scratch, Operand(xmm0)); |
2607 __ cvtsi2sd(xmm2, Operand(scratch)); | 2652 __ cvtsi2sd(xmm2, scratch); |
2608 __ ucomisd(xmm0, xmm2); | 2653 __ ucomisd(xmm0, xmm2); |
2609 __ j(not_zero, non_int32); | 2654 __ j(not_zero, non_int32); |
2610 __ j(carry, non_int32); | 2655 __ j(carry, non_int32); |
2611 __ cvttsd2si(scratch, Operand(xmm1)); | 2656 __ cvttsd2si(scratch, Operand(xmm1)); |
2612 __ cvtsi2sd(xmm2, Operand(scratch)); | 2657 __ cvtsi2sd(xmm2, scratch); |
2613 __ ucomisd(xmm1, xmm2); | 2658 __ ucomisd(xmm1, xmm2); |
2614 __ j(not_zero, non_int32); | 2659 __ j(not_zero, non_int32); |
2615 __ j(carry, non_int32); | 2660 __ j(carry, non_int32); |
2616 } | 2661 } |
2617 | 2662 |
2618 | 2663 |
2619 void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm, | 2664 void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm, |
2620 Register scratch, | 2665 Register scratch, |
2621 ArgLocation arg_location) { | 2666 ArgLocation arg_location) { |
2622 Label load_smi_1, load_smi_2, done_load_1, done; | 2667 Label load_smi_1, load_smi_2, done_load_1, done; |
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2710 | 2755 |
2711 CpuFeatures::Scope use_sse2(SSE2); | 2756 CpuFeatures::Scope use_sse2(SSE2); |
2712 Label allocate_return, call_runtime; | 2757 Label allocate_return, call_runtime; |
2713 | 2758 |
2714 // Load input parameters. | 2759 // Load input parameters. |
2715 __ mov(edx, Operand(esp, 2 * kPointerSize)); | 2760 __ mov(edx, Operand(esp, 2 * kPointerSize)); |
2716 __ mov(eax, Operand(esp, 1 * kPointerSize)); | 2761 __ mov(eax, Operand(esp, 1 * kPointerSize)); |
2717 | 2762 |
2718 // Save 1 in xmm3 - we need this several times later on. | 2763 // Save 1 in xmm3 - we need this several times later on. |
2719 __ mov(ecx, Immediate(1)); | 2764 __ mov(ecx, Immediate(1)); |
2720 __ cvtsi2sd(xmm3, Operand(ecx)); | 2765 __ cvtsi2sd(xmm3, ecx); |
2721 | 2766 |
2722 Label exponent_nonsmi; | 2767 Label exponent_nonsmi; |
2723 Label base_nonsmi; | 2768 Label base_nonsmi; |
2724 // If the exponent is a heap number go to that specific case. | 2769 // If the exponent is a heap number go to that specific case. |
2725 __ JumpIfNotSmi(eax, &exponent_nonsmi); | 2770 __ JumpIfNotSmi(eax, &exponent_nonsmi); |
2726 __ JumpIfNotSmi(edx, &base_nonsmi); | 2771 __ JumpIfNotSmi(edx, &base_nonsmi); |
2727 | 2772 |
2728 // Optimized version when both exponent and base are smis. | 2773 // Optimized version when both exponent and base are smis. |
2729 Label powi; | 2774 Label powi; |
2730 __ SmiUntag(edx); | 2775 __ SmiUntag(edx); |
2731 __ cvtsi2sd(xmm0, Operand(edx)); | 2776 __ cvtsi2sd(xmm0, edx); |
2732 __ jmp(&powi); | 2777 __ jmp(&powi); |
2733 // exponent is smi and base is a heapnumber. | 2778 // exponent is smi and base is a heapnumber. |
2734 __ bind(&base_nonsmi); | 2779 __ bind(&base_nonsmi); |
2735 Factory* factory = masm->isolate()->factory(); | 2780 Factory* factory = masm->isolate()->factory(); |
2736 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), | 2781 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), |
2737 factory->heap_number_map()); | 2782 factory->heap_number_map()); |
2738 __ j(not_equal, &call_runtime); | 2783 __ j(not_equal, &call_runtime); |
2739 | 2784 |
2740 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); | 2785 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); |
2741 | 2786 |
(...skipping 21 matching lines...) Expand all Loading... |
2763 __ bind(&while_true); | 2808 __ bind(&while_true); |
2764 __ shr(eax, 1); | 2809 __ shr(eax, 1); |
2765 __ j(not_carry, &no_multiply, Label::kNear); | 2810 __ j(not_carry, &no_multiply, Label::kNear); |
2766 __ mulsd(xmm1, xmm0); | 2811 __ mulsd(xmm1, xmm0); |
2767 __ bind(&no_multiply); | 2812 __ bind(&no_multiply); |
2768 __ mulsd(xmm0, xmm0); | 2813 __ mulsd(xmm0, xmm0); |
2769 __ j(not_zero, &while_true); | 2814 __ j(not_zero, &while_true); |
2770 | 2815 |
2771 // base has the original value of the exponent - if the exponent is | 2816 // base has the original value of the exponent - if the exponent is |
2772 // negative return 1/result. | 2817 // negative return 1/result. |
2773 __ test(edx, Operand(edx)); | 2818 __ test(edx, edx); |
2774 __ j(positive, &allocate_return); | 2819 __ j(positive, &allocate_return); |
2775 // Special case if xmm1 has reached infinity. | 2820 // Special case if xmm1 has reached infinity. |
2776 __ mov(ecx, Immediate(0x7FB00000)); | 2821 __ mov(ecx, Immediate(0x7FB00000)); |
2777 __ movd(xmm0, Operand(ecx)); | 2822 __ movd(xmm0, ecx); |
2778 __ cvtss2sd(xmm0, xmm0); | 2823 __ cvtss2sd(xmm0, xmm0); |
2779 __ ucomisd(xmm0, xmm1); | 2824 __ ucomisd(xmm0, xmm1); |
2780 __ j(equal, &call_runtime); | 2825 __ j(equal, &call_runtime); |
2781 __ divsd(xmm3, xmm1); | 2826 __ divsd(xmm3, xmm1); |
2782 __ movsd(xmm1, xmm3); | 2827 __ movsd(xmm1, xmm3); |
2783 __ jmp(&allocate_return); | 2828 __ jmp(&allocate_return); |
2784 | 2829 |
2785 // exponent (or both) is a heapnumber - no matter what we should now work | 2830 // exponent (or both) is a heapnumber - no matter what we should now work |
2786 // on doubles. | 2831 // on doubles. |
2787 __ bind(&exponent_nonsmi); | 2832 __ bind(&exponent_nonsmi); |
2788 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), | 2833 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), |
2789 factory->heap_number_map()); | 2834 factory->heap_number_map()); |
2790 __ j(not_equal, &call_runtime); | 2835 __ j(not_equal, &call_runtime); |
2791 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); | 2836 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); |
2792 // Test if exponent is nan. | 2837 // Test if exponent is nan. |
2793 __ ucomisd(xmm1, xmm1); | 2838 __ ucomisd(xmm1, xmm1); |
2794 __ j(parity_even, &call_runtime); | 2839 __ j(parity_even, &call_runtime); |
2795 | 2840 |
2796 Label base_not_smi; | 2841 Label base_not_smi; |
2797 Label handle_special_cases; | 2842 Label handle_special_cases; |
2798 __ JumpIfNotSmi(edx, &base_not_smi, Label::kNear); | 2843 __ JumpIfNotSmi(edx, &base_not_smi, Label::kNear); |
2799 __ SmiUntag(edx); | 2844 __ SmiUntag(edx); |
2800 __ cvtsi2sd(xmm0, Operand(edx)); | 2845 __ cvtsi2sd(xmm0, edx); |
2801 __ jmp(&handle_special_cases, Label::kNear); | 2846 __ jmp(&handle_special_cases, Label::kNear); |
2802 | 2847 |
2803 __ bind(&base_not_smi); | 2848 __ bind(&base_not_smi); |
2804 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), | 2849 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), |
2805 factory->heap_number_map()); | 2850 factory->heap_number_map()); |
2806 __ j(not_equal, &call_runtime); | 2851 __ j(not_equal, &call_runtime); |
2807 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset)); | 2852 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset)); |
2808 __ and_(ecx, HeapNumber::kExponentMask); | 2853 __ and_(ecx, HeapNumber::kExponentMask); |
2809 __ cmp(Operand(ecx), Immediate(HeapNumber::kExponentMask)); | 2854 __ cmp(ecx, Immediate(HeapNumber::kExponentMask)); |
2810 // base is NaN or +/-Infinity | 2855 // base is NaN or +/-Infinity |
2811 __ j(greater_equal, &call_runtime); | 2856 __ j(greater_equal, &call_runtime); |
2812 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); | 2857 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); |
2813 | 2858 |
2814 // base is in xmm0 and exponent is in xmm1. | 2859 // base is in xmm0 and exponent is in xmm1. |
2815 __ bind(&handle_special_cases); | 2860 __ bind(&handle_special_cases); |
2816 Label not_minus_half; | 2861 Label not_minus_half; |
2817 // Test for -0.5. | 2862 // Test for -0.5. |
2818 // Load xmm2 with -0.5. | 2863 // Load xmm2 with -0.5. |
2819 __ mov(ecx, Immediate(0xBF000000)); | 2864 __ mov(ecx, Immediate(0xBF000000)); |
2820 __ movd(xmm2, Operand(ecx)); | 2865 __ movd(xmm2, ecx); |
2821 __ cvtss2sd(xmm2, xmm2); | 2866 __ cvtss2sd(xmm2, xmm2); |
2822 // xmm2 now has -0.5. | 2867 // xmm2 now has -0.5. |
2823 __ ucomisd(xmm2, xmm1); | 2868 __ ucomisd(xmm2, xmm1); |
2824 __ j(not_equal, ¬_minus_half, Label::kNear); | 2869 __ j(not_equal, ¬_minus_half, Label::kNear); |
2825 | 2870 |
2826 // Calculates reciprocal of square root. | 2871 // Calculates reciprocal of square root. |
2827 // sqrtsd returns -0 when input is -0. ECMA spec requires +0. | 2872 // sqrtsd returns -0 when input is -0. ECMA spec requires +0. |
2828 __ xorps(xmm1, xmm1); | 2873 __ xorps(xmm1, xmm1); |
2829 __ addsd(xmm1, xmm0); | 2874 __ addsd(xmm1, xmm0); |
2830 __ sqrtsd(xmm1, xmm1); | 2875 __ sqrtsd(xmm1, xmm1); |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2866 static const int kDisplacement = 1 * kPointerSize; | 2911 static const int kDisplacement = 1 * kPointerSize; |
2867 | 2912 |
2868 // Check that the key is a smi. | 2913 // Check that the key is a smi. |
2869 Label slow; | 2914 Label slow; |
2870 __ JumpIfNotSmi(edx, &slow, Label::kNear); | 2915 __ JumpIfNotSmi(edx, &slow, Label::kNear); |
2871 | 2916 |
2872 // Check if the calling frame is an arguments adaptor frame. | 2917 // Check if the calling frame is an arguments adaptor frame. |
2873 Label adaptor; | 2918 Label adaptor; |
2874 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); | 2919 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); |
2875 __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset)); | 2920 __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset)); |
2876 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 2921 __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
2877 __ j(equal, &adaptor, Label::kNear); | 2922 __ j(equal, &adaptor, Label::kNear); |
2878 | 2923 |
2879 // Check index against formal parameters count limit passed in | 2924 // Check index against formal parameters count limit passed in |
2880 // through register eax. Use unsigned comparison to get negative | 2925 // through register eax. Use unsigned comparison to get negative |
2881 // check for free. | 2926 // check for free. |
2882 __ cmp(edx, Operand(eax)); | 2927 __ cmp(edx, eax); |
2883 __ j(above_equal, &slow, Label::kNear); | 2928 __ j(above_equal, &slow, Label::kNear); |
2884 | 2929 |
2885 // Read the argument from the stack and return it. | 2930 // Read the argument from the stack and return it. |
2886 STATIC_ASSERT(kSmiTagSize == 1); | 2931 STATIC_ASSERT(kSmiTagSize == 1); |
2887 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these. | 2932 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these. |
2888 __ lea(ebx, Operand(ebp, eax, times_2, 0)); | 2933 __ lea(ebx, Operand(ebp, eax, times_2, 0)); |
2889 __ neg(edx); | 2934 __ neg(edx); |
2890 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement)); | 2935 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement)); |
2891 __ ret(0); | 2936 __ ret(0); |
2892 | 2937 |
2893 // Arguments adaptor case: Check index against actual arguments | 2938 // Arguments adaptor case: Check index against actual arguments |
2894 // limit found in the arguments adaptor frame. Use unsigned | 2939 // limit found in the arguments adaptor frame. Use unsigned |
2895 // comparison to get negative check for free. | 2940 // comparison to get negative check for free. |
2896 __ bind(&adaptor); | 2941 __ bind(&adaptor); |
2897 __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2942 __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
2898 __ cmp(edx, Operand(ecx)); | 2943 __ cmp(edx, ecx); |
2899 __ j(above_equal, &slow, Label::kNear); | 2944 __ j(above_equal, &slow, Label::kNear); |
2900 | 2945 |
2901 // Read the argument from the stack and return it. | 2946 // Read the argument from the stack and return it. |
2902 STATIC_ASSERT(kSmiTagSize == 1); | 2947 STATIC_ASSERT(kSmiTagSize == 1); |
2903 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these. | 2948 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these. |
2904 __ lea(ebx, Operand(ebx, ecx, times_2, 0)); | 2949 __ lea(ebx, Operand(ebx, ecx, times_2, 0)); |
2905 __ neg(edx); | 2950 __ neg(edx); |
2906 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement)); | 2951 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement)); |
2907 __ ret(0); | 2952 __ ret(0); |
2908 | 2953 |
(...skipping 10 matching lines...) Expand all Loading... |
2919 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { | 2964 void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) { |
2920 // esp[0] : return address | 2965 // esp[0] : return address |
2921 // esp[4] : number of parameters | 2966 // esp[4] : number of parameters |
2922 // esp[8] : receiver displacement | 2967 // esp[8] : receiver displacement |
2923 // esp[12] : function | 2968 // esp[12] : function |
2924 | 2969 |
2925 // Check if the calling frame is an arguments adaptor frame. | 2970 // Check if the calling frame is an arguments adaptor frame. |
2926 Label runtime; | 2971 Label runtime; |
2927 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); | 2972 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); |
2928 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset)); | 2973 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset)); |
2929 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 2974 __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
2930 __ j(not_equal, &runtime, Label::kNear); | 2975 __ j(not_equal, &runtime, Label::kNear); |
2931 | 2976 |
2932 // Patch the arguments.length and the parameters pointer. | 2977 // Patch the arguments.length and the parameters pointer. |
2933 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2978 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
2934 __ mov(Operand(esp, 1 * kPointerSize), ecx); | 2979 __ mov(Operand(esp, 1 * kPointerSize), ecx); |
2935 __ lea(edx, Operand(edx, ecx, times_2, | 2980 __ lea(edx, Operand(edx, ecx, times_2, |
2936 StandardFrameConstants::kCallerSPOffset)); | 2981 StandardFrameConstants::kCallerSPOffset)); |
2937 __ mov(Operand(esp, 2 * kPointerSize), edx); | 2982 __ mov(Operand(esp, 2 * kPointerSize), edx); |
2938 | 2983 |
2939 __ bind(&runtime); | 2984 __ bind(&runtime); |
(...skipping 10 matching lines...) Expand all Loading... |
2950 // ebx = parameter count (tagged) | 2995 // ebx = parameter count (tagged) |
2951 __ mov(ebx, Operand(esp, 1 * kPointerSize)); | 2996 __ mov(ebx, Operand(esp, 1 * kPointerSize)); |
2952 | 2997 |
2953 // Check if the calling frame is an arguments adaptor frame. | 2998 // Check if the calling frame is an arguments adaptor frame. |
2954 // TODO(rossberg): Factor out some of the bits that are shared with the other | 2999 // TODO(rossberg): Factor out some of the bits that are shared with the other |
2955 // Generate* functions. | 3000 // Generate* functions. |
2956 Label runtime; | 3001 Label runtime; |
2957 Label adaptor_frame, try_allocate; | 3002 Label adaptor_frame, try_allocate; |
2958 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); | 3003 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); |
2959 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset)); | 3004 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset)); |
2960 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 3005 __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
2961 __ j(equal, &adaptor_frame, Label::kNear); | 3006 __ j(equal, &adaptor_frame, Label::kNear); |
2962 | 3007 |
2963 // No adaptor, parameter count = argument count. | 3008 // No adaptor, parameter count = argument count. |
2964 __ mov(ecx, ebx); | 3009 __ mov(ecx, ebx); |
2965 __ jmp(&try_allocate, Label::kNear); | 3010 __ jmp(&try_allocate, Label::kNear); |
2966 | 3011 |
2967 // We have an adaptor frame. Patch the parameters pointer. | 3012 // We have an adaptor frame. Patch the parameters pointer. |
2968 __ bind(&adaptor_frame); | 3013 __ bind(&adaptor_frame); |
2969 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 3014 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
2970 __ lea(edx, Operand(edx, ecx, times_2, | 3015 __ lea(edx, Operand(edx, ecx, times_2, |
2971 StandardFrameConstants::kCallerSPOffset)); | 3016 StandardFrameConstants::kCallerSPOffset)); |
2972 __ mov(Operand(esp, 2 * kPointerSize), edx); | 3017 __ mov(Operand(esp, 2 * kPointerSize), edx); |
2973 | 3018 |
2974 // ebx = parameter count (tagged) | 3019 // ebx = parameter count (tagged) |
2975 // ecx = argument count (tagged) | 3020 // ecx = argument count (tagged) |
2976 // esp[4] = parameter count (tagged) | 3021 // esp[4] = parameter count (tagged) |
2977 // esp[8] = address of receiver argument | 3022 // esp[8] = address of receiver argument |
2978 // Compute the mapped parameter count = min(ebx, ecx) in ebx. | 3023 // Compute the mapped parameter count = min(ebx, ecx) in ebx. |
2979 __ cmp(ebx, Operand(ecx)); | 3024 __ cmp(ebx, ecx); |
2980 __ j(less_equal, &try_allocate, Label::kNear); | 3025 __ j(less_equal, &try_allocate, Label::kNear); |
2981 __ mov(ebx, ecx); | 3026 __ mov(ebx, ecx); |
2982 | 3027 |
2983 __ bind(&try_allocate); | 3028 __ bind(&try_allocate); |
2984 | 3029 |
2985 // Save mapped parameter count. | 3030 // Save mapped parameter count. |
2986 __ push(ebx); | 3031 __ push(ebx); |
2987 | 3032 |
2988 // Compute the sizes of backing store, parameter map, and arguments object. | 3033 // Compute the sizes of backing store, parameter map, and arguments object. |
2989 // 1. Parameter map, has 2 extra words containing context and backing store. | 3034 // 1. Parameter map, has 2 extra words containing context and backing store. |
2990 const int kParameterMapHeaderSize = | 3035 const int kParameterMapHeaderSize = |
2991 FixedArray::kHeaderSize + 2 * kPointerSize; | 3036 FixedArray::kHeaderSize + 2 * kPointerSize; |
2992 Label no_parameter_map; | 3037 Label no_parameter_map; |
2993 __ test(ebx, Operand(ebx)); | 3038 __ test(ebx, ebx); |
2994 __ j(zero, &no_parameter_map, Label::kNear); | 3039 __ j(zero, &no_parameter_map, Label::kNear); |
2995 __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize)); | 3040 __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize)); |
2996 __ bind(&no_parameter_map); | 3041 __ bind(&no_parameter_map); |
2997 | 3042 |
2998 // 2. Backing store. | 3043 // 2. Backing store. |
2999 __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize)); | 3044 __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize)); |
3000 | 3045 |
3001 // 3. Arguments object. | 3046 // 3. Arguments object. |
3002 __ add(Operand(ebx), Immediate(Heap::kArgumentsObjectSize)); | 3047 __ add(ebx, Immediate(Heap::kArgumentsObjectSize)); |
3003 | 3048 |
3004 // Do the allocation of all three objects in one go. | 3049 // Do the allocation of all three objects in one go. |
3005 __ AllocateInNewSpace(ebx, eax, edx, edi, &runtime, TAG_OBJECT); | 3050 __ AllocateInNewSpace(ebx, eax, edx, edi, &runtime, TAG_OBJECT); |
3006 | 3051 |
3007 // eax = address of new object(s) (tagged) | 3052 // eax = address of new object(s) (tagged) |
3008 // ecx = argument count (tagged) | 3053 // ecx = argument count (tagged) |
3009 // esp[0] = mapped parameter count (tagged) | 3054 // esp[0] = mapped parameter count (tagged) |
3010 // esp[8] = parameter count (tagged) | 3055 // esp[8] = parameter count (tagged) |
3011 // esp[12] = address of receiver argument | 3056 // esp[12] = address of receiver argument |
3012 // Get the arguments boilerplate from the current (global) context into edi. | 3057 // Get the arguments boilerplate from the current (global) context into edi. |
3013 Label has_mapped_parameters, copy; | 3058 Label has_mapped_parameters, copy; |
3014 __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 3059 __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
3015 __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset)); | 3060 __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset)); |
3016 __ mov(ebx, Operand(esp, 0 * kPointerSize)); | 3061 __ mov(ebx, Operand(esp, 0 * kPointerSize)); |
3017 __ test(ebx, Operand(ebx)); | 3062 __ test(ebx, ebx); |
3018 __ j(not_zero, &has_mapped_parameters, Label::kNear); | 3063 __ j(not_zero, &has_mapped_parameters, Label::kNear); |
3019 __ mov(edi, Operand(edi, | 3064 __ mov(edi, Operand(edi, |
3020 Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX))); | 3065 Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX))); |
3021 __ jmp(©, Label::kNear); | 3066 __ jmp(©, Label::kNear); |
3022 | 3067 |
3023 __ bind(&has_mapped_parameters); | 3068 __ bind(&has_mapped_parameters); |
3024 __ mov(edi, Operand(edi, | 3069 __ mov(edi, Operand(edi, |
3025 Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX))); | 3070 Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX))); |
3026 __ bind(©); | 3071 __ bind(©); |
3027 | 3072 |
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3062 // ecx = argument count (tagged) | 3107 // ecx = argument count (tagged) |
3063 // edi = address of parameter map or backing store (tagged) | 3108 // edi = address of parameter map or backing store (tagged) |
3064 // esp[0] = mapped parameter count (tagged) | 3109 // esp[0] = mapped parameter count (tagged) |
3065 // esp[8] = parameter count (tagged) | 3110 // esp[8] = parameter count (tagged) |
3066 // esp[12] = address of receiver argument | 3111 // esp[12] = address of receiver argument |
3067 // Free a register. | 3112 // Free a register. |
3068 __ push(eax); | 3113 __ push(eax); |
3069 | 3114 |
3070 // Initialize parameter map. If there are no mapped arguments, we're done. | 3115 // Initialize parameter map. If there are no mapped arguments, we're done. |
3071 Label skip_parameter_map; | 3116 Label skip_parameter_map; |
3072 __ test(ebx, Operand(ebx)); | 3117 __ test(ebx, ebx); |
3073 __ j(zero, &skip_parameter_map); | 3118 __ j(zero, &skip_parameter_map); |
3074 | 3119 |
3075 __ mov(FieldOperand(edi, FixedArray::kMapOffset), | 3120 __ mov(FieldOperand(edi, FixedArray::kMapOffset), |
3076 Immediate(FACTORY->non_strict_arguments_elements_map())); | 3121 Immediate(FACTORY->non_strict_arguments_elements_map())); |
3077 __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2)))); | 3122 __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2)))); |
3078 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax); | 3123 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax); |
3079 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi); | 3124 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi); |
3080 __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize)); | 3125 __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize)); |
3081 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax); | 3126 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax); |
3082 | 3127 |
3083 // Copy the parameter slots and the holes in the arguments. | 3128 // Copy the parameter slots and the holes in the arguments. |
3084 // We need to fill in mapped_parameter_count slots. They index the context, | 3129 // We need to fill in mapped_parameter_count slots. They index the context, |
3085 // where parameters are stored in reverse order, at | 3130 // where parameters are stored in reverse order, at |
3086 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 | 3131 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 |
3087 // The mapped parameter thus need to get indices | 3132 // The mapped parameter thus need to get indices |
3088 // MIN_CONTEXT_SLOTS+parameter_count-1 .. | 3133 // MIN_CONTEXT_SLOTS+parameter_count-1 .. |
3089 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count | 3134 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count |
3090 // We loop from right to left. | 3135 // We loop from right to left. |
3091 Label parameters_loop, parameters_test; | 3136 Label parameters_loop, parameters_test; |
3092 __ push(ecx); | 3137 __ push(ecx); |
3093 __ mov(eax, Operand(esp, 2 * kPointerSize)); | 3138 __ mov(eax, Operand(esp, 2 * kPointerSize)); |
3094 __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); | 3139 __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); |
3095 __ add(ebx, Operand(esp, 4 * kPointerSize)); | 3140 __ add(ebx, Operand(esp, 4 * kPointerSize)); |
3096 __ sub(ebx, Operand(eax)); | 3141 __ sub(ebx, eax); |
3097 __ mov(ecx, FACTORY->the_hole_value()); | 3142 __ mov(ecx, FACTORY->the_hole_value()); |
3098 __ mov(edx, edi); | 3143 __ mov(edx, edi); |
3099 __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize)); | 3144 __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize)); |
3100 // eax = loop variable (tagged) | 3145 // eax = loop variable (tagged) |
3101 // ebx = mapping index (tagged) | 3146 // ebx = mapping index (tagged) |
3102 // ecx = the hole value | 3147 // ecx = the hole value |
3103 // edx = address of parameter map (tagged) | 3148 // edx = address of parameter map (tagged) |
3104 // edi = address of backing store (tagged) | 3149 // edi = address of backing store (tagged) |
3105 // esp[0] = argument count (tagged) | 3150 // esp[0] = argument count (tagged) |
3106 // esp[4] = address of new object (tagged) | 3151 // esp[4] = address of new object (tagged) |
3107 // esp[8] = mapped parameter count (tagged) | 3152 // esp[8] = mapped parameter count (tagged) |
3108 // esp[16] = parameter count (tagged) | 3153 // esp[16] = parameter count (tagged) |
3109 // esp[20] = address of receiver argument | 3154 // esp[20] = address of receiver argument |
3110 __ jmp(¶meters_test, Label::kNear); | 3155 __ jmp(¶meters_test, Label::kNear); |
3111 | 3156 |
3112 __ bind(¶meters_loop); | 3157 __ bind(¶meters_loop); |
3113 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); | 3158 __ sub(eax, Immediate(Smi::FromInt(1))); |
3114 __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx); | 3159 __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx); |
3115 __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx); | 3160 __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx); |
3116 __ add(Operand(ebx), Immediate(Smi::FromInt(1))); | 3161 __ add(ebx, Immediate(Smi::FromInt(1))); |
3117 __ bind(¶meters_test); | 3162 __ bind(¶meters_test); |
3118 __ test(eax, Operand(eax)); | 3163 __ test(eax, eax); |
3119 __ j(not_zero, ¶meters_loop, Label::kNear); | 3164 __ j(not_zero, ¶meters_loop, Label::kNear); |
3120 __ pop(ecx); | 3165 __ pop(ecx); |
3121 | 3166 |
3122 __ bind(&skip_parameter_map); | 3167 __ bind(&skip_parameter_map); |
3123 | 3168 |
3124 // ecx = argument count (tagged) | 3169 // ecx = argument count (tagged) |
3125 // edi = address of backing store (tagged) | 3170 // edi = address of backing store (tagged) |
3126 // esp[0] = address of new object (tagged) | 3171 // esp[0] = address of new object (tagged) |
3127 // esp[4] = mapped parameter count (tagged) | 3172 // esp[4] = mapped parameter count (tagged) |
3128 // esp[12] = parameter count (tagged) | 3173 // esp[12] = parameter count (tagged) |
3129 // esp[16] = address of receiver argument | 3174 // esp[16] = address of receiver argument |
3130 // Copy arguments header and remaining slots (if there are any). | 3175 // Copy arguments header and remaining slots (if there are any). |
3131 __ mov(FieldOperand(edi, FixedArray::kMapOffset), | 3176 __ mov(FieldOperand(edi, FixedArray::kMapOffset), |
3132 Immediate(FACTORY->fixed_array_map())); | 3177 Immediate(FACTORY->fixed_array_map())); |
3133 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); | 3178 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); |
3134 | 3179 |
3135 Label arguments_loop, arguments_test; | 3180 Label arguments_loop, arguments_test; |
3136 __ mov(ebx, Operand(esp, 1 * kPointerSize)); | 3181 __ mov(ebx, Operand(esp, 1 * kPointerSize)); |
3137 __ mov(edx, Operand(esp, 4 * kPointerSize)); | 3182 __ mov(edx, Operand(esp, 4 * kPointerSize)); |
3138 __ sub(Operand(edx), ebx); // Is there a smarter way to do negative scaling? | 3183 __ sub(edx, ebx); // Is there a smarter way to do negative scaling? |
3139 __ sub(Operand(edx), ebx); | 3184 __ sub(edx, ebx); |
3140 __ jmp(&arguments_test, Label::kNear); | 3185 __ jmp(&arguments_test, Label::kNear); |
3141 | 3186 |
3142 __ bind(&arguments_loop); | 3187 __ bind(&arguments_loop); |
3143 __ sub(Operand(edx), Immediate(kPointerSize)); | 3188 __ sub(edx, Immediate(kPointerSize)); |
3144 __ mov(eax, Operand(edx, 0)); | 3189 __ mov(eax, Operand(edx, 0)); |
3145 __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax); | 3190 __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax); |
3146 __ add(Operand(ebx), Immediate(Smi::FromInt(1))); | 3191 __ add(ebx, Immediate(Smi::FromInt(1))); |
3147 | 3192 |
3148 __ bind(&arguments_test); | 3193 __ bind(&arguments_test); |
3149 __ cmp(ebx, Operand(ecx)); | 3194 __ cmp(ebx, ecx); |
3150 __ j(less, &arguments_loop, Label::kNear); | 3195 __ j(less, &arguments_loop, Label::kNear); |
3151 | 3196 |
3152 // Restore. | 3197 // Restore. |
3153 __ pop(eax); // Address of arguments object. | 3198 __ pop(eax); // Address of arguments object. |
3154 __ pop(ebx); // Parameter count. | 3199 __ pop(ebx); // Parameter count. |
3155 | 3200 |
3156 // Return and remove the on-stack parameters. | 3201 // Return and remove the on-stack parameters. |
3157 __ ret(3 * kPointerSize); | 3202 __ ret(3 * kPointerSize); |
3158 | 3203 |
3159 // Do the runtime call to allocate the arguments object. | 3204 // Do the runtime call to allocate the arguments object. |
3160 __ bind(&runtime); | 3205 __ bind(&runtime); |
3161 __ pop(eax); // Remove saved parameter count. | 3206 __ pop(eax); // Remove saved parameter count. |
3162 __ mov(Operand(esp, 1 * kPointerSize), ecx); // Patch argument count. | 3207 __ mov(Operand(esp, 1 * kPointerSize), ecx); // Patch argument count. |
3163 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1); | 3208 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1); |
3164 } | 3209 } |
3165 | 3210 |
3166 | 3211 |
3167 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | 3212 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { |
3168 // esp[0] : return address | 3213 // esp[0] : return address |
3169 // esp[4] : number of parameters | 3214 // esp[4] : number of parameters |
3170 // esp[8] : receiver displacement | 3215 // esp[8] : receiver displacement |
3171 // esp[12] : function | 3216 // esp[12] : function |
3172 | 3217 |
3173 // Check if the calling frame is an arguments adaptor frame. | 3218 // Check if the calling frame is an arguments adaptor frame. |
3174 Label adaptor_frame, try_allocate, runtime; | 3219 Label adaptor_frame, try_allocate, runtime; |
3175 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); | 3220 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); |
3176 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset)); | 3221 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset)); |
3177 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 3222 __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
3178 __ j(equal, &adaptor_frame, Label::kNear); | 3223 __ j(equal, &adaptor_frame, Label::kNear); |
3179 | 3224 |
3180 // Get the length from the frame. | 3225 // Get the length from the frame. |
3181 __ mov(ecx, Operand(esp, 1 * kPointerSize)); | 3226 __ mov(ecx, Operand(esp, 1 * kPointerSize)); |
3182 __ jmp(&try_allocate, Label::kNear); | 3227 __ jmp(&try_allocate, Label::kNear); |
3183 | 3228 |
3184 // Patch the arguments.length and the parameters pointer. | 3229 // Patch the arguments.length and the parameters pointer. |
3185 __ bind(&adaptor_frame); | 3230 __ bind(&adaptor_frame); |
3186 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 3231 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
3187 __ mov(Operand(esp, 1 * kPointerSize), ecx); | 3232 __ mov(Operand(esp, 1 * kPointerSize), ecx); |
3188 __ lea(edx, Operand(edx, ecx, times_2, | 3233 __ lea(edx, Operand(edx, ecx, times_2, |
3189 StandardFrameConstants::kCallerSPOffset)); | 3234 StandardFrameConstants::kCallerSPOffset)); |
3190 __ mov(Operand(esp, 2 * kPointerSize), edx); | 3235 __ mov(Operand(esp, 2 * kPointerSize), edx); |
3191 | 3236 |
3192 // Try the new space allocation. Start out with computing the size of | 3237 // Try the new space allocation. Start out with computing the size of |
3193 // the arguments object and the elements array. | 3238 // the arguments object and the elements array. |
3194 Label add_arguments_object; | 3239 Label add_arguments_object; |
3195 __ bind(&try_allocate); | 3240 __ bind(&try_allocate); |
3196 __ test(ecx, Operand(ecx)); | 3241 __ test(ecx, ecx); |
3197 __ j(zero, &add_arguments_object, Label::kNear); | 3242 __ j(zero, &add_arguments_object, Label::kNear); |
3198 __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize)); | 3243 __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize)); |
3199 __ bind(&add_arguments_object); | 3244 __ bind(&add_arguments_object); |
3200 __ add(Operand(ecx), Immediate(Heap::kArgumentsObjectSizeStrict)); | 3245 __ add(ecx, Immediate(Heap::kArgumentsObjectSizeStrict)); |
3201 | 3246 |
3202 // Do the allocation of both objects in one go. | 3247 // Do the allocation of both objects in one go. |
3203 __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT); | 3248 __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT); |
3204 | 3249 |
3205 // Get the arguments boilerplate from the current (global) context. | 3250 // Get the arguments boilerplate from the current (global) context. |
3206 __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 3251 __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
3207 __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset)); | 3252 __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset)); |
3208 const int offset = | 3253 const int offset = |
3209 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX); | 3254 Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX); |
3210 __ mov(edi, Operand(edi, offset)); | 3255 __ mov(edi, Operand(edi, offset)); |
3211 | 3256 |
3212 // Copy the JS object part. | 3257 // Copy the JS object part. |
3213 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { | 3258 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { |
3214 __ mov(ebx, FieldOperand(edi, i)); | 3259 __ mov(ebx, FieldOperand(edi, i)); |
3215 __ mov(FieldOperand(eax, i), ebx); | 3260 __ mov(FieldOperand(eax, i), ebx); |
3216 } | 3261 } |
3217 | 3262 |
3218 // Get the length (smi tagged) and set that as an in-object property too. | 3263 // Get the length (smi tagged) and set that as an in-object property too. |
3219 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 3264 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
3220 __ mov(ecx, Operand(esp, 1 * kPointerSize)); | 3265 __ mov(ecx, Operand(esp, 1 * kPointerSize)); |
3221 __ mov(FieldOperand(eax, JSObject::kHeaderSize + | 3266 __ mov(FieldOperand(eax, JSObject::kHeaderSize + |
3222 Heap::kArgumentsLengthIndex * kPointerSize), | 3267 Heap::kArgumentsLengthIndex * kPointerSize), |
3223 ecx); | 3268 ecx); |
3224 | 3269 |
3225 // If there are no actual arguments, we're done. | 3270 // If there are no actual arguments, we're done. |
3226 Label done; | 3271 Label done; |
3227 __ test(ecx, Operand(ecx)); | 3272 __ test(ecx, ecx); |
3228 __ j(zero, &done, Label::kNear); | 3273 __ j(zero, &done, Label::kNear); |
3229 | 3274 |
3230 // Get the parameters pointer from the stack. | 3275 // Get the parameters pointer from the stack. |
3231 __ mov(edx, Operand(esp, 2 * kPointerSize)); | 3276 __ mov(edx, Operand(esp, 2 * kPointerSize)); |
3232 | 3277 |
3233 // Setup the elements pointer in the allocated arguments object and | 3278 // Setup the elements pointer in the allocated arguments object and |
3234 // initialize the header in the elements fixed array. | 3279 // initialize the header in the elements fixed array. |
3235 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSizeStrict)); | 3280 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSizeStrict)); |
3236 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); | 3281 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); |
3237 __ mov(FieldOperand(edi, FixedArray::kMapOffset), | 3282 __ mov(FieldOperand(edi, FixedArray::kMapOffset), |
3238 Immediate(FACTORY->fixed_array_map())); | 3283 Immediate(FACTORY->fixed_array_map())); |
3239 | 3284 |
3240 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); | 3285 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); |
3241 // Untag the length for the loop below. | 3286 // Untag the length for the loop below. |
3242 __ SmiUntag(ecx); | 3287 __ SmiUntag(ecx); |
3243 | 3288 |
3244 // Copy the fixed array slots. | 3289 // Copy the fixed array slots. |
3245 Label loop; | 3290 Label loop; |
3246 __ bind(&loop); | 3291 __ bind(&loop); |
3247 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. | 3292 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. |
3248 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); | 3293 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); |
3249 __ add(Operand(edi), Immediate(kPointerSize)); | 3294 __ add(edi, Immediate(kPointerSize)); |
3250 __ sub(Operand(edx), Immediate(kPointerSize)); | 3295 __ sub(edx, Immediate(kPointerSize)); |
3251 __ dec(ecx); | 3296 __ dec(ecx); |
3252 __ j(not_zero, &loop); | 3297 __ j(not_zero, &loop); |
3253 | 3298 |
3254 // Return and remove the on-stack parameters. | 3299 // Return and remove the on-stack parameters. |
3255 __ bind(&done); | 3300 __ bind(&done); |
3256 __ ret(3 * kPointerSize); | 3301 __ ret(3 * kPointerSize); |
3257 | 3302 |
3258 // Do the runtime call to allocate the arguments object. | 3303 // Do the runtime call to allocate the arguments object. |
3259 __ bind(&runtime); | 3304 __ bind(&runtime); |
3260 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1); | 3305 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1); |
(...skipping 26 matching lines...) Expand all Loading... |
3287 | 3332 |
3288 Label runtime, invoke_regexp; | 3333 Label runtime, invoke_regexp; |
3289 | 3334 |
3290 // Ensure that a RegExp stack is allocated. | 3335 // Ensure that a RegExp stack is allocated. |
3291 ExternalReference address_of_regexp_stack_memory_address = | 3336 ExternalReference address_of_regexp_stack_memory_address = |
3292 ExternalReference::address_of_regexp_stack_memory_address( | 3337 ExternalReference::address_of_regexp_stack_memory_address( |
3293 masm->isolate()); | 3338 masm->isolate()); |
3294 ExternalReference address_of_regexp_stack_memory_size = | 3339 ExternalReference address_of_regexp_stack_memory_size = |
3295 ExternalReference::address_of_regexp_stack_memory_size(masm->isolate()); | 3340 ExternalReference::address_of_regexp_stack_memory_size(masm->isolate()); |
3296 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); | 3341 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); |
3297 __ test(ebx, Operand(ebx)); | 3342 __ test(ebx, ebx); |
3298 __ j(zero, &runtime); | 3343 __ j(zero, &runtime); |
3299 | 3344 |
3300 // Check that the first argument is a JSRegExp object. | 3345 // Check that the first argument is a JSRegExp object. |
3301 __ mov(eax, Operand(esp, kJSRegExpOffset)); | 3346 __ mov(eax, Operand(esp, kJSRegExpOffset)); |
3302 STATIC_ASSERT(kSmiTag == 0); | 3347 STATIC_ASSERT(kSmiTag == 0); |
3303 __ JumpIfSmi(eax, &runtime); | 3348 __ JumpIfSmi(eax, &runtime); |
3304 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx); | 3349 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx); |
3305 __ j(not_equal, &runtime); | 3350 __ j(not_equal, &runtime); |
3306 // Check that the RegExp has been compiled (data contains a fixed array). | 3351 // Check that the RegExp has been compiled (data contains a fixed array). |
3307 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); | 3352 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); |
3308 if (FLAG_debug_code) { | 3353 if (FLAG_debug_code) { |
3309 __ test(ecx, Immediate(kSmiTagMask)); | 3354 __ test(ecx, Immediate(kSmiTagMask)); |
3310 __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected"); | 3355 __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected"); |
3311 __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx); | 3356 __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx); |
3312 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected"); | 3357 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected"); |
3313 } | 3358 } |
3314 | 3359 |
3315 // ecx: RegExp data (FixedArray) | 3360 // ecx: RegExp data (FixedArray) |
3316 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. | 3361 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. |
3317 __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset)); | 3362 __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset)); |
3318 __ cmp(Operand(ebx), Immediate(Smi::FromInt(JSRegExp::IRREGEXP))); | 3363 __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP))); |
3319 __ j(not_equal, &runtime); | 3364 __ j(not_equal, &runtime); |
3320 | 3365 |
3321 // ecx: RegExp data (FixedArray) | 3366 // ecx: RegExp data (FixedArray) |
3322 // Check that the number of captures fit in the static offsets vector buffer. | 3367 // Check that the number of captures fit in the static offsets vector buffer. |
3323 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset)); | 3368 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset)); |
3324 // Calculate number of capture registers (number_of_captures + 1) * 2. This | 3369 // Calculate number of capture registers (number_of_captures + 1) * 2. This |
3325 // uses the asumption that smis are 2 * their untagged value. | 3370 // uses the asumption that smis are 2 * their untagged value. |
3326 STATIC_ASSERT(kSmiTag == 0); | 3371 STATIC_ASSERT(kSmiTag == 0); |
3327 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); | 3372 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); |
3328 __ add(Operand(edx), Immediate(2)); // edx was a smi. | 3373 __ add(edx, Immediate(2)); // edx was a smi. |
3329 // Check that the static offsets vector buffer is large enough. | 3374 // Check that the static offsets vector buffer is large enough. |
3330 __ cmp(edx, OffsetsVector::kStaticOffsetsVectorSize); | 3375 __ cmp(edx, OffsetsVector::kStaticOffsetsVectorSize); |
3331 __ j(above, &runtime); | 3376 __ j(above, &runtime); |
3332 | 3377 |
3333 // ecx: RegExp data (FixedArray) | 3378 // ecx: RegExp data (FixedArray) |
3334 // edx: Number of capture registers | 3379 // edx: Number of capture registers |
3335 // Check that the second argument is a string. | 3380 // Check that the second argument is a string. |
3336 __ mov(eax, Operand(esp, kSubjectOffset)); | 3381 __ mov(eax, Operand(esp, kSubjectOffset)); |
3337 __ JumpIfSmi(eax, &runtime); | 3382 __ JumpIfSmi(eax, &runtime); |
3338 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx); | 3383 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx); |
3339 __ j(NegateCondition(is_string), &runtime); | 3384 __ j(NegateCondition(is_string), &runtime); |
3340 // Get the length of the string to ebx. | 3385 // Get the length of the string to ebx. |
3341 __ mov(ebx, FieldOperand(eax, String::kLengthOffset)); | 3386 __ mov(ebx, FieldOperand(eax, String::kLengthOffset)); |
3342 | 3387 |
3343 // ebx: Length of subject string as a smi | 3388 // ebx: Length of subject string as a smi |
3344 // ecx: RegExp data (FixedArray) | 3389 // ecx: RegExp data (FixedArray) |
3345 // edx: Number of capture registers | 3390 // edx: Number of capture registers |
3346 // Check that the third argument is a positive smi less than the subject | 3391 // Check that the third argument is a positive smi less than the subject |
3347 // string length. A negative value will be greater (unsigned comparison). | 3392 // string length. A negative value will be greater (unsigned comparison). |
3348 __ mov(eax, Operand(esp, kPreviousIndexOffset)); | 3393 __ mov(eax, Operand(esp, kPreviousIndexOffset)); |
3349 __ JumpIfNotSmi(eax, &runtime); | 3394 __ JumpIfNotSmi(eax, &runtime); |
3350 __ cmp(eax, Operand(ebx)); | 3395 __ cmp(eax, ebx); |
3351 __ j(above_equal, &runtime); | 3396 __ j(above_equal, &runtime); |
3352 | 3397 |
3353 // ecx: RegExp data (FixedArray) | 3398 // ecx: RegExp data (FixedArray) |
3354 // edx: Number of capture registers | 3399 // edx: Number of capture registers |
3355 // Check that the fourth object is a JSArray object. | 3400 // Check that the fourth object is a JSArray object. |
3356 __ mov(eax, Operand(esp, kLastMatchInfoOffset)); | 3401 __ mov(eax, Operand(esp, kLastMatchInfoOffset)); |
3357 __ JumpIfSmi(eax, &runtime); | 3402 __ JumpIfSmi(eax, &runtime); |
3358 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx); | 3403 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx); |
3359 __ j(not_equal, &runtime); | 3404 __ j(not_equal, &runtime); |
3360 // Check that the JSArray is in fast case. | 3405 // Check that the JSArray is in fast case. |
3361 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset)); | 3406 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset)); |
3362 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset)); | 3407 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset)); |
3363 Factory* factory = masm->isolate()->factory(); | 3408 Factory* factory = masm->isolate()->factory(); |
3364 __ cmp(eax, factory->fixed_array_map()); | 3409 __ cmp(eax, factory->fixed_array_map()); |
3365 __ j(not_equal, &runtime); | 3410 __ j(not_equal, &runtime); |
3366 // Check that the last match info has space for the capture registers and the | 3411 // Check that the last match info has space for the capture registers and the |
3367 // additional information. | 3412 // additional information. |
3368 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset)); | 3413 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset)); |
3369 __ SmiUntag(eax); | 3414 __ SmiUntag(eax); |
3370 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead)); | 3415 __ add(edx, Immediate(RegExpImpl::kLastMatchOverhead)); |
3371 __ cmp(edx, Operand(eax)); | 3416 __ cmp(edx, eax); |
3372 __ j(greater, &runtime); | 3417 __ j(greater, &runtime); |
3373 | 3418 |
3374 // Reset offset for possibly sliced string. | 3419 // Reset offset for possibly sliced string. |
3375 __ Set(edi, Immediate(0)); | 3420 __ Set(edi, Immediate(0)); |
3376 // ecx: RegExp data (FixedArray) | 3421 // ecx: RegExp data (FixedArray) |
3377 // Check the representation and encoding of the subject string. | 3422 // Check the representation and encoding of the subject string. |
3378 Label seq_ascii_string, seq_two_byte_string, check_code; | 3423 Label seq_ascii_string, seq_two_byte_string, check_code; |
3379 __ mov(eax, Operand(esp, kSubjectOffset)); | 3424 __ mov(eax, Operand(esp, kSubjectOffset)); |
3380 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); | 3425 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); |
3381 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset)); | 3426 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset)); |
3382 // First check for flat two byte string. | 3427 // First check for flat two byte string. |
3383 __ and_(ebx, | 3428 __ and_(ebx, |
3384 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask); | 3429 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask); |
3385 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0); | 3430 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0); |
3386 __ j(zero, &seq_two_byte_string, Label::kNear); | 3431 __ j(zero, &seq_two_byte_string, Label::kNear); |
3387 // Any other flat string must be a flat ascii string. | 3432 // Any other flat string must be a flat ascii string. |
3388 __ and_(Operand(ebx), | 3433 __ and_(ebx, Immediate(kIsNotStringMask | kStringRepresentationMask)); |
3389 Immediate(kIsNotStringMask | kStringRepresentationMask)); | |
3390 __ j(zero, &seq_ascii_string, Label::kNear); | 3434 __ j(zero, &seq_ascii_string, Label::kNear); |
3391 | 3435 |
3392 // Check for flat cons string or sliced string. | 3436 // Check for flat cons string or sliced string. |
3393 // A flat cons string is a cons string where the second part is the empty | 3437 // A flat cons string is a cons string where the second part is the empty |
3394 // string. In that case the subject string is just the first part of the cons | 3438 // string. In that case the subject string is just the first part of the cons |
3395 // string. Also in this case the first part of the cons string is known to be | 3439 // string. Also in this case the first part of the cons string is known to be |
3396 // a sequential string or an external string. | 3440 // a sequential string or an external string. |
3397 // In the case of a sliced string its offset has to be taken into account. | 3441 // In the case of a sliced string its offset has to be taken into account. |
3398 Label cons_string, check_encoding; | 3442 Label cons_string, check_encoding; |
3399 STATIC_ASSERT(kConsStringTag < kExternalStringTag); | 3443 STATIC_ASSERT(kConsStringTag < kExternalStringTag); |
3400 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag); | 3444 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag); |
3401 __ cmp(Operand(ebx), Immediate(kExternalStringTag)); | 3445 __ cmp(ebx, Immediate(kExternalStringTag)); |
3402 __ j(less, &cons_string); | 3446 __ j(less, &cons_string); |
3403 __ j(equal, &runtime); | 3447 __ j(equal, &runtime); |
3404 | 3448 |
3405 // String is sliced. | 3449 // String is sliced. |
3406 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset)); | 3450 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset)); |
3407 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset)); | 3451 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset)); |
3408 // edi: offset of sliced string, smi-tagged. | 3452 // edi: offset of sliced string, smi-tagged. |
3409 // eax: parent string. | 3453 // eax: parent string. |
3410 __ jmp(&check_encoding, Label::kNear); | 3454 __ jmp(&check_encoding, Label::kNear); |
3411 // String is a cons string, check whether it is flat. | 3455 // String is a cons string, check whether it is flat. |
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3497 // esi: original subject string | 3541 // esi: original subject string |
3498 // eax: underlying subject string | 3542 // eax: underlying subject string |
3499 // ebx: previous index | 3543 // ebx: previous index |
3500 // ecx: encoding of subject string (1 if ascii 0 if two_byte); | 3544 // ecx: encoding of subject string (1 if ascii 0 if two_byte); |
3501 // edx: code | 3545 // edx: code |
3502 // Argument 4: End of string data | 3546 // Argument 4: End of string data |
3503 // Argument 3: Start of string data | 3547 // Argument 3: Start of string data |
3504 // Prepare start and end index of the input. | 3548 // Prepare start and end index of the input. |
3505 // Load the length from the original sliced string if that is the case. | 3549 // Load the length from the original sliced string if that is the case. |
3506 __ mov(esi, FieldOperand(esi, String::kLengthOffset)); | 3550 __ mov(esi, FieldOperand(esi, String::kLengthOffset)); |
3507 __ add(esi, Operand(edi)); // Calculate input end wrt offset. | 3551 __ add(esi, edi); // Calculate input end wrt offset. |
3508 __ SmiUntag(edi); | 3552 __ SmiUntag(edi); |
3509 __ add(ebx, Operand(edi)); // Calculate input start wrt offset. | 3553 __ add(ebx, edi); // Calculate input start wrt offset. |
3510 | 3554 |
3511 // ebx: start index of the input string | 3555 // ebx: start index of the input string |
3512 // esi: end index of the input string | 3556 // esi: end index of the input string |
3513 Label setup_two_byte, setup_rest; | 3557 Label setup_two_byte, setup_rest; |
3514 __ test(ecx, Operand(ecx)); | 3558 __ test(ecx, ecx); |
3515 __ j(zero, &setup_two_byte, Label::kNear); | 3559 __ j(zero, &setup_two_byte, Label::kNear); |
3516 __ SmiUntag(esi); | 3560 __ SmiUntag(esi); |
3517 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqAsciiString::kHeaderSize)); | 3561 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqAsciiString::kHeaderSize)); |
3518 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. | 3562 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. |
3519 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize)); | 3563 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize)); |
3520 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. | 3564 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. |
3521 __ jmp(&setup_rest, Label::kNear); | 3565 __ jmp(&setup_rest, Label::kNear); |
3522 | 3566 |
3523 __ bind(&setup_two_byte); | 3567 __ bind(&setup_two_byte); |
3524 STATIC_ASSERT(kSmiTag == 0); | 3568 STATIC_ASSERT(kSmiTag == 0); |
3525 STATIC_ASSERT(kSmiTagSize == 1); // esi is smi (powered by 2). | 3569 STATIC_ASSERT(kSmiTagSize == 1); // esi is smi (powered by 2). |
3526 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize)); | 3570 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize)); |
3527 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. | 3571 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. |
3528 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize)); | 3572 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize)); |
3529 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. | 3573 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. |
3530 | 3574 |
3531 __ bind(&setup_rest); | 3575 __ bind(&setup_rest); |
3532 | 3576 |
3533 // Locate the code entry and call it. | 3577 // Locate the code entry and call it. |
3534 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag)); | 3578 __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
3535 __ call(Operand(edx)); | 3579 __ call(edx); |
3536 | 3580 |
3537 // Drop arguments and come back to JS mode. | 3581 // Drop arguments and come back to JS mode. |
3538 __ LeaveApiExitFrame(); | 3582 __ LeaveApiExitFrame(); |
3539 | 3583 |
3540 // Check the result. | 3584 // Check the result. |
3541 Label success; | 3585 Label success; |
3542 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS); | 3586 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS); |
3543 __ j(equal, &success); | 3587 __ j(equal, &success); |
3544 Label failure; | 3588 Label failure; |
3545 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE); | 3589 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE); |
3546 __ j(equal, &failure); | 3590 __ j(equal, &failure); |
3547 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION); | 3591 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION); |
3548 // If not exception it can only be retry. Handle that in the runtime system. | 3592 // If not exception it can only be retry. Handle that in the runtime system. |
3549 __ j(not_equal, &runtime); | 3593 __ j(not_equal, &runtime); |
3550 // Result must now be exception. If there is no pending exception already a | 3594 // Result must now be exception. If there is no pending exception already a |
3551 // stack overflow (on the backtrack stack) was detected in RegExp code but | 3595 // stack overflow (on the backtrack stack) was detected in RegExp code but |
3552 // haven't created the exception yet. Handle that in the runtime system. | 3596 // haven't created the exception yet. Handle that in the runtime system. |
3553 // TODO(592): Rerunning the RegExp to get the stack overflow exception. | 3597 // TODO(592): Rerunning the RegExp to get the stack overflow exception. |
3554 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, | 3598 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, |
3555 masm->isolate()); | 3599 masm->isolate()); |
3556 __ mov(edx, | 3600 __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value())); |
3557 Operand::StaticVariable(ExternalReference::the_hole_value_location( | |
3558 masm->isolate()))); | |
3559 __ mov(eax, Operand::StaticVariable(pending_exception)); | 3601 __ mov(eax, Operand::StaticVariable(pending_exception)); |
3560 __ cmp(edx, Operand(eax)); | 3602 __ cmp(edx, eax); |
3561 __ j(equal, &runtime); | 3603 __ j(equal, &runtime); |
3562 // For exception, throw the exception again. | 3604 // For exception, throw the exception again. |
3563 | 3605 |
3564 // Clear the pending exception variable. | 3606 // Clear the pending exception variable. |
3565 __ mov(Operand::StaticVariable(pending_exception), edx); | 3607 __ mov(Operand::StaticVariable(pending_exception), edx); |
3566 | 3608 |
3567 // Special handling of termination exceptions which are uncatchable | 3609 // Special handling of termination exceptions which are uncatchable |
3568 // by javascript code. | 3610 // by javascript code. |
3569 __ cmp(eax, factory->termination_exception()); | 3611 __ cmp(eax, factory->termination_exception()); |
3570 Label throw_termination_exception; | 3612 Label throw_termination_exception; |
3571 __ j(equal, &throw_termination_exception, Label::kNear); | 3613 __ j(equal, &throw_termination_exception, Label::kNear); |
3572 | 3614 |
3573 // Handle normal exception by following handler chain. | 3615 // Handle normal exception by following handler chain. |
3574 __ Throw(eax); | 3616 __ Throw(eax); |
3575 | 3617 |
3576 __ bind(&throw_termination_exception); | 3618 __ bind(&throw_termination_exception); |
3577 __ ThrowUncatchable(TERMINATION, eax); | 3619 __ ThrowUncatchable(TERMINATION, eax); |
3578 | 3620 |
3579 __ bind(&failure); | 3621 __ bind(&failure); |
3580 // For failure to match, return null. | 3622 // For failure to match, return null. |
3581 __ mov(Operand(eax), factory->null_value()); | 3623 __ mov(eax, factory->null_value()); |
3582 __ ret(4 * kPointerSize); | 3624 __ ret(4 * kPointerSize); |
3583 | 3625 |
3584 // Load RegExp data. | 3626 // Load RegExp data. |
3585 __ bind(&success); | 3627 __ bind(&success); |
3586 __ mov(eax, Operand(esp, kJSRegExpOffset)); | 3628 __ mov(eax, Operand(esp, kJSRegExpOffset)); |
3587 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); | 3629 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); |
3588 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset)); | 3630 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset)); |
3589 // Calculate number of capture registers (number_of_captures + 1) * 2. | 3631 // Calculate number of capture registers (number_of_captures + 1) * 2. |
3590 STATIC_ASSERT(kSmiTag == 0); | 3632 STATIC_ASSERT(kSmiTag == 0); |
3591 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); | 3633 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); |
3592 __ add(Operand(edx), Immediate(2)); // edx was a smi. | 3634 __ add(edx, Immediate(2)); // edx was a smi. |
3593 | 3635 |
3594 // edx: Number of capture registers | 3636 // edx: Number of capture registers |
3595 // Load last_match_info which is still known to be a fast case JSArray. | 3637 // Load last_match_info which is still known to be a fast case JSArray. |
3596 __ mov(eax, Operand(esp, kLastMatchInfoOffset)); | 3638 __ mov(eax, Operand(esp, kLastMatchInfoOffset)); |
3597 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset)); | 3639 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset)); |
3598 | 3640 |
3599 // ebx: last_match_info backing store (FixedArray) | 3641 // ebx: last_match_info backing store (FixedArray) |
3600 // edx: number of capture registers | 3642 // edx: number of capture registers |
3601 // Store the capture count. | 3643 // Store the capture count. |
3602 __ SmiTag(edx); // Number of capture registers to smi. | 3644 __ SmiTag(edx); // Number of capture registers to smi. |
3603 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx); | 3645 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx); |
3604 __ SmiUntag(edx); // Number of capture registers back from smi. | 3646 __ SmiUntag(edx); // Number of capture registers back from smi. |
3605 // Store last subject and last input. | 3647 // Store last subject and last input. |
3606 __ mov(eax, Operand(esp, kSubjectOffset)); | 3648 __ mov(eax, Operand(esp, kSubjectOffset)); |
3607 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax); | 3649 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax); |
3608 __ mov(ecx, ebx); | 3650 __ RecordWriteField(ebx, |
3609 __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi); | 3651 RegExpImpl::kLastSubjectOffset, |
| 3652 eax, |
| 3653 edi, |
| 3654 kDontSaveFPRegs); |
3610 __ mov(eax, Operand(esp, kSubjectOffset)); | 3655 __ mov(eax, Operand(esp, kSubjectOffset)); |
3611 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax); | 3656 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax); |
3612 __ mov(ecx, ebx); | 3657 __ RecordWriteField(ebx, |
3613 __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi); | 3658 RegExpImpl::kLastInputOffset, |
| 3659 eax, |
| 3660 edi, |
| 3661 kDontSaveFPRegs); |
3614 | 3662 |
3615 // Get the static offsets vector filled by the native regexp code. | 3663 // Get the static offsets vector filled by the native regexp code. |
3616 ExternalReference address_of_static_offsets_vector = | 3664 ExternalReference address_of_static_offsets_vector = |
3617 ExternalReference::address_of_static_offsets_vector(masm->isolate()); | 3665 ExternalReference::address_of_static_offsets_vector(masm->isolate()); |
3618 __ mov(ecx, Immediate(address_of_static_offsets_vector)); | 3666 __ mov(ecx, Immediate(address_of_static_offsets_vector)); |
3619 | 3667 |
3620 // ebx: last_match_info backing store (FixedArray) | 3668 // ebx: last_match_info backing store (FixedArray) |
3621 // ecx: offsets vector | 3669 // ecx: offsets vector |
3622 // edx: number of capture registers | 3670 // edx: number of capture registers |
3623 Label next_capture, done; | 3671 Label next_capture, done; |
3624 // Capture register counter starts from number of capture registers and | 3672 // Capture register counter starts from number of capture registers and |
3625 // counts down until wraping after zero. | 3673 // counts down until wraping after zero. |
3626 __ bind(&next_capture); | 3674 __ bind(&next_capture); |
3627 __ sub(Operand(edx), Immediate(1)); | 3675 __ sub(edx, Immediate(1)); |
3628 __ j(negative, &done, Label::kNear); | 3676 __ j(negative, &done, Label::kNear); |
3629 // Read the value from the static offsets vector buffer. | 3677 // Read the value from the static offsets vector buffer. |
3630 __ mov(edi, Operand(ecx, edx, times_int_size, 0)); | 3678 __ mov(edi, Operand(ecx, edx, times_int_size, 0)); |
3631 __ SmiTag(edi); | 3679 __ SmiTag(edi); |
3632 // Store the smi value in the last match info. | 3680 // Store the smi value in the last match info. |
3633 __ mov(FieldOperand(ebx, | 3681 __ mov(FieldOperand(ebx, |
3634 edx, | 3682 edx, |
3635 times_pointer_size, | 3683 times_pointer_size, |
3636 RegExpImpl::kFirstCaptureOffset), | 3684 RegExpImpl::kFirstCaptureOffset), |
3637 edi); | 3685 edi); |
(...skipping 10 matching lines...) Expand all Loading... |
3648 #endif // V8_INTERPRETED_REGEXP | 3696 #endif // V8_INTERPRETED_REGEXP |
3649 } | 3697 } |
3650 | 3698 |
3651 | 3699 |
3652 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { | 3700 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { |
3653 const int kMaxInlineLength = 100; | 3701 const int kMaxInlineLength = 100; |
3654 Label slowcase; | 3702 Label slowcase; |
3655 Label done; | 3703 Label done; |
3656 __ mov(ebx, Operand(esp, kPointerSize * 3)); | 3704 __ mov(ebx, Operand(esp, kPointerSize * 3)); |
3657 __ JumpIfNotSmi(ebx, &slowcase); | 3705 __ JumpIfNotSmi(ebx, &slowcase); |
3658 __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength))); | 3706 __ cmp(ebx, Immediate(Smi::FromInt(kMaxInlineLength))); |
3659 __ j(above, &slowcase); | 3707 __ j(above, &slowcase); |
3660 // Smi-tagging is equivalent to multiplying by 2. | 3708 // Smi-tagging is equivalent to multiplying by 2. |
3661 STATIC_ASSERT(kSmiTag == 0); | 3709 STATIC_ASSERT(kSmiTag == 0); |
3662 STATIC_ASSERT(kSmiTagSize == 1); | 3710 STATIC_ASSERT(kSmiTagSize == 1); |
3663 // Allocate RegExpResult followed by FixedArray with size in ebx. | 3711 // Allocate RegExpResult followed by FixedArray with size in ebx. |
3664 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] | 3712 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] |
3665 // Elements: [Map][Length][..elements..] | 3713 // Elements: [Map][Length][..elements..] |
3666 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize, | 3714 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize, |
3667 times_half_pointer_size, | 3715 times_half_pointer_size, |
3668 ebx, // In: Number of elements (times 2, being a smi) | 3716 ebx, // In: Number of elements (times 2, being a smi) |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3708 // Fill contents of fixed-array with the-hole. | 3756 // Fill contents of fixed-array with the-hole. |
3709 __ SmiUntag(ecx); | 3757 __ SmiUntag(ecx); |
3710 __ mov(edx, Immediate(factory->the_hole_value())); | 3758 __ mov(edx, Immediate(factory->the_hole_value())); |
3711 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize)); | 3759 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize)); |
3712 // Fill fixed array elements with hole. | 3760 // Fill fixed array elements with hole. |
3713 // eax: JSArray. | 3761 // eax: JSArray. |
3714 // ecx: Number of elements to fill. | 3762 // ecx: Number of elements to fill. |
3715 // ebx: Start of elements in FixedArray. | 3763 // ebx: Start of elements in FixedArray. |
3716 // edx: the hole. | 3764 // edx: the hole. |
3717 Label loop; | 3765 Label loop; |
3718 __ test(ecx, Operand(ecx)); | 3766 __ test(ecx, ecx); |
3719 __ bind(&loop); | 3767 __ bind(&loop); |
3720 __ j(less_equal, &done, Label::kNear); // Jump if ecx is negative or zero. | 3768 __ j(less_equal, &done, Label::kNear); // Jump if ecx is negative or zero. |
3721 __ sub(Operand(ecx), Immediate(1)); | 3769 __ sub(ecx, Immediate(1)); |
3722 __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx); | 3770 __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx); |
3723 __ jmp(&loop); | 3771 __ jmp(&loop); |
3724 | 3772 |
3725 __ bind(&done); | 3773 __ bind(&done); |
3726 __ ret(3 * kPointerSize); | 3774 __ ret(3 * kPointerSize); |
3727 | 3775 |
3728 __ bind(&slowcase); | 3776 __ bind(&slowcase); |
3729 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); | 3777 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); |
3730 } | 3778 } |
3731 | 3779 |
(...skipping 13 matching lines...) Expand all Loading... |
3745 // Load the number string cache. | 3793 // Load the number string cache. |
3746 ExternalReference roots_address = | 3794 ExternalReference roots_address = |
3747 ExternalReference::roots_address(masm->isolate()); | 3795 ExternalReference::roots_address(masm->isolate()); |
3748 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex)); | 3796 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex)); |
3749 __ mov(number_string_cache, | 3797 __ mov(number_string_cache, |
3750 Operand::StaticArray(scratch, times_pointer_size, roots_address)); | 3798 Operand::StaticArray(scratch, times_pointer_size, roots_address)); |
3751 // Make the hash mask from the length of the number string cache. It | 3799 // Make the hash mask from the length of the number string cache. It |
3752 // contains two elements (number and string) for each cache entry. | 3800 // contains two elements (number and string) for each cache entry. |
3753 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); | 3801 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); |
3754 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two. | 3802 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two. |
3755 __ sub(Operand(mask), Immediate(1)); // Make mask. | 3803 __ sub(mask, Immediate(1)); // Make mask. |
3756 | 3804 |
3757 // Calculate the entry in the number string cache. The hash value in the | 3805 // Calculate the entry in the number string cache. The hash value in the |
3758 // number string cache for smis is just the smi value, and the hash for | 3806 // number string cache for smis is just the smi value, and the hash for |
3759 // doubles is the xor of the upper and lower words. See | 3807 // doubles is the xor of the upper and lower words. See |
3760 // Heap::GetNumberStringCache. | 3808 // Heap::GetNumberStringCache. |
3761 Label smi_hash_calculated; | 3809 Label smi_hash_calculated; |
3762 Label load_result_from_cache; | 3810 Label load_result_from_cache; |
3763 if (object_is_smi) { | 3811 if (object_is_smi) { |
3764 __ mov(scratch, object); | 3812 __ mov(scratch, object); |
3765 __ SmiUntag(scratch); | 3813 __ SmiUntag(scratch); |
3766 } else { | 3814 } else { |
3767 Label not_smi; | 3815 Label not_smi; |
3768 STATIC_ASSERT(kSmiTag == 0); | 3816 STATIC_ASSERT(kSmiTag == 0); |
3769 __ JumpIfNotSmi(object, ¬_smi, Label::kNear); | 3817 __ JumpIfNotSmi(object, ¬_smi, Label::kNear); |
3770 __ mov(scratch, object); | 3818 __ mov(scratch, object); |
3771 __ SmiUntag(scratch); | 3819 __ SmiUntag(scratch); |
3772 __ jmp(&smi_hash_calculated, Label::kNear); | 3820 __ jmp(&smi_hash_calculated, Label::kNear); |
3773 __ bind(¬_smi); | 3821 __ bind(¬_smi); |
3774 __ cmp(FieldOperand(object, HeapObject::kMapOffset), | 3822 __ cmp(FieldOperand(object, HeapObject::kMapOffset), |
3775 masm->isolate()->factory()->heap_number_map()); | 3823 masm->isolate()->factory()->heap_number_map()); |
3776 __ j(not_equal, not_found); | 3824 __ j(not_equal, not_found); |
3777 STATIC_ASSERT(8 == kDoubleSize); | 3825 STATIC_ASSERT(8 == kDoubleSize); |
3778 __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset)); | 3826 __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset)); |
3779 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4)); | 3827 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4)); |
3780 // Object is heap number and hash is now in scratch. Calculate cache index. | 3828 // Object is heap number and hash is now in scratch. Calculate cache index. |
3781 __ and_(scratch, Operand(mask)); | 3829 __ and_(scratch, mask); |
3782 Register index = scratch; | 3830 Register index = scratch; |
3783 Register probe = mask; | 3831 Register probe = mask; |
3784 __ mov(probe, | 3832 __ mov(probe, |
3785 FieldOperand(number_string_cache, | 3833 FieldOperand(number_string_cache, |
3786 index, | 3834 index, |
3787 times_twice_pointer_size, | 3835 times_twice_pointer_size, |
3788 FixedArray::kHeaderSize)); | 3836 FixedArray::kHeaderSize)); |
3789 __ JumpIfSmi(probe, not_found); | 3837 __ JumpIfSmi(probe, not_found); |
3790 if (CpuFeatures::IsSupported(SSE2)) { | 3838 if (CpuFeatures::IsSupported(SSE2)) { |
3791 CpuFeatures::Scope fscope(SSE2); | 3839 CpuFeatures::Scope fscope(SSE2); |
3792 __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset)); | 3840 __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset)); |
3793 __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset)); | 3841 __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset)); |
3794 __ ucomisd(xmm0, xmm1); | 3842 __ ucomisd(xmm0, xmm1); |
3795 } else { | 3843 } else { |
3796 __ fld_d(FieldOperand(object, HeapNumber::kValueOffset)); | 3844 __ fld_d(FieldOperand(object, HeapNumber::kValueOffset)); |
3797 __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset)); | 3845 __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset)); |
3798 __ FCmp(); | 3846 __ FCmp(); |
3799 } | 3847 } |
3800 __ j(parity_even, not_found); // Bail out if NaN is involved. | 3848 __ j(parity_even, not_found); // Bail out if NaN is involved. |
3801 __ j(not_equal, not_found); // The cache did not contain this value. | 3849 __ j(not_equal, not_found); // The cache did not contain this value. |
3802 __ jmp(&load_result_from_cache, Label::kNear); | 3850 __ jmp(&load_result_from_cache, Label::kNear); |
3803 } | 3851 } |
3804 | 3852 |
3805 __ bind(&smi_hash_calculated); | 3853 __ bind(&smi_hash_calculated); |
3806 // Object is smi and hash is now in scratch. Calculate cache index. | 3854 // Object is smi and hash is now in scratch. Calculate cache index. |
3807 __ and_(scratch, Operand(mask)); | 3855 __ and_(scratch, mask); |
3808 Register index = scratch; | 3856 Register index = scratch; |
3809 // Check if the entry is the smi we are looking for. | 3857 // Check if the entry is the smi we are looking for. |
3810 __ cmp(object, | 3858 __ cmp(object, |
3811 FieldOperand(number_string_cache, | 3859 FieldOperand(number_string_cache, |
3812 index, | 3860 index, |
3813 times_twice_pointer_size, | 3861 times_twice_pointer_size, |
3814 FixedArray::kHeaderSize)); | 3862 FixedArray::kHeaderSize)); |
3815 __ j(not_equal, not_found); | 3863 __ j(not_equal, not_found); |
3816 | 3864 |
3817 // Get the result from the cache. | 3865 // Get the result from the cache. |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3849 } | 3897 } |
3850 | 3898 |
3851 void CompareStub::Generate(MacroAssembler* masm) { | 3899 void CompareStub::Generate(MacroAssembler* masm) { |
3852 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); | 3900 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); |
3853 | 3901 |
3854 Label check_unequal_objects; | 3902 Label check_unequal_objects; |
3855 | 3903 |
3856 // Compare two smis if required. | 3904 // Compare two smis if required. |
3857 if (include_smi_compare_) { | 3905 if (include_smi_compare_) { |
3858 Label non_smi, smi_done; | 3906 Label non_smi, smi_done; |
3859 __ mov(ecx, Operand(edx)); | 3907 __ mov(ecx, edx); |
3860 __ or_(ecx, Operand(eax)); | 3908 __ or_(ecx, eax); |
3861 __ JumpIfNotSmi(ecx, &non_smi, Label::kNear); | 3909 __ JumpIfNotSmi(ecx, &non_smi, Label::kNear); |
3862 __ sub(edx, Operand(eax)); // Return on the result of the subtraction. | 3910 __ sub(edx, eax); // Return on the result of the subtraction. |
3863 __ j(no_overflow, &smi_done, Label::kNear); | 3911 __ j(no_overflow, &smi_done, Label::kNear); |
3864 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here. | 3912 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here. |
3865 __ bind(&smi_done); | 3913 __ bind(&smi_done); |
3866 __ mov(eax, edx); | 3914 __ mov(eax, edx); |
3867 __ ret(0); | 3915 __ ret(0); |
3868 __ bind(&non_smi); | 3916 __ bind(&non_smi); |
3869 } else if (FLAG_debug_code) { | 3917 } else if (FLAG_debug_code) { |
3870 __ mov(ecx, Operand(edx)); | 3918 __ mov(ecx, edx); |
3871 __ or_(ecx, Operand(eax)); | 3919 __ or_(ecx, eax); |
3872 __ test(ecx, Immediate(kSmiTagMask)); | 3920 __ test(ecx, Immediate(kSmiTagMask)); |
3873 __ Assert(not_zero, "Unexpected smi operands."); | 3921 __ Assert(not_zero, "Unexpected smi operands."); |
3874 } | 3922 } |
3875 | 3923 |
3876 // NOTICE! This code is only reached after a smi-fast-case check, so | 3924 // NOTICE! This code is only reached after a smi-fast-case check, so |
3877 // it is certain that at least one operand isn't a smi. | 3925 // it is certain that at least one operand isn't a smi. |
3878 | 3926 |
3879 // Identical objects can be compared fast, but there are some tricky cases | 3927 // Identical objects can be compared fast, but there are some tricky cases |
3880 // for NaN and undefined. | 3928 // for NaN and undefined. |
3881 { | 3929 { |
3882 Label not_identical; | 3930 Label not_identical; |
3883 __ cmp(eax, Operand(edx)); | 3931 __ cmp(eax, edx); |
3884 __ j(not_equal, ¬_identical); | 3932 __ j(not_equal, ¬_identical); |
3885 | 3933 |
3886 if (cc_ != equal) { | 3934 if (cc_ != equal) { |
3887 // Check for undefined. undefined OP undefined is false even though | 3935 // Check for undefined. undefined OP undefined is false even though |
3888 // undefined == undefined. | 3936 // undefined == undefined. |
3889 Label check_for_nan; | 3937 Label check_for_nan; |
3890 __ cmp(edx, masm->isolate()->factory()->undefined_value()); | 3938 __ cmp(edx, masm->isolate()->factory()->undefined_value()); |
3891 __ j(not_equal, &check_for_nan, Label::kNear); | 3939 __ j(not_equal, &check_for_nan, Label::kNear); |
3892 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_)))); | 3940 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_)))); |
3893 __ ret(0); | 3941 __ ret(0); |
(...skipping 28 matching lines...) Expand all Loading... |
3922 // Read top bits of double representation (second word of value). | 3970 // Read top bits of double representation (second word of value). |
3923 | 3971 |
3924 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e., | 3972 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e., |
3925 // all bits in the mask are set. We only need to check the word | 3973 // all bits in the mask are set. We only need to check the word |
3926 // that contains the exponent and high bit of the mantissa. | 3974 // that contains the exponent and high bit of the mantissa. |
3927 STATIC_ASSERT(((kQuietNaNHighBitsMask << 1) & 0x80000000u) != 0); | 3975 STATIC_ASSERT(((kQuietNaNHighBitsMask << 1) & 0x80000000u) != 0); |
3928 __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset)); | 3976 __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset)); |
3929 __ Set(eax, Immediate(0)); | 3977 __ Set(eax, Immediate(0)); |
3930 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost | 3978 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost |
3931 // bits. | 3979 // bits. |
3932 __ add(edx, Operand(edx)); | 3980 __ add(edx, edx); |
3933 __ cmp(edx, kQuietNaNHighBitsMask << 1); | 3981 __ cmp(edx, kQuietNaNHighBitsMask << 1); |
3934 if (cc_ == equal) { | 3982 if (cc_ == equal) { |
3935 STATIC_ASSERT(EQUAL != 1); | 3983 STATIC_ASSERT(EQUAL != 1); |
3936 __ setcc(above_equal, eax); | 3984 __ setcc(above_equal, eax); |
3937 __ ret(0); | 3985 __ ret(0); |
3938 } else { | 3986 } else { |
3939 Label nan; | 3987 Label nan; |
3940 __ j(above_equal, &nan, Label::kNear); | 3988 __ j(above_equal, &nan, Label::kNear); |
3941 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); | 3989 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); |
3942 __ ret(0); | 3990 __ ret(0); |
(...skipping 13 matching lines...) Expand all Loading... |
3956 Label not_smis; | 4004 Label not_smis; |
3957 // If we're doing a strict equality comparison, we don't have to do | 4005 // If we're doing a strict equality comparison, we don't have to do |
3958 // type conversion, so we generate code to do fast comparison for objects | 4006 // type conversion, so we generate code to do fast comparison for objects |
3959 // and oddballs. Non-smi numbers and strings still go through the usual | 4007 // and oddballs. Non-smi numbers and strings still go through the usual |
3960 // slow-case code. | 4008 // slow-case code. |
3961 // If either is a Smi (we know that not both are), then they can only | 4009 // If either is a Smi (we know that not both are), then they can only |
3962 // be equal if the other is a HeapNumber. If so, use the slow case. | 4010 // be equal if the other is a HeapNumber. If so, use the slow case. |
3963 STATIC_ASSERT(kSmiTag == 0); | 4011 STATIC_ASSERT(kSmiTag == 0); |
3964 ASSERT_EQ(0, Smi::FromInt(0)); | 4012 ASSERT_EQ(0, Smi::FromInt(0)); |
3965 __ mov(ecx, Immediate(kSmiTagMask)); | 4013 __ mov(ecx, Immediate(kSmiTagMask)); |
3966 __ and_(ecx, Operand(eax)); | 4014 __ and_(ecx, eax); |
3967 __ test(ecx, Operand(edx)); | 4015 __ test(ecx, edx); |
3968 __ j(not_zero, ¬_smis, Label::kNear); | 4016 __ j(not_zero, ¬_smis, Label::kNear); |
3969 // One operand is a smi. | 4017 // One operand is a smi. |
3970 | 4018 |
3971 // Check whether the non-smi is a heap number. | 4019 // Check whether the non-smi is a heap number. |
3972 STATIC_ASSERT(kSmiTagMask == 1); | 4020 STATIC_ASSERT(kSmiTagMask == 1); |
3973 // ecx still holds eax & kSmiTag, which is either zero or one. | 4021 // ecx still holds eax & kSmiTag, which is either zero or one. |
3974 __ sub(Operand(ecx), Immediate(0x01)); | 4022 __ sub(ecx, Immediate(0x01)); |
3975 __ mov(ebx, edx); | 4023 __ mov(ebx, edx); |
3976 __ xor_(ebx, Operand(eax)); | 4024 __ xor_(ebx, eax); |
3977 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx. | 4025 __ and_(ebx, ecx); // ebx holds either 0 or eax ^ edx. |
3978 __ xor_(ebx, Operand(eax)); | 4026 __ xor_(ebx, eax); |
3979 // if eax was smi, ebx is now edx, else eax. | 4027 // if eax was smi, ebx is now edx, else eax. |
3980 | 4028 |
3981 // Check if the non-smi operand is a heap number. | 4029 // Check if the non-smi operand is a heap number. |
3982 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), | 4030 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), |
3983 Immediate(masm->isolate()->factory()->heap_number_map())); | 4031 Immediate(masm->isolate()->factory()->heap_number_map())); |
3984 // If heap number, handle it in the slow case. | 4032 // If heap number, handle it in the slow case. |
3985 __ j(equal, &slow, Label::kNear); | 4033 __ j(equal, &slow, Label::kNear); |
3986 // Return non-equal (ebx is not zero) | 4034 // Return non-equal (ebx is not zero) |
3987 __ mov(eax, ebx); | 4035 __ mov(eax, ebx); |
3988 __ ret(0); | 4036 __ ret(0); |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4030 CpuFeatures::Scope use_cmov(CMOV); | 4078 CpuFeatures::Scope use_cmov(CMOV); |
4031 | 4079 |
4032 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison); | 4080 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison); |
4033 __ ucomisd(xmm0, xmm1); | 4081 __ ucomisd(xmm0, xmm1); |
4034 | 4082 |
4035 // Don't base result on EFLAGS when a NaN is involved. | 4083 // Don't base result on EFLAGS when a NaN is involved. |
4036 __ j(parity_even, &unordered, Label::kNear); | 4084 __ j(parity_even, &unordered, Label::kNear); |
4037 // Return a result of -1, 0, or 1, based on EFLAGS. | 4085 // Return a result of -1, 0, or 1, based on EFLAGS. |
4038 __ mov(eax, 0); // equal | 4086 __ mov(eax, 0); // equal |
4039 __ mov(ecx, Immediate(Smi::FromInt(1))); | 4087 __ mov(ecx, Immediate(Smi::FromInt(1))); |
4040 __ cmov(above, eax, Operand(ecx)); | 4088 __ cmov(above, eax, ecx); |
4041 __ mov(ecx, Immediate(Smi::FromInt(-1))); | 4089 __ mov(ecx, Immediate(Smi::FromInt(-1))); |
4042 __ cmov(below, eax, Operand(ecx)); | 4090 __ cmov(below, eax, ecx); |
4043 __ ret(0); | 4091 __ ret(0); |
4044 } else { | 4092 } else { |
4045 FloatingPointHelper::CheckFloatOperands( | 4093 FloatingPointHelper::CheckFloatOperands( |
4046 masm, &non_number_comparison, ebx); | 4094 masm, &non_number_comparison, ebx); |
4047 FloatingPointHelper::LoadFloatOperand(masm, eax); | 4095 FloatingPointHelper::LoadFloatOperand(masm, eax); |
4048 FloatingPointHelper::LoadFloatOperand(masm, edx); | 4096 FloatingPointHelper::LoadFloatOperand(masm, edx); |
4049 __ FCmp(); | 4097 __ FCmp(); |
4050 | 4098 |
4051 // Don't base result on EFLAGS when a NaN is involved. | 4099 // Don't base result on EFLAGS when a NaN is involved. |
4052 __ j(parity_even, &unordered, Label::kNear); | 4100 __ j(parity_even, &unordered, Label::kNear); |
(...skipping 138 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4191 __ cmp(scratch, kSymbolTag | kStringTag); | 4239 __ cmp(scratch, kSymbolTag | kStringTag); |
4192 __ j(not_equal, label); | 4240 __ j(not_equal, label); |
4193 } | 4241 } |
4194 | 4242 |
4195 | 4243 |
4196 void StackCheckStub::Generate(MacroAssembler* masm) { | 4244 void StackCheckStub::Generate(MacroAssembler* masm) { |
4197 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); | 4245 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); |
4198 } | 4246 } |
4199 | 4247 |
4200 | 4248 |
| 4249 void CallFunctionStub::FinishCode(Code* code) { |
| 4250 code->set_has_function_cache(RecordCallTarget()); |
| 4251 } |
| 4252 |
| 4253 |
| 4254 void CallFunctionStub::Clear(Heap* heap, Address address) { |
| 4255 ASSERT(Memory::uint8_at(address + kPointerSize) == Assembler::kTestEaxByte); |
| 4256 // 1 ~ size of the test eax opcode. |
| 4257 Object* cell = Memory::Object_at(address + kPointerSize + 1); |
| 4258 // Low-level because clearing happens during GC. |
| 4259 reinterpret_cast<JSGlobalPropertyCell*>(cell)->set_value( |
| 4260 RawUninitializedSentinel(heap)); |
| 4261 } |
| 4262 |
| 4263 |
| 4264 Object* CallFunctionStub::GetCachedValue(Address address) { |
| 4265 ASSERT(Memory::uint8_at(address + kPointerSize) == Assembler::kTestEaxByte); |
| 4266 // 1 ~ size of the test eax opcode. |
| 4267 Object* cell = Memory::Object_at(address + kPointerSize + 1); |
| 4268 return JSGlobalPropertyCell::cast(cell)->value(); |
| 4269 } |
| 4270 |
| 4271 |
4201 void CallFunctionStub::Generate(MacroAssembler* masm) { | 4272 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 4273 Isolate* isolate = masm->isolate(); |
4202 Label slow, non_function; | 4274 Label slow, non_function; |
4203 | 4275 |
4204 // The receiver might implicitly be the global object. This is | 4276 // The receiver might implicitly be the global object. This is |
4205 // indicated by passing the hole as the receiver to the call | 4277 // indicated by passing the hole as the receiver to the call |
4206 // function stub. | 4278 // function stub. |
4207 if (ReceiverMightBeImplicit()) { | 4279 if (ReceiverMightBeImplicit()) { |
4208 Label call; | 4280 Label receiver_ok; |
4209 // Get the receiver from the stack. | 4281 // Get the receiver from the stack. |
4210 // +1 ~ return address | 4282 // +1 ~ return address |
4211 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize)); | 4283 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize)); |
4212 // Call as function is indicated with the hole. | 4284 // Call as function is indicated with the hole. |
4213 __ cmp(eax, masm->isolate()->factory()->the_hole_value()); | 4285 __ cmp(eax, isolate->factory()->the_hole_value()); |
4214 __ j(not_equal, &call, Label::kNear); | 4286 __ j(not_equal, &receiver_ok, Label::kNear); |
4215 // Patch the receiver on the stack with the global receiver object. | 4287 // Patch the receiver on the stack with the global receiver object. |
4216 __ mov(ebx, GlobalObjectOperand()); | 4288 __ mov(ebx, GlobalObjectOperand()); |
4217 __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset)); | 4289 __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset)); |
4218 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), ebx); | 4290 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), ebx); |
4219 __ bind(&call); | 4291 __ bind(&receiver_ok); |
4220 } | 4292 } |
4221 | 4293 |
4222 // Get the function to call from the stack. | 4294 // Get the function to call from the stack. |
4223 // +2 ~ receiver, return address | 4295 // +2 ~ receiver, return address |
4224 __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize)); | 4296 __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize)); |
4225 | 4297 |
4226 // Check that the function really is a JavaScript function. | 4298 // Check that the function really is a JavaScript function. |
4227 __ JumpIfSmi(edi, &non_function); | 4299 __ JumpIfSmi(edi, &non_function); |
4228 // Goto slow case if we do not have a function. | 4300 // Goto slow case if we do not have a function. |
4229 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); | 4301 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); |
4230 __ j(not_equal, &slow); | 4302 __ j(not_equal, &slow); |
4231 | 4303 |
| 4304 if (RecordCallTarget()) { |
| 4305 // Cache the called function in a global property cell in the |
| 4306 // instruction stream after the call. Cache states are uninitialized, |
| 4307 // monomorphic (indicated by a JSFunction), and megamorphic. |
| 4308 Label initialize, call; |
| 4309 // Load the cache cell address into ebx and the cache state into ecx. |
| 4310 __ mov(ebx, Operand(esp, 0)); // Return address. |
| 4311 __ mov(ebx, Operand(ebx, 1)); // 1 ~ sizeof 'test eax' opcode in bytes. |
| 4312 __ mov(ecx, FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset)); |
| 4313 |
| 4314 // A monomorphic cache hit or an already megamorphic state: invoke the |
| 4315 // function without changing the state. |
| 4316 __ cmp(ecx, edi); |
| 4317 __ j(equal, &call, Label::kNear); |
| 4318 __ cmp(ecx, Immediate(MegamorphicSentinel(isolate))); |
| 4319 __ j(equal, &call, Label::kNear); |
| 4320 |
| 4321 // A monomorphic miss (i.e, here the cache is not uninitialized) goes |
| 4322 // megamorphic. |
| 4323 __ cmp(ecx, Immediate(UninitializedSentinel(isolate))); |
| 4324 __ j(equal, &initialize, Label::kNear); |
| 4325 // MegamorphicSentinel is a root so no write-barrier is needed. |
| 4326 __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), |
| 4327 Immediate(MegamorphicSentinel(isolate))); |
| 4328 __ jmp(&call, Label::kNear); |
| 4329 |
| 4330 // An uninitialized cache is patched with the function. |
| 4331 __ bind(&initialize); |
| 4332 __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), edi); |
| 4333 __ mov(ecx, edi); |
| 4334 __ RecordWriteField(ebx, |
| 4335 JSGlobalPropertyCell::kValueOffset, |
| 4336 ecx, |
| 4337 edx, |
| 4338 kDontSaveFPRegs, |
| 4339 OMIT_REMEMBERED_SET, // Cells are rescanned. |
| 4340 OMIT_SMI_CHECK); |
| 4341 |
| 4342 __ bind(&call); |
| 4343 } |
| 4344 |
4232 // Fast-case: Just invoke the function. | 4345 // Fast-case: Just invoke the function. |
4233 ParameterCount actual(argc_); | 4346 ParameterCount actual(argc_); |
4234 | 4347 |
4235 if (ReceiverMightBeImplicit()) { | 4348 if (ReceiverMightBeImplicit()) { |
4236 Label call_as_function; | 4349 Label call_as_function; |
4237 __ cmp(eax, masm->isolate()->factory()->the_hole_value()); | 4350 __ cmp(eax, isolate->factory()->the_hole_value()); |
4238 __ j(equal, &call_as_function); | 4351 __ j(equal, &call_as_function); |
4239 __ InvokeFunction(edi, | 4352 __ InvokeFunction(edi, |
4240 actual, | 4353 actual, |
4241 JUMP_FUNCTION, | 4354 JUMP_FUNCTION, |
4242 NullCallWrapper(), | 4355 NullCallWrapper(), |
4243 CALL_AS_METHOD); | 4356 CALL_AS_METHOD); |
4244 __ bind(&call_as_function); | 4357 __ bind(&call_as_function); |
4245 } | 4358 } |
4246 __ InvokeFunction(edi, | 4359 __ InvokeFunction(edi, |
4247 actual, | 4360 actual, |
4248 JUMP_FUNCTION, | 4361 JUMP_FUNCTION, |
4249 NullCallWrapper(), | 4362 NullCallWrapper(), |
4250 CALL_AS_FUNCTION); | 4363 CALL_AS_FUNCTION); |
4251 | 4364 |
4252 // Slow-case: Non-function called. | 4365 // Slow-case: Non-function called. |
4253 __ bind(&slow); | 4366 __ bind(&slow); |
| 4367 if (RecordCallTarget()) { |
| 4368 // If there is a call target cache, mark it megamorphic in the |
| 4369 // non-function case. |
| 4370 __ mov(ebx, Operand(esp, 0)); |
| 4371 __ mov(ebx, Operand(ebx, 1)); |
| 4372 __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), |
| 4373 Immediate(MegamorphicSentinel(isolate))); |
| 4374 } |
4254 // Check for function proxy. | 4375 // Check for function proxy. |
4255 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); | 4376 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); |
4256 __ j(not_equal, &non_function); | 4377 __ j(not_equal, &non_function); |
4257 __ pop(ecx); | 4378 __ pop(ecx); |
4258 __ push(edi); // put proxy as additional argument under return address | 4379 __ push(edi); // put proxy as additional argument under return address |
4259 __ push(ecx); | 4380 __ push(ecx); |
4260 __ Set(eax, Immediate(argc_ + 1)); | 4381 __ Set(eax, Immediate(argc_ + 1)); |
4261 __ Set(ebx, Immediate(0)); | 4382 __ Set(ebx, Immediate(0)); |
4262 __ SetCallKind(ecx, CALL_AS_FUNCTION); | 4383 __ SetCallKind(ecx, CALL_AS_FUNCTION); |
4263 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY); | 4384 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY); |
4264 { | 4385 { |
4265 Handle<Code> adaptor = | 4386 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); |
4266 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | |
4267 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 4387 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
4268 } | 4388 } |
4269 | 4389 |
4270 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | 4390 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
4271 // of the original receiver from the call site). | 4391 // of the original receiver from the call site). |
4272 __ bind(&non_function); | 4392 __ bind(&non_function); |
4273 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); | 4393 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); |
4274 __ Set(eax, Immediate(argc_)); | 4394 __ Set(eax, Immediate(argc_)); |
4275 __ Set(ebx, Immediate(0)); | 4395 __ Set(ebx, Immediate(0)); |
4276 __ SetCallKind(ecx, CALL_AS_METHOD); | 4396 __ SetCallKind(ecx, CALL_AS_METHOD); |
4277 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); | 4397 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); |
4278 Handle<Code> adaptor = | 4398 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); |
4279 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | |
4280 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 4399 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
4281 } | 4400 } |
4282 | 4401 |
4283 | 4402 |
4284 bool CEntryStub::NeedsImmovableCode() { | 4403 bool CEntryStub::NeedsImmovableCode() { |
4285 return false; | 4404 return false; |
4286 } | 4405 } |
4287 | 4406 |
4288 | 4407 |
| 4408 bool CEntryStub::IsPregenerated() { |
| 4409 return (!save_doubles_ || ISOLATE->fp_stubs_generated()) && |
| 4410 result_size_ == 1; |
| 4411 } |
| 4412 |
| 4413 |
| 4414 void CodeStub::GenerateStubsAheadOfTime() { |
| 4415 CEntryStub::GenerateAheadOfTime(); |
| 4416 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(); |
| 4417 // It is important that the store buffer overflow stubs are generated first. |
| 4418 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(); |
| 4419 } |
| 4420 |
| 4421 |
| 4422 void CodeStub::GenerateFPStubs() { |
| 4423 CEntryStub save_doubles(1, kSaveFPRegs); |
| 4424 Handle<Code> code = save_doubles.GetCode(); |
| 4425 code->set_is_pregenerated(true); |
| 4426 code->GetIsolate()->set_fp_stubs_generated(true); |
| 4427 } |
| 4428 |
| 4429 |
| 4430 void CEntryStub::GenerateAheadOfTime() { |
| 4431 CEntryStub stub(1, kDontSaveFPRegs); |
| 4432 Handle<Code> code = stub.GetCode(); |
| 4433 code->set_is_pregenerated(true); |
| 4434 } |
| 4435 |
| 4436 |
4289 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { | 4437 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { |
4290 __ Throw(eax); | 4438 __ Throw(eax); |
4291 } | 4439 } |
4292 | 4440 |
4293 | 4441 |
4294 void CEntryStub::GenerateCore(MacroAssembler* masm, | 4442 void CEntryStub::GenerateCore(MacroAssembler* masm, |
4295 Label* throw_normal_exception, | 4443 Label* throw_normal_exception, |
4296 Label* throw_termination_exception, | 4444 Label* throw_termination_exception, |
4297 Label* throw_out_of_memory_exception, | 4445 Label* throw_out_of_memory_exception, |
4298 bool do_gc, | 4446 bool do_gc, |
(...skipping 26 matching lines...) Expand all Loading... |
4325 ExternalReference::heap_always_allocate_scope_depth(masm->isolate()); | 4473 ExternalReference::heap_always_allocate_scope_depth(masm->isolate()); |
4326 if (always_allocate_scope) { | 4474 if (always_allocate_scope) { |
4327 __ inc(Operand::StaticVariable(scope_depth)); | 4475 __ inc(Operand::StaticVariable(scope_depth)); |
4328 } | 4476 } |
4329 | 4477 |
4330 // Call C function. | 4478 // Call C function. |
4331 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc. | 4479 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc. |
4332 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv. | 4480 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv. |
4333 __ mov(Operand(esp, 2 * kPointerSize), | 4481 __ mov(Operand(esp, 2 * kPointerSize), |
4334 Immediate(ExternalReference::isolate_address())); | 4482 Immediate(ExternalReference::isolate_address())); |
4335 __ call(Operand(ebx)); | 4483 __ call(ebx); |
4336 // Result is in eax or edx:eax - do not destroy these registers! | 4484 // Result is in eax or edx:eax - do not destroy these registers! |
4337 | 4485 |
4338 if (always_allocate_scope) { | 4486 if (always_allocate_scope) { |
4339 __ dec(Operand::StaticVariable(scope_depth)); | 4487 __ dec(Operand::StaticVariable(scope_depth)); |
4340 } | 4488 } |
4341 | 4489 |
4342 // Make sure we're not trying to return 'the hole' from the runtime | 4490 // Make sure we're not trying to return 'the hole' from the runtime |
4343 // call as this may lead to crashes in the IC code later. | 4491 // call as this may lead to crashes in the IC code later. |
4344 if (FLAG_debug_code) { | 4492 if (FLAG_debug_code) { |
4345 Label okay; | 4493 Label okay; |
(...skipping 11 matching lines...) Expand all Loading... |
4357 __ test(ecx, Immediate(kFailureTagMask)); | 4505 __ test(ecx, Immediate(kFailureTagMask)); |
4358 __ j(zero, &failure_returned); | 4506 __ j(zero, &failure_returned); |
4359 | 4507 |
4360 ExternalReference pending_exception_address( | 4508 ExternalReference pending_exception_address( |
4361 Isolate::kPendingExceptionAddress, masm->isolate()); | 4509 Isolate::kPendingExceptionAddress, masm->isolate()); |
4362 | 4510 |
4363 // Check that there is no pending exception, otherwise we | 4511 // Check that there is no pending exception, otherwise we |
4364 // should have returned some failure value. | 4512 // should have returned some failure value. |
4365 if (FLAG_debug_code) { | 4513 if (FLAG_debug_code) { |
4366 __ push(edx); | 4514 __ push(edx); |
4367 __ mov(edx, Operand::StaticVariable( | 4515 __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value())); |
4368 ExternalReference::the_hole_value_location(masm->isolate()))); | |
4369 Label okay; | 4516 Label okay; |
4370 __ cmp(edx, Operand::StaticVariable(pending_exception_address)); | 4517 __ cmp(edx, Operand::StaticVariable(pending_exception_address)); |
4371 // Cannot use check here as it attempts to generate call into runtime. | 4518 // Cannot use check here as it attempts to generate call into runtime. |
4372 __ j(equal, &okay, Label::kNear); | 4519 __ j(equal, &okay, Label::kNear); |
4373 __ int3(); | 4520 __ int3(); |
4374 __ bind(&okay); | 4521 __ bind(&okay); |
4375 __ pop(edx); | 4522 __ pop(edx); |
4376 } | 4523 } |
4377 | 4524 |
4378 // Exit the JavaScript to C++ exit frame. | 4525 // Exit the JavaScript to C++ exit frame. |
4379 __ LeaveExitFrame(save_doubles_); | 4526 __ LeaveExitFrame(save_doubles_ == kSaveFPRegs); |
4380 __ ret(0); | 4527 __ ret(0); |
4381 | 4528 |
4382 // Handling of failure. | 4529 // Handling of failure. |
4383 __ bind(&failure_returned); | 4530 __ bind(&failure_returned); |
4384 | 4531 |
4385 Label retry; | 4532 Label retry; |
4386 // If the returned exception is RETRY_AFTER_GC continue at retry label | 4533 // If the returned exception is RETRY_AFTER_GC continue at retry label |
4387 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); | 4534 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); |
4388 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); | 4535 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); |
4389 __ j(zero, &retry, Label::kNear); | 4536 __ j(zero, &retry, Label::kNear); |
4390 | 4537 |
4391 // Special handling of out of memory exceptions. | 4538 // Special handling of out of memory exceptions. |
4392 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException())); | 4539 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException())); |
4393 __ j(equal, throw_out_of_memory_exception); | 4540 __ j(equal, throw_out_of_memory_exception); |
4394 | 4541 |
4395 // Retrieve the pending exception and clear the variable. | 4542 // Retrieve the pending exception and clear the variable. |
4396 ExternalReference the_hole_location = | |
4397 ExternalReference::the_hole_value_location(masm->isolate()); | |
4398 __ mov(eax, Operand::StaticVariable(pending_exception_address)); | 4543 __ mov(eax, Operand::StaticVariable(pending_exception_address)); |
4399 __ mov(edx, Operand::StaticVariable(the_hole_location)); | 4544 __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value())); |
4400 __ mov(Operand::StaticVariable(pending_exception_address), edx); | 4545 __ mov(Operand::StaticVariable(pending_exception_address), edx); |
4401 | 4546 |
4402 // Special handling of termination exceptions which are uncatchable | 4547 // Special handling of termination exceptions which are uncatchable |
4403 // by javascript code. | 4548 // by javascript code. |
4404 __ cmp(eax, masm->isolate()->factory()->termination_exception()); | 4549 __ cmp(eax, masm->isolate()->factory()->termination_exception()); |
4405 __ j(equal, throw_termination_exception); | 4550 __ j(equal, throw_termination_exception); |
4406 | 4551 |
4407 // Handle normal exception. | 4552 // Handle normal exception. |
4408 __ jmp(throw_normal_exception); | 4553 __ jmp(throw_normal_exception); |
4409 | 4554 |
(...skipping 14 matching lines...) Expand all Loading... |
4424 // ebp: frame pointer (restored after C call) | 4569 // ebp: frame pointer (restored after C call) |
4425 // esp: stack pointer (restored after C call) | 4570 // esp: stack pointer (restored after C call) |
4426 // esi: current context (C callee-saved) | 4571 // esi: current context (C callee-saved) |
4427 // edi: JS function of the caller (C callee-saved) | 4572 // edi: JS function of the caller (C callee-saved) |
4428 | 4573 |
4429 // NOTE: Invocations of builtins may return failure objects instead | 4574 // NOTE: Invocations of builtins may return failure objects instead |
4430 // of a proper result. The builtin entry handles this by performing | 4575 // of a proper result. The builtin entry handles this by performing |
4431 // a garbage collection and retrying the builtin (twice). | 4576 // a garbage collection and retrying the builtin (twice). |
4432 | 4577 |
4433 // Enter the exit frame that transitions from JavaScript to C++. | 4578 // Enter the exit frame that transitions from JavaScript to C++. |
4434 __ EnterExitFrame(save_doubles_); | 4579 __ EnterExitFrame(save_doubles_ == kSaveFPRegs); |
4435 | 4580 |
4436 // eax: result parameter for PerformGC, if any (setup below) | 4581 // eax: result parameter for PerformGC, if any (setup below) |
4437 // ebx: pointer to builtin function (C callee-saved) | 4582 // ebx: pointer to builtin function (C callee-saved) |
4438 // ebp: frame pointer (restored after C call) | 4583 // ebp: frame pointer (restored after C call) |
4439 // esp: stack pointer (restored after C call) | 4584 // esp: stack pointer (restored after C call) |
4440 // edi: number of arguments including receiver (C callee-saved) | 4585 // edi: number of arguments including receiver (C callee-saved) |
4441 // esi: argv pointer (C callee-saved) | 4586 // esi: argv pointer (C callee-saved) |
4442 | 4587 |
4443 Label throw_normal_exception; | 4588 Label throw_normal_exception; |
4444 Label throw_termination_exception; | 4589 Label throw_termination_exception; |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4480 GenerateThrowTOS(masm); | 4625 GenerateThrowTOS(masm); |
4481 } | 4626 } |
4482 | 4627 |
4483 | 4628 |
4484 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { | 4629 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { |
4485 Label invoke, exit; | 4630 Label invoke, exit; |
4486 Label not_outermost_js, not_outermost_js_2; | 4631 Label not_outermost_js, not_outermost_js_2; |
4487 | 4632 |
4488 // Set up frame. | 4633 // Set up frame. |
4489 __ push(ebp); | 4634 __ push(ebp); |
4490 __ mov(ebp, Operand(esp)); | 4635 __ mov(ebp, esp); |
4491 | 4636 |
4492 // Push marker in two places. | 4637 // Push marker in two places. |
4493 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; | 4638 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; |
4494 __ push(Immediate(Smi::FromInt(marker))); // context slot | 4639 __ push(Immediate(Smi::FromInt(marker))); // context slot |
4495 __ push(Immediate(Smi::FromInt(marker))); // function slot | 4640 __ push(Immediate(Smi::FromInt(marker))); // function slot |
4496 // Save callee-saved registers (C calling conventions). | 4641 // Save callee-saved registers (C calling conventions). |
4497 __ push(edi); | 4642 __ push(edi); |
4498 __ push(esi); | 4643 __ push(esi); |
4499 __ push(ebx); | 4644 __ push(ebx); |
4500 | 4645 |
(...skipping 23 matching lines...) Expand all Loading... |
4524 masm->isolate()); | 4669 masm->isolate()); |
4525 __ mov(Operand::StaticVariable(pending_exception), eax); | 4670 __ mov(Operand::StaticVariable(pending_exception), eax); |
4526 __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception())); | 4671 __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception())); |
4527 __ jmp(&exit); | 4672 __ jmp(&exit); |
4528 | 4673 |
4529 // Invoke: Link this frame into the handler chain. | 4674 // Invoke: Link this frame into the handler chain. |
4530 __ bind(&invoke); | 4675 __ bind(&invoke); |
4531 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER); | 4676 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER); |
4532 | 4677 |
4533 // Clear any pending exceptions. | 4678 // Clear any pending exceptions. |
4534 ExternalReference the_hole_location = | 4679 __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value())); |
4535 ExternalReference::the_hole_value_location(masm->isolate()); | |
4536 __ mov(edx, Operand::StaticVariable(the_hole_location)); | |
4537 __ mov(Operand::StaticVariable(pending_exception), edx); | 4680 __ mov(Operand::StaticVariable(pending_exception), edx); |
4538 | 4681 |
4539 // Fake a receiver (NULL). | 4682 // Fake a receiver (NULL). |
4540 __ push(Immediate(0)); // receiver | 4683 __ push(Immediate(0)); // receiver |
4541 | 4684 |
4542 // Invoke the function by calling through JS entry trampoline | 4685 // Invoke the function by calling through JS entry trampoline |
4543 // builtin and pop the faked function when we return. Notice that we | 4686 // builtin and pop the faked function when we return. Notice that we |
4544 // cannot store a reference to the trampoline code directly in this | 4687 // cannot store a reference to the trampoline code directly in this |
4545 // stub, because the builtin stubs may not have been generated yet. | 4688 // stub, because the builtin stubs may not have been generated yet. |
4546 if (is_construct) { | 4689 if (is_construct) { |
4547 ExternalReference construct_entry( | 4690 ExternalReference construct_entry( |
4548 Builtins::kJSConstructEntryTrampoline, | 4691 Builtins::kJSConstructEntryTrampoline, |
4549 masm->isolate()); | 4692 masm->isolate()); |
4550 __ mov(edx, Immediate(construct_entry)); | 4693 __ mov(edx, Immediate(construct_entry)); |
4551 } else { | 4694 } else { |
4552 ExternalReference entry(Builtins::kJSEntryTrampoline, | 4695 ExternalReference entry(Builtins::kJSEntryTrampoline, |
4553 masm->isolate()); | 4696 masm->isolate()); |
4554 __ mov(edx, Immediate(entry)); | 4697 __ mov(edx, Immediate(entry)); |
4555 } | 4698 } |
4556 __ mov(edx, Operand(edx, 0)); // deref address | 4699 __ mov(edx, Operand(edx, 0)); // deref address |
4557 __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); | 4700 __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); |
4558 __ call(Operand(edx)); | 4701 __ call(edx); |
4559 | 4702 |
4560 // Unlink this frame from the handler chain. | 4703 // Unlink this frame from the handler chain. |
4561 __ PopTryHandler(); | 4704 __ PopTryHandler(); |
4562 | 4705 |
4563 __ bind(&exit); | 4706 __ bind(&exit); |
4564 // Check if the current stack frame is marked as the outermost JS frame. | 4707 // Check if the current stack frame is marked as the outermost JS frame. |
4565 __ pop(ebx); | 4708 __ pop(ebx); |
4566 __ cmp(Operand(ebx), | 4709 __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); |
4567 Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); | |
4568 __ j(not_equal, ¬_outermost_js_2); | 4710 __ j(not_equal, ¬_outermost_js_2); |
4569 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0)); | 4711 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0)); |
4570 __ bind(¬_outermost_js_2); | 4712 __ bind(¬_outermost_js_2); |
4571 | 4713 |
4572 // Restore the top frame descriptor from the stack. | 4714 // Restore the top frame descriptor from the stack. |
4573 __ pop(Operand::StaticVariable(ExternalReference( | 4715 __ pop(Operand::StaticVariable(ExternalReference( |
4574 Isolate::kCEntryFPAddress, | 4716 Isolate::kCEntryFPAddress, |
4575 masm->isolate()))); | 4717 masm->isolate()))); |
4576 | 4718 |
4577 // Restore callee-saved registers (C calling conventions). | 4719 // Restore callee-saved registers (C calling conventions). |
4578 __ pop(ebx); | 4720 __ pop(ebx); |
4579 __ pop(esi); | 4721 __ pop(esi); |
4580 __ pop(edi); | 4722 __ pop(edi); |
4581 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers | 4723 __ add(esp, Immediate(2 * kPointerSize)); // remove markers |
4582 | 4724 |
4583 // Restore frame pointer and return. | 4725 // Restore frame pointer and return. |
4584 __ pop(ebp); | 4726 __ pop(ebp); |
4585 __ ret(0); | 4727 __ ret(0); |
4586 } | 4728 } |
4587 | 4729 |
4588 | 4730 |
4589 // Generate stub code for instanceof. | 4731 // Generate stub code for instanceof. |
4590 // This code can patch a call site inlined cache of the instance of check, | 4732 // This code can patch a call site inlined cache of the instance of check, |
4591 // which looks like this. | 4733 // which looks like this. |
(...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4687 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)"); | 4829 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)"); |
4688 } | 4830 } |
4689 __ mov(Operand(scratch, kDeltaToCmpImmediate), map); | 4831 __ mov(Operand(scratch, kDeltaToCmpImmediate), map); |
4690 } | 4832 } |
4691 | 4833 |
4692 // Loop through the prototype chain of the object looking for the function | 4834 // Loop through the prototype chain of the object looking for the function |
4693 // prototype. | 4835 // prototype. |
4694 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); | 4836 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); |
4695 Label loop, is_instance, is_not_instance; | 4837 Label loop, is_instance, is_not_instance; |
4696 __ bind(&loop); | 4838 __ bind(&loop); |
4697 __ cmp(scratch, Operand(prototype)); | 4839 __ cmp(scratch, prototype); |
4698 __ j(equal, &is_instance, Label::kNear); | 4840 __ j(equal, &is_instance, Label::kNear); |
4699 Factory* factory = masm->isolate()->factory(); | 4841 Factory* factory = masm->isolate()->factory(); |
4700 __ cmp(Operand(scratch), Immediate(factory->null_value())); | 4842 __ cmp(scratch, Immediate(factory->null_value())); |
4701 __ j(equal, &is_not_instance, Label::kNear); | 4843 __ j(equal, &is_not_instance, Label::kNear); |
4702 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 4844 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
4703 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); | 4845 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); |
4704 __ jmp(&loop); | 4846 __ jmp(&loop); |
4705 | 4847 |
4706 __ bind(&is_instance); | 4848 __ bind(&is_instance); |
4707 if (!HasCallSiteInlineCheck()) { | 4849 if (!HasCallSiteInlineCheck()) { |
4708 __ Set(eax, Immediate(0)); | 4850 __ Set(eax, Immediate(0)); |
4709 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); | 4851 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); |
4710 __ mov(Operand::StaticArray(scratch, | 4852 __ mov(Operand::StaticArray(scratch, |
(...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4781 if (HasArgsInRegisters()) { | 4923 if (HasArgsInRegisters()) { |
4782 // Push arguments below return address. | 4924 // Push arguments below return address. |
4783 __ pop(scratch); | 4925 __ pop(scratch); |
4784 __ push(object); | 4926 __ push(object); |
4785 __ push(function); | 4927 __ push(function); |
4786 __ push(scratch); | 4928 __ push(scratch); |
4787 } | 4929 } |
4788 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 4930 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
4789 } else { | 4931 } else { |
4790 // Call the builtin and convert 0/1 to true/false. | 4932 // Call the builtin and convert 0/1 to true/false. |
4791 __ EnterInternalFrame(); | 4933 { |
4792 __ push(object); | 4934 FrameScope scope(masm, StackFrame::INTERNAL); |
4793 __ push(function); | 4935 __ push(object); |
4794 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); | 4936 __ push(function); |
4795 __ LeaveInternalFrame(); | 4937 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); |
| 4938 } |
4796 Label true_value, done; | 4939 Label true_value, done; |
4797 __ test(eax, Operand(eax)); | 4940 __ test(eax, eax); |
4798 __ j(zero, &true_value, Label::kNear); | 4941 __ j(zero, &true_value, Label::kNear); |
4799 __ mov(eax, factory->false_value()); | 4942 __ mov(eax, factory->false_value()); |
4800 __ jmp(&done, Label::kNear); | 4943 __ jmp(&done, Label::kNear); |
4801 __ bind(&true_value); | 4944 __ bind(&true_value); |
4802 __ mov(eax, factory->true_value()); | 4945 __ mov(eax, factory->true_value()); |
4803 __ bind(&done); | 4946 __ bind(&done); |
4804 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); | 4947 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
4805 } | 4948 } |
4806 } | 4949 } |
4807 | 4950 |
(...skipping 295 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5103 } | 5246 } |
5104 } | 5247 } |
5105 | 5248 |
5106 // Both arguments are strings. | 5249 // Both arguments are strings. |
5107 // eax: first string | 5250 // eax: first string |
5108 // edx: second string | 5251 // edx: second string |
5109 // Check if either of the strings are empty. In that case return the other. | 5252 // Check if either of the strings are empty. In that case return the other. |
5110 Label second_not_zero_length, both_not_zero_length; | 5253 Label second_not_zero_length, both_not_zero_length; |
5111 __ mov(ecx, FieldOperand(edx, String::kLengthOffset)); | 5254 __ mov(ecx, FieldOperand(edx, String::kLengthOffset)); |
5112 STATIC_ASSERT(kSmiTag == 0); | 5255 STATIC_ASSERT(kSmiTag == 0); |
5113 __ test(ecx, Operand(ecx)); | 5256 __ test(ecx, ecx); |
5114 __ j(not_zero, &second_not_zero_length, Label::kNear); | 5257 __ j(not_zero, &second_not_zero_length, Label::kNear); |
5115 // Second string is empty, result is first string which is already in eax. | 5258 // Second string is empty, result is first string which is already in eax. |
5116 Counters* counters = masm->isolate()->counters(); | 5259 Counters* counters = masm->isolate()->counters(); |
5117 __ IncrementCounter(counters->string_add_native(), 1); | 5260 __ IncrementCounter(counters->string_add_native(), 1); |
5118 __ ret(2 * kPointerSize); | 5261 __ ret(2 * kPointerSize); |
5119 __ bind(&second_not_zero_length); | 5262 __ bind(&second_not_zero_length); |
5120 __ mov(ebx, FieldOperand(eax, String::kLengthOffset)); | 5263 __ mov(ebx, FieldOperand(eax, String::kLengthOffset)); |
5121 STATIC_ASSERT(kSmiTag == 0); | 5264 STATIC_ASSERT(kSmiTag == 0); |
5122 __ test(ebx, Operand(ebx)); | 5265 __ test(ebx, ebx); |
5123 __ j(not_zero, &both_not_zero_length, Label::kNear); | 5266 __ j(not_zero, &both_not_zero_length, Label::kNear); |
5124 // First string is empty, result is second string which is in edx. | 5267 // First string is empty, result is second string which is in edx. |
5125 __ mov(eax, edx); | 5268 __ mov(eax, edx); |
5126 __ IncrementCounter(counters->string_add_native(), 1); | 5269 __ IncrementCounter(counters->string_add_native(), 1); |
5127 __ ret(2 * kPointerSize); | 5270 __ ret(2 * kPointerSize); |
5128 | 5271 |
5129 // Both strings are non-empty. | 5272 // Both strings are non-empty. |
5130 // eax: first string | 5273 // eax: first string |
5131 // ebx: length of first string as a smi | 5274 // ebx: length of first string as a smi |
5132 // ecx: length of second string as a smi | 5275 // ecx: length of second string as a smi |
5133 // edx: second string | 5276 // edx: second string |
5134 // Look at the length of the result of adding the two strings. | 5277 // Look at the length of the result of adding the two strings. |
5135 Label string_add_flat_result, longer_than_two; | 5278 Label string_add_flat_result, longer_than_two; |
5136 __ bind(&both_not_zero_length); | 5279 __ bind(&both_not_zero_length); |
5137 __ add(ebx, Operand(ecx)); | 5280 __ add(ebx, ecx); |
5138 STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength); | 5281 STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength); |
5139 // Handle exceptionally long strings in the runtime system. | 5282 // Handle exceptionally long strings in the runtime system. |
5140 __ j(overflow, &string_add_runtime); | 5283 __ j(overflow, &string_add_runtime); |
5141 // Use the symbol table when adding two one character strings, as it | 5284 // Use the symbol table when adding two one character strings, as it |
5142 // helps later optimizations to return a symbol here. | 5285 // helps later optimizations to return a symbol here. |
5143 __ cmp(Operand(ebx), Immediate(Smi::FromInt(2))); | 5286 __ cmp(ebx, Immediate(Smi::FromInt(2))); |
5144 __ j(not_equal, &longer_than_two); | 5287 __ j(not_equal, &longer_than_two); |
5145 | 5288 |
5146 // Check that both strings are non-external ascii strings. | 5289 // Check that both strings are non-external ascii strings. |
5147 __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx, | 5290 __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx, |
5148 &string_add_runtime); | 5291 &string_add_runtime); |
5149 | 5292 |
5150 // Get the two characters forming the new string. | 5293 // Get the two characters forming the new string. |
5151 __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize)); | 5294 __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize)); |
5152 __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize)); | 5295 __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize)); |
5153 | 5296 |
(...skipping 16 matching lines...) Expand all Loading... |
5170 __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize)); | 5313 __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize)); |
5171 __ bind(&make_two_character_string_no_reload); | 5314 __ bind(&make_two_character_string_no_reload); |
5172 __ IncrementCounter(counters->string_add_make_two_char(), 1); | 5315 __ IncrementCounter(counters->string_add_make_two_char(), 1); |
5173 __ AllocateAsciiString(eax, // Result. | 5316 __ AllocateAsciiString(eax, // Result. |
5174 2, // Length. | 5317 2, // Length. |
5175 edi, // Scratch 1. | 5318 edi, // Scratch 1. |
5176 edx, // Scratch 2. | 5319 edx, // Scratch 2. |
5177 &string_add_runtime); | 5320 &string_add_runtime); |
5178 // Pack both characters in ebx. | 5321 // Pack both characters in ebx. |
5179 __ shl(ecx, kBitsPerByte); | 5322 __ shl(ecx, kBitsPerByte); |
5180 __ or_(ebx, Operand(ecx)); | 5323 __ or_(ebx, ecx); |
5181 // Set the characters in the new string. | 5324 // Set the characters in the new string. |
5182 __ mov_w(FieldOperand(eax, SeqAsciiString::kHeaderSize), ebx); | 5325 __ mov_w(FieldOperand(eax, SeqAsciiString::kHeaderSize), ebx); |
5183 __ IncrementCounter(counters->string_add_native(), 1); | 5326 __ IncrementCounter(counters->string_add_native(), 1); |
5184 __ ret(2 * kPointerSize); | 5327 __ ret(2 * kPointerSize); |
5185 | 5328 |
5186 __ bind(&longer_than_two); | 5329 __ bind(&longer_than_two); |
5187 // Check if resulting string will be flat. | 5330 // Check if resulting string will be flat. |
5188 __ cmp(Operand(ebx), Immediate(Smi::FromInt(String::kMinNonFlatLength))); | 5331 __ cmp(ebx, Immediate(Smi::FromInt(String::kMinNonFlatLength))); |
5189 __ j(below, &string_add_flat_result); | 5332 __ j(below, &string_add_flat_result); |
5190 | 5333 |
5191 // If result is not supposed to be flat allocate a cons string object. If both | 5334 // If result is not supposed to be flat allocate a cons string object. If both |
5192 // strings are ascii the result is an ascii cons string. | 5335 // strings are ascii the result is an ascii cons string. |
5193 Label non_ascii, allocated, ascii_data; | 5336 Label non_ascii, allocated, ascii_data; |
5194 __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset)); | 5337 __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset)); |
5195 __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset)); | 5338 __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset)); |
5196 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset)); | 5339 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset)); |
5197 __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset)); | 5340 __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset)); |
5198 __ and_(ecx, Operand(edi)); | 5341 __ and_(ecx, edi); |
5199 STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0); | 5342 STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0); |
5200 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); | 5343 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); |
5201 __ test(ecx, Immediate(kStringEncodingMask)); | 5344 __ test(ecx, Immediate(kStringEncodingMask)); |
5202 __ j(zero, &non_ascii); | 5345 __ j(zero, &non_ascii); |
5203 __ bind(&ascii_data); | 5346 __ bind(&ascii_data); |
5204 // Allocate an ascii cons string. | 5347 // Allocate an ascii cons string. |
5205 __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime); | 5348 __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime); |
5206 __ bind(&allocated); | 5349 __ bind(&allocated); |
5207 // Fill the fields of the cons string. | 5350 // Fill the fields of the cons string. |
5208 if (FLAG_debug_code) __ AbortIfNotSmi(ebx); | 5351 if (FLAG_debug_code) __ AbortIfNotSmi(ebx); |
5209 __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx); | 5352 __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx); |
5210 __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset), | 5353 __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset), |
5211 Immediate(String::kEmptyHashField)); | 5354 Immediate(String::kEmptyHashField)); |
5212 __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax); | 5355 __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax); |
5213 __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx); | 5356 __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx); |
5214 __ mov(eax, ecx); | 5357 __ mov(eax, ecx); |
5215 __ IncrementCounter(counters->string_add_native(), 1); | 5358 __ IncrementCounter(counters->string_add_native(), 1); |
5216 __ ret(2 * kPointerSize); | 5359 __ ret(2 * kPointerSize); |
5217 __ bind(&non_ascii); | 5360 __ bind(&non_ascii); |
5218 // At least one of the strings is two-byte. Check whether it happens | 5361 // At least one of the strings is two-byte. Check whether it happens |
5219 // to contain only ascii characters. | 5362 // to contain only ascii characters. |
5220 // ecx: first instance type AND second instance type. | 5363 // ecx: first instance type AND second instance type. |
5221 // edi: second instance type. | 5364 // edi: second instance type. |
5222 __ test(ecx, Immediate(kAsciiDataHintMask)); | 5365 __ test(ecx, Immediate(kAsciiDataHintMask)); |
5223 __ j(not_zero, &ascii_data); | 5366 __ j(not_zero, &ascii_data); |
5224 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); | 5367 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); |
5225 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); | 5368 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); |
5226 __ xor_(edi, Operand(ecx)); | 5369 __ xor_(edi, ecx); |
5227 STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0); | 5370 STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0); |
5228 __ and_(edi, kAsciiStringTag | kAsciiDataHintTag); | 5371 __ and_(edi, kAsciiStringTag | kAsciiDataHintTag); |
5229 __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag); | 5372 __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag); |
5230 __ j(equal, &ascii_data); | 5373 __ j(equal, &ascii_data); |
5231 // Allocate a two byte cons string. | 5374 // Allocate a two byte cons string. |
5232 __ AllocateTwoByteConsString(ecx, edi, no_reg, &string_add_runtime); | 5375 __ AllocateTwoByteConsString(ecx, edi, no_reg, &string_add_runtime); |
5233 __ jmp(&allocated); | 5376 __ jmp(&allocated); |
5234 | 5377 |
5235 // Handle creating a flat result. First check that both strings are not | 5378 // Handle creating a flat result. First check that both strings are not |
5236 // external strings. | 5379 // external strings. |
(...skipping 27 matching lines...) Expand all Loading... |
5264 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask); | 5407 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask); |
5265 __ j(zero, &string_add_runtime); | 5408 __ j(zero, &string_add_runtime); |
5266 | 5409 |
5267 // Both strings are ascii strings. As they are short they are both flat. | 5410 // Both strings are ascii strings. As they are short they are both flat. |
5268 // ebx: length of resulting flat string as a smi | 5411 // ebx: length of resulting flat string as a smi |
5269 __ SmiUntag(ebx); | 5412 __ SmiUntag(ebx); |
5270 __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime); | 5413 __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime); |
5271 // eax: result string | 5414 // eax: result string |
5272 __ mov(ecx, eax); | 5415 __ mov(ecx, eax); |
5273 // Locate first character of result. | 5416 // Locate first character of result. |
5274 __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); | 5417 __ add(ecx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
5275 // Load first argument and locate first character. | 5418 // Load first argument and locate first character. |
5276 __ mov(edx, Operand(esp, 2 * kPointerSize)); | 5419 __ mov(edx, Operand(esp, 2 * kPointerSize)); |
5277 __ mov(edi, FieldOperand(edx, String::kLengthOffset)); | 5420 __ mov(edi, FieldOperand(edx, String::kLengthOffset)); |
5278 __ SmiUntag(edi); | 5421 __ SmiUntag(edi); |
5279 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); | 5422 __ add(edx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
5280 // eax: result string | 5423 // eax: result string |
5281 // ecx: first character of result | 5424 // ecx: first character of result |
5282 // edx: first char of first argument | 5425 // edx: first char of first argument |
5283 // edi: length of first argument | 5426 // edi: length of first argument |
5284 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true); | 5427 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true); |
5285 // Load second argument and locate first character. | 5428 // Load second argument and locate first character. |
5286 __ mov(edx, Operand(esp, 1 * kPointerSize)); | 5429 __ mov(edx, Operand(esp, 1 * kPointerSize)); |
5287 __ mov(edi, FieldOperand(edx, String::kLengthOffset)); | 5430 __ mov(edi, FieldOperand(edx, String::kLengthOffset)); |
5288 __ SmiUntag(edi); | 5431 __ SmiUntag(edi); |
5289 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); | 5432 __ add(edx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
5290 // eax: result string | 5433 // eax: result string |
5291 // ecx: next character of result | 5434 // ecx: next character of result |
5292 // edx: first char of second argument | 5435 // edx: first char of second argument |
5293 // edi: length of second argument | 5436 // edi: length of second argument |
5294 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true); | 5437 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true); |
5295 __ IncrementCounter(counters->string_add_native(), 1); | 5438 __ IncrementCounter(counters->string_add_native(), 1); |
5296 __ ret(2 * kPointerSize); | 5439 __ ret(2 * kPointerSize); |
5297 | 5440 |
5298 // Handle creating a flat two byte result. | 5441 // Handle creating a flat two byte result. |
5299 // eax: first string - known to be two byte | 5442 // eax: first string - known to be two byte |
5300 // ebx: length of resulting flat string as a smi | 5443 // ebx: length of resulting flat string as a smi |
5301 // edx: second string | 5444 // edx: second string |
5302 __ bind(&non_ascii_string_add_flat_result); | 5445 __ bind(&non_ascii_string_add_flat_result); |
5303 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); | 5446 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); |
5304 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask); | 5447 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask); |
5305 __ j(not_zero, &string_add_runtime); | 5448 __ j(not_zero, &string_add_runtime); |
5306 // Both strings are two byte strings. As they are short they are both | 5449 // Both strings are two byte strings. As they are short they are both |
5307 // flat. | 5450 // flat. |
5308 __ SmiUntag(ebx); | 5451 __ SmiUntag(ebx); |
5309 __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime); | 5452 __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime); |
5310 // eax: result string | 5453 // eax: result string |
5311 __ mov(ecx, eax); | 5454 __ mov(ecx, eax); |
5312 // Locate first character of result. | 5455 // Locate first character of result. |
5313 __ add(Operand(ecx), | 5456 __ add(ecx, |
5314 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | 5457 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); |
5315 // Load first argument and locate first character. | 5458 // Load first argument and locate first character. |
5316 __ mov(edx, Operand(esp, 2 * kPointerSize)); | 5459 __ mov(edx, Operand(esp, 2 * kPointerSize)); |
5317 __ mov(edi, FieldOperand(edx, String::kLengthOffset)); | 5460 __ mov(edi, FieldOperand(edx, String::kLengthOffset)); |
5318 __ SmiUntag(edi); | 5461 __ SmiUntag(edi); |
5319 __ add(Operand(edx), | 5462 __ add(edx, |
5320 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | 5463 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); |
5321 // eax: result string | 5464 // eax: result string |
5322 // ecx: first character of result | 5465 // ecx: first character of result |
5323 // edx: first char of first argument | 5466 // edx: first char of first argument |
5324 // edi: length of first argument | 5467 // edi: length of first argument |
5325 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false); | 5468 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false); |
5326 // Load second argument and locate first character. | 5469 // Load second argument and locate first character. |
5327 __ mov(edx, Operand(esp, 1 * kPointerSize)); | 5470 __ mov(edx, Operand(esp, 1 * kPointerSize)); |
5328 __ mov(edi, FieldOperand(edx, String::kLengthOffset)); | 5471 __ mov(edi, FieldOperand(edx, String::kLengthOffset)); |
5329 __ SmiUntag(edi); | 5472 __ SmiUntag(edi); |
5330 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); | 5473 __ add(edx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
5331 // eax: result string | 5474 // eax: result string |
5332 // ecx: next character of result | 5475 // ecx: next character of result |
5333 // edx: first char of second argument | 5476 // edx: first char of second argument |
5334 // edi: length of second argument | 5477 // edi: length of second argument |
5335 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false); | 5478 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false); |
5336 __ IncrementCounter(counters->string_add_native(), 1); | 5479 __ IncrementCounter(counters->string_add_native(), 1); |
5337 __ ret(2 * kPointerSize); | 5480 __ ret(2 * kPointerSize); |
5338 | 5481 |
5339 // Just jump to runtime to add the two strings. | 5482 // Just jump to runtime to add the two strings. |
5340 __ bind(&string_add_runtime); | 5483 __ bind(&string_add_runtime); |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5396 Register count, | 5539 Register count, |
5397 Register scratch, | 5540 Register scratch, |
5398 bool ascii) { | 5541 bool ascii) { |
5399 Label loop; | 5542 Label loop; |
5400 __ bind(&loop); | 5543 __ bind(&loop); |
5401 // This loop just copies one character at a time, as it is only used for very | 5544 // This loop just copies one character at a time, as it is only used for very |
5402 // short strings. | 5545 // short strings. |
5403 if (ascii) { | 5546 if (ascii) { |
5404 __ mov_b(scratch, Operand(src, 0)); | 5547 __ mov_b(scratch, Operand(src, 0)); |
5405 __ mov_b(Operand(dest, 0), scratch); | 5548 __ mov_b(Operand(dest, 0), scratch); |
5406 __ add(Operand(src), Immediate(1)); | 5549 __ add(src, Immediate(1)); |
5407 __ add(Operand(dest), Immediate(1)); | 5550 __ add(dest, Immediate(1)); |
5408 } else { | 5551 } else { |
5409 __ mov_w(scratch, Operand(src, 0)); | 5552 __ mov_w(scratch, Operand(src, 0)); |
5410 __ mov_w(Operand(dest, 0), scratch); | 5553 __ mov_w(Operand(dest, 0), scratch); |
5411 __ add(Operand(src), Immediate(2)); | 5554 __ add(src, Immediate(2)); |
5412 __ add(Operand(dest), Immediate(2)); | 5555 __ add(dest, Immediate(2)); |
5413 } | 5556 } |
5414 __ sub(Operand(count), Immediate(1)); | 5557 __ sub(count, Immediate(1)); |
5415 __ j(not_zero, &loop); | 5558 __ j(not_zero, &loop); |
5416 } | 5559 } |
5417 | 5560 |
5418 | 5561 |
// Emits ia32 code that copies |count| characters from |src| to |dest| using
// a doubleword `rep movs` for the bulk and a byte loop for the <4-byte tail.
// On entry |count| holds the number of CHARACTERS (not bytes); for two-byte
// strings it is doubled below. |dest|/|src|/|count| are pinned to the
// registers the `rep movs` instruction implicitly uses (edi/esi/ecx).
// All of |dest|, |src|, |count| and |scratch| are clobbered.
void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             Register scratch,
                                             bool ascii) {
  // Copy characters using rep movs of doublewords.
  // The destination is aligned on a 4 byte boundary because we are
  // copying to the beginning of a newly allocated string.
  ASSERT(dest.is(edi));  // rep movs destination
  ASSERT(src.is(esi));  // rep movs source
  ASSERT(count.is(ecx));  // rep movs count
  ASSERT(!scratch.is(dest));
  ASSERT(!scratch.is(src));
  ASSERT(!scratch.is(count));

  // Nothing to do for zero characters.
  Label done;
  __ test(count, count);
  __ j(zero, &done);

  // Make count the number of bytes to copy.
  if (!ascii) {
    // Two-byte characters: bytes = characters * 2.
    __ shl(count, 1);
  }

  // Don't enter the rep movs if there are less than 4 bytes to copy.
  Label last_bytes;
  __ test(count, Immediate(~3));  // Any bits above the low two set?
  __ j(zero, &last_bytes, Label::kNear);

  // Copy from edi to esi using rep movs instruction.
  __ mov(scratch, count);  // Save the byte count for the tail computation.
  __ sar(count, 2);  // Number of doublewords to copy.
  __ cld();  // Ensure forward direction for the string move.
  __ rep_movs();

  // Find number of bytes left.
  __ mov(count, scratch);
  __ and_(count, 3);  // Remainder after doubleword copy: 0..3 bytes.

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ test(count, count);
  __ j(zero, &done);

  // Copy remaining characters.
  // Byte-at-a-time tail loop; runs at most 3 iterations.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(src, 0));
  __ mov_b(Operand(dest, 0), scratch);
  __ add(src, Immediate(1));
  __ add(dest, Immediate(1));
  __ sub(count, Immediate(1));
  __ j(not_zero, &loop);

  __ bind(&done);
}
5477 | 5620 |
5478 | 5621 |
5479 void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm, | 5622 void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm, |
5480 Register c1, | 5623 Register c1, |
5481 Register c2, | 5624 Register c2, |
5482 Register scratch1, | 5625 Register scratch1, |
5483 Register scratch2, | 5626 Register scratch2, |
5484 Register scratch3, | 5627 Register scratch3, |
5485 Label* not_probed, | 5628 Label* not_probed, |
5486 Label* not_found) { | 5629 Label* not_found) { |
5487 // Register scratch3 is the general scratch register in this function. | 5630 // Register scratch3 is the general scratch register in this function. |
5488 Register scratch = scratch3; | 5631 Register scratch = scratch3; |
5489 | 5632 |
5490 // Make sure that both characters are not digits as such strings has a | 5633 // Make sure that both characters are not digits as such strings has a |
5491 // different hash algorithm. Don't try to look for these in the symbol table. | 5634 // different hash algorithm. Don't try to look for these in the symbol table. |
5492 Label not_array_index; | 5635 Label not_array_index; |
5493 __ mov(scratch, c1); | 5636 __ mov(scratch, c1); |
5494 __ sub(Operand(scratch), Immediate(static_cast<int>('0'))); | 5637 __ sub(scratch, Immediate(static_cast<int>('0'))); |
5495 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0'))); | 5638 __ cmp(scratch, Immediate(static_cast<int>('9' - '0'))); |
5496 __ j(above, ¬_array_index, Label::kNear); | 5639 __ j(above, ¬_array_index, Label::kNear); |
5497 __ mov(scratch, c2); | 5640 __ mov(scratch, c2); |
5498 __ sub(Operand(scratch), Immediate(static_cast<int>('0'))); | 5641 __ sub(scratch, Immediate(static_cast<int>('0'))); |
5499 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0'))); | 5642 __ cmp(scratch, Immediate(static_cast<int>('9' - '0'))); |
5500 __ j(below_equal, not_probed); | 5643 __ j(below_equal, not_probed); |
5501 | 5644 |
5502 __ bind(¬_array_index); | 5645 __ bind(¬_array_index); |
5503 // Calculate the two character string hash. | 5646 // Calculate the two character string hash. |
5504 Register hash = scratch1; | 5647 Register hash = scratch1; |
5505 GenerateHashInit(masm, hash, c1, scratch); | 5648 GenerateHashInit(masm, hash, c1, scratch); |
5506 GenerateHashAddCharacter(masm, hash, c2, scratch); | 5649 GenerateHashAddCharacter(masm, hash, c2, scratch); |
5507 GenerateHashGetHash(masm, hash, scratch); | 5650 GenerateHashGetHash(masm, hash, scratch); |
5508 | 5651 |
5509 // Collect the two characters in a register. | 5652 // Collect the two characters in a register. |
5510 Register chars = c1; | 5653 Register chars = c1; |
5511 __ shl(c2, kBitsPerByte); | 5654 __ shl(c2, kBitsPerByte); |
5512 __ or_(chars, Operand(c2)); | 5655 __ or_(chars, c2); |
5513 | 5656 |
5514 // chars: two character string, char 1 in byte 0 and char 2 in byte 1. | 5657 // chars: two character string, char 1 in byte 0 and char 2 in byte 1. |
5515 // hash: hash of two character string. | 5658 // hash: hash of two character string. |
5516 | 5659 |
5517 // Load the symbol table. | 5660 // Load the symbol table. |
5518 Register symbol_table = c2; | 5661 Register symbol_table = c2; |
5519 ExternalReference roots_address = | 5662 ExternalReference roots_address = |
5520 ExternalReference::roots_address(masm->isolate()); | 5663 ExternalReference::roots_address(masm->isolate()); |
5521 __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex)); | 5664 __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex)); |
5522 __ mov(symbol_table, | 5665 __ mov(symbol_table, |
5523 Operand::StaticArray(scratch, times_pointer_size, roots_address)); | 5666 Operand::StaticArray(scratch, times_pointer_size, roots_address)); |
5524 | 5667 |
5525 // Calculate capacity mask from the symbol table capacity. | 5668 // Calculate capacity mask from the symbol table capacity. |
5526 Register mask = scratch2; | 5669 Register mask = scratch2; |
5527 __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset)); | 5670 __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset)); |
5528 __ SmiUntag(mask); | 5671 __ SmiUntag(mask); |
5529 __ sub(Operand(mask), Immediate(1)); | 5672 __ sub(mask, Immediate(1)); |
5530 | 5673 |
5531 // Registers | 5674 // Registers |
5532 // chars: two character string, char 1 in byte 0 and char 2 in byte 1. | 5675 // chars: two character string, char 1 in byte 0 and char 2 in byte 1. |
5533 // hash: hash of two character string | 5676 // hash: hash of two character string |
5534 // symbol_table: symbol table | 5677 // symbol_table: symbol table |
5535 // mask: capacity mask | 5678 // mask: capacity mask |
5536 // scratch: - | 5679 // scratch: - |
5537 | 5680 |
5538 // Perform a number of probes in the symbol table. | 5681 // Perform a number of probes in the symbol table. |
5539 static const int kProbes = 4; | 5682 static const int kProbes = 4; |
5540 Label found_in_symbol_table; | 5683 Label found_in_symbol_table; |
5541 Label next_probe[kProbes], next_probe_pop_mask[kProbes]; | 5684 Label next_probe[kProbes], next_probe_pop_mask[kProbes]; |
5542 for (int i = 0; i < kProbes; i++) { | 5685 for (int i = 0; i < kProbes; i++) { |
5543 // Calculate entry in symbol table. | 5686 // Calculate entry in symbol table. |
5544 __ mov(scratch, hash); | 5687 __ mov(scratch, hash); |
5545 if (i > 0) { | 5688 if (i > 0) { |
5546 __ add(Operand(scratch), Immediate(SymbolTable::GetProbeOffset(i))); | 5689 __ add(scratch, Immediate(SymbolTable::GetProbeOffset(i))); |
5547 } | 5690 } |
5548 __ and_(scratch, Operand(mask)); | 5691 __ and_(scratch, mask); |
5549 | 5692 |
5550 // Load the entry from the symbol table. | 5693 // Load the entry from the symbol table. |
5551 Register candidate = scratch; // Scratch register contains candidate. | 5694 Register candidate = scratch; // Scratch register contains candidate. |
5552 STATIC_ASSERT(SymbolTable::kEntrySize == 1); | 5695 STATIC_ASSERT(SymbolTable::kEntrySize == 1); |
5553 __ mov(candidate, | 5696 __ mov(candidate, |
5554 FieldOperand(symbol_table, | 5697 FieldOperand(symbol_table, |
5555 scratch, | 5698 scratch, |
5556 times_pointer_size, | 5699 times_pointer_size, |
5557 SymbolTable::kElementsStartOffset)); | 5700 SymbolTable::kElementsStartOffset)); |
5558 | 5701 |
(...skipping 16 matching lines...) Expand all Loading... |
5575 | 5718 |
5576 // Check that the candidate is a non-external ascii string. | 5719 // Check that the candidate is a non-external ascii string. |
5577 __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset)); | 5720 __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset)); |
5578 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); | 5721 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); |
5579 __ JumpIfInstanceTypeIsNotSequentialAscii( | 5722 __ JumpIfInstanceTypeIsNotSequentialAscii( |
5580 temp, temp, &next_probe_pop_mask[i]); | 5723 temp, temp, &next_probe_pop_mask[i]); |
5581 | 5724 |
5582 // Check if the two characters match. | 5725 // Check if the two characters match. |
5583 __ mov(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize)); | 5726 __ mov(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize)); |
5584 __ and_(temp, 0x0000ffff); | 5727 __ and_(temp, 0x0000ffff); |
5585 __ cmp(chars, Operand(temp)); | 5728 __ cmp(chars, temp); |
5586 __ j(equal, &found_in_symbol_table); | 5729 __ j(equal, &found_in_symbol_table); |
5587 __ bind(&next_probe_pop_mask[i]); | 5730 __ bind(&next_probe_pop_mask[i]); |
5588 __ pop(mask); | 5731 __ pop(mask); |
5589 __ bind(&next_probe[i]); | 5732 __ bind(&next_probe[i]); |
5590 } | 5733 } |
5591 | 5734 |
5592 // No matching 2 character string found by probing. | 5735 // No matching 2 character string found by probing. |
5593 __ jmp(not_found); | 5736 __ jmp(not_found); |
5594 | 5737 |
5595 // Scratch register contains result when we fall through to here. | 5738 // Scratch register contains result when we fall through to here. |
5596 Register result = scratch; | 5739 Register result = scratch; |
5597 __ bind(&found_in_symbol_table); | 5740 __ bind(&found_in_symbol_table); |
5598 __ pop(mask); // Pop saved mask from the stack. | 5741 __ pop(mask); // Pop saved mask from the stack. |
5599 if (!result.is(eax)) { | 5742 if (!result.is(eax)) { |
5600 __ mov(eax, result); | 5743 __ mov(eax, result); |
5601 } | 5744 } |
5602 } | 5745 } |
5603 | 5746 |
5604 | 5747 |
// Emits code that initializes the running string hash in |hash| from the
// first character in |character|. The mixing steps (add-shift-10, xor-shift-6)
// must stay in sync with GenerateHashAddCharacter/GenerateHashGetHash and the
// runtime's string hashing so probing finds the same table entries.
// |scratch| is clobbered; |character| is preserved.
void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = character + (character << 10);
  __ mov(hash, character);
  __ shl(hash, 10);
  __ add(hash, character);
  // hash ^= hash >> 6;
  // NOTE(review): arithmetic shift (sar) — assumes hash is non-negative here;
  // confirm against the runtime hash implementation.
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, scratch);
}
5618 | 5761 |
5619 | 5762 |
// Emits code that folds the next character in |character| into the running
// hash in |hash|. Mirrors GenerateHashInit's mixing so the incremental hash
// matches the runtime computation. |scratch| is clobbered; |character| is
// preserved.
void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ add(hash, character);
  // hash += hash << 10;
  __ mov(scratch, hash);
  __ shl(scratch, 10);
  __ add(hash, scratch);
  // hash ^= hash >> 6;
  // NOTE(review): arithmetic shift (sar) — assumes hash stays non-negative;
  // confirm against the runtime hash implementation.
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, scratch);
}
5635 | 5778 |
5636 | 5779 |
// Emits the final avalanche steps of the string hash, leaving the finished
// hash value in |hash|. A zero result is remapped to 27 so that 0 can serve
// as a "no hash computed" sentinel elsewhere. |scratch| is clobbered.
void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ mov(scratch, hash);
  __ shl(scratch, 3);
  __ add(hash, scratch);
  // hash ^= hash >> 11;
  // NOTE(review): arithmetic shift (sar) — assumes hash stays non-negative;
  // confirm against the runtime hash implementation.
  __ mov(scratch, hash);
  __ sar(scratch, 11);
  __ xor_(hash, scratch);
  // hash += hash << 15;
  __ mov(scratch, hash);
  __ shl(scratch, 15);
  __ add(hash, scratch);

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ test(hash, hash);
  __ j(not_zero, &hash_not_zero, Label::kNear);
  __ mov(hash, Immediate(27));
  __ bind(&hash_not_zero);
}
5660 | 5803 |
5661 | 5804 |
5662 void SubStringStub::Generate(MacroAssembler* masm) { | 5805 void SubStringStub::Generate(MacroAssembler* masm) { |
5663 Label runtime; | 5806 Label runtime; |
5664 | 5807 |
5665 // Stack frame on entry. | 5808 // Stack frame on entry. |
(...skipping 11 matching lines...) Expand all Loading... |
5677 | 5820 |
5678 // eax: string | 5821 // eax: string |
5679 // ebx: instance type | 5822 // ebx: instance type |
5680 | 5823 |
5681 // Calculate length of sub string using the smi values. | 5824 // Calculate length of sub string using the smi values. |
5682 Label result_longer_than_two; | 5825 Label result_longer_than_two; |
5683 __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index. | 5826 __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index. |
5684 __ JumpIfNotSmi(ecx, &runtime); | 5827 __ JumpIfNotSmi(ecx, &runtime); |
5685 __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index. | 5828 __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index. |
5686 __ JumpIfNotSmi(edx, &runtime); | 5829 __ JumpIfNotSmi(edx, &runtime); |
5687 __ sub(ecx, Operand(edx)); | 5830 __ sub(ecx, edx); |
5688 __ cmp(ecx, FieldOperand(eax, String::kLengthOffset)); | 5831 __ cmp(ecx, FieldOperand(eax, String::kLengthOffset)); |
5689 Label return_eax; | 5832 Label return_eax; |
5690 __ j(equal, &return_eax); | 5833 __ j(equal, &return_eax); |
5691 // Special handling of sub-strings of length 1 and 2. One character strings | 5834 // Special handling of sub-strings of length 1 and 2. One character strings |
5692 // are handled in the runtime system (looked up in the single character | 5835 // are handled in the runtime system (looked up in the single character |
5693 // cache). Two character strings are looked for in the symbol cache. | 5836 // cache). Two character strings are looked for in the symbol cache. |
5694 __ SmiUntag(ecx); // Result length is no longer smi. | 5837 __ SmiUntag(ecx); // Result length is no longer smi. |
5695 __ cmp(ecx, 2); | 5838 __ cmp(ecx, 2); |
5696 __ j(greater, &result_longer_than_two); | 5839 __ j(greater, &result_longer_than_two); |
5697 __ j(less, &runtime); | 5840 __ j(less, &runtime); |
(...skipping 111 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5809 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat); | 5952 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat); |
5810 | 5953 |
5811 // Allocate the result. | 5954 // Allocate the result. |
5812 __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime); | 5955 __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime); |
5813 | 5956 |
5814 // eax: result string | 5957 // eax: result string |
5815 // ecx: result string length | 5958 // ecx: result string length |
5816 __ mov(edx, esi); // esi used by following code. | 5959 __ mov(edx, esi); // esi used by following code. |
5817 // Locate first character of result. | 5960 // Locate first character of result. |
5818 __ mov(edi, eax); | 5961 __ mov(edi, eax); |
5819 __ add(Operand(edi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); | 5962 __ add(edi, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
5820 // Load string argument and locate character of sub string start. | 5963 // Load string argument and locate character of sub string start. |
5821 __ mov(esi, Operand(esp, 3 * kPointerSize)); | 5964 __ mov(esi, Operand(esp, 3 * kPointerSize)); |
5822 __ add(Operand(esi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); | 5965 __ add(esi, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
5823 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from | 5966 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from |
5824 __ SmiUntag(ebx); | 5967 __ SmiUntag(ebx); |
5825 __ add(esi, Operand(ebx)); | 5968 __ add(esi, ebx); |
5826 | 5969 |
5827 // eax: result string | 5970 // eax: result string |
5828 // ecx: result length | 5971 // ecx: result length |
5829 // edx: original value of esi | 5972 // edx: original value of esi |
5830 // edi: first character of result | 5973 // edi: first character of result |
5831 // esi: character of sub string start | 5974 // esi: character of sub string start |
5832 StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true); | 5975 StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true); |
5833 __ mov(esi, edx); // Restore esi. | 5976 __ mov(esi, edx); // Restore esi. |
5834 Counters* counters = masm->isolate()->counters(); | 5977 Counters* counters = masm->isolate()->counters(); |
5835 __ IncrementCounter(counters->sub_string_native(), 1); | 5978 __ IncrementCounter(counters->sub_string_native(), 1); |
5836 __ ret(3 * kPointerSize); | 5979 __ ret(3 * kPointerSize); |
5837 | 5980 |
5838 __ bind(&non_ascii_flat); | 5981 __ bind(&non_ascii_flat); |
5839 // eax: string | 5982 // eax: string |
5840 // ebx: instance type & kStringRepresentationMask | kStringEncodingMask | 5983 // ebx: instance type & kStringRepresentationMask | kStringEncodingMask |
5841 // ecx: result string length | 5984 // ecx: result string length |
5842 // Check for flat two byte string | 5985 // Check for flat two byte string |
5843 __ cmp(ebx, kSeqStringTag | kTwoByteStringTag); | 5986 __ cmp(ebx, kSeqStringTag | kTwoByteStringTag); |
5844 __ j(not_equal, &runtime); | 5987 __ j(not_equal, &runtime); |
5845 | 5988 |
5846 // Allocate the result. | 5989 // Allocate the result. |
5847 __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime); | 5990 __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime); |
5848 | 5991 |
5849 // eax: result string | 5992 // eax: result string |
5850 // ecx: result string length | 5993 // ecx: result string length |
5851 __ mov(edx, esi); // esi used by following code. | 5994 __ mov(edx, esi); // esi used by following code. |
5852 // Locate first character of result. | 5995 // Locate first character of result. |
5853 __ mov(edi, eax); | 5996 __ mov(edi, eax); |
5854 __ add(Operand(edi), | 5997 __ add(edi, |
5855 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | 5998 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); |
5856 // Load string argument and locate character of sub string start. | 5999 // Load string argument and locate character of sub string start. |
5857 __ mov(esi, Operand(esp, 3 * kPointerSize)); | 6000 __ mov(esi, Operand(esp, 3 * kPointerSize)); |
5858 __ add(Operand(esi), | 6001 __ add(esi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); |
5859 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | |
5860 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from | 6002 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from |
5861 // As from is a smi it is 2 times the value which matches the size of a two | 6003 // As from is a smi it is 2 times the value which matches the size of a two |
5862 // byte character. | 6004 // byte character. |
5863 STATIC_ASSERT(kSmiTag == 0); | 6005 STATIC_ASSERT(kSmiTag == 0); |
5864 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); | 6006 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); |
5865 __ add(esi, Operand(ebx)); | 6007 __ add(esi, ebx); |
5866 | 6008 |
5867 // eax: result string | 6009 // eax: result string |
5868 // ecx: result length | 6010 // ecx: result length |
5869 // edx: original value of esi | 6011 // edx: original value of esi |
5870 // edi: first character of result | 6012 // edi: first character of result |
5871 // esi: character of sub string start | 6013 // esi: character of sub string start |
5872 StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false); | 6014 StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false); |
5873 __ mov(esi, edx); // Restore esi. | 6015 __ mov(esi, edx); // Restore esi. |
5874 | 6016 |
5875 __ bind(&return_eax); | 6017 __ bind(&return_eax); |
(...skipping 19 matching lines...) Expand all Loading... |
5895 __ cmp(length, FieldOperand(right, String::kLengthOffset)); | 6037 __ cmp(length, FieldOperand(right, String::kLengthOffset)); |
5896 __ j(equal, &check_zero_length, Label::kNear); | 6038 __ j(equal, &check_zero_length, Label::kNear); |
5897 __ bind(&strings_not_equal); | 6039 __ bind(&strings_not_equal); |
5898 __ Set(eax, Immediate(Smi::FromInt(NOT_EQUAL))); | 6040 __ Set(eax, Immediate(Smi::FromInt(NOT_EQUAL))); |
5899 __ ret(0); | 6041 __ ret(0); |
5900 | 6042 |
5901 // Check if the length is zero. | 6043 // Check if the length is zero. |
5902 Label compare_chars; | 6044 Label compare_chars; |
5903 __ bind(&check_zero_length); | 6045 __ bind(&check_zero_length); |
5904 STATIC_ASSERT(kSmiTag == 0); | 6046 STATIC_ASSERT(kSmiTag == 0); |
5905 __ test(length, Operand(length)); | 6047 __ test(length, length); |
5906 __ j(not_zero, &compare_chars, Label::kNear); | 6048 __ j(not_zero, &compare_chars, Label::kNear); |
5907 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); | 6049 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); |
5908 __ ret(0); | 6050 __ ret(0); |
5909 | 6051 |
5910 // Compare characters. | 6052 // Compare characters. |
5911 __ bind(&compare_chars); | 6053 __ bind(&compare_chars); |
5912 GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2, | 6054 GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2, |
5913 &strings_not_equal, Label::kNear); | 6055 &strings_not_equal, Label::kNear); |
5914 | 6056 |
5915 // Characters are equal. | 6057 // Characters are equal. |
(...skipping 14 matching lines...) Expand all Loading... |
5930 // Find minimum length. | 6072 // Find minimum length. |
5931 Label left_shorter; | 6073 Label left_shorter; |
5932 __ mov(scratch1, FieldOperand(left, String::kLengthOffset)); | 6074 __ mov(scratch1, FieldOperand(left, String::kLengthOffset)); |
5933 __ mov(scratch3, scratch1); | 6075 __ mov(scratch3, scratch1); |
5934 __ sub(scratch3, FieldOperand(right, String::kLengthOffset)); | 6076 __ sub(scratch3, FieldOperand(right, String::kLengthOffset)); |
5935 | 6077 |
5936 Register length_delta = scratch3; | 6078 Register length_delta = scratch3; |
5937 | 6079 |
5938 __ j(less_equal, &left_shorter, Label::kNear); | 6080 __ j(less_equal, &left_shorter, Label::kNear); |
5939 // Right string is shorter. Change scratch1 to be length of right string. | 6081 // Right string is shorter. Change scratch1 to be length of right string. |
5940 __ sub(scratch1, Operand(length_delta)); | 6082 __ sub(scratch1, length_delta); |
5941 __ bind(&left_shorter); | 6083 __ bind(&left_shorter); |
5942 | 6084 |
5943 Register min_length = scratch1; | 6085 Register min_length = scratch1; |
5944 | 6086 |
5945 // If either length is zero, just compare lengths. | 6087 // If either length is zero, just compare lengths. |
5946 Label compare_lengths; | 6088 Label compare_lengths; |
5947 __ test(min_length, Operand(min_length)); | 6089 __ test(min_length, min_length); |
5948 __ j(zero, &compare_lengths, Label::kNear); | 6090 __ j(zero, &compare_lengths, Label::kNear); |
5949 | 6091 |
5950 // Compare characters. | 6092 // Compare characters. |
5951 Label result_not_equal; | 6093 Label result_not_equal; |
5952 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2, | 6094 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2, |
5953 &result_not_equal, Label::kNear); | 6095 &result_not_equal, Label::kNear); |
5954 | 6096 |
5955 // Compare lengths - strings up to min-length are equal. | 6097 // Compare lengths - strings up to min-length are equal. |
5956 __ bind(&compare_lengths); | 6098 __ bind(&compare_lengths); |
5957 __ test(length_delta, Operand(length_delta)); | 6099 __ test(length_delta, length_delta); |
5958 __ j(not_zero, &result_not_equal, Label::kNear); | 6100 __ j(not_zero, &result_not_equal, Label::kNear); |
5959 | 6101 |
5960 // Result is EQUAL. | 6102 // Result is EQUAL. |
5961 STATIC_ASSERT(EQUAL == 0); | 6103 STATIC_ASSERT(EQUAL == 0); |
5962 STATIC_ASSERT(kSmiTag == 0); | 6104 STATIC_ASSERT(kSmiTag == 0); |
5963 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); | 6105 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); |
5964 __ ret(0); | 6106 __ ret(0); |
5965 | 6107 |
5966 Label result_greater; | 6108 Label result_greater; |
5967 __ bind(&result_not_equal); | 6109 __ bind(&result_not_equal); |
(...skipping 28 matching lines...) Expand all Loading... |
5996 FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize)); | 6138 FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize)); |
5997 __ neg(length); | 6139 __ neg(length); |
5998 Register index = length; // index = -length; | 6140 Register index = length; // index = -length; |
5999 | 6141 |
6000 // Compare loop. | 6142 // Compare loop. |
6001 Label loop; | 6143 Label loop; |
6002 __ bind(&loop); | 6144 __ bind(&loop); |
6003 __ mov_b(scratch, Operand(left, index, times_1, 0)); | 6145 __ mov_b(scratch, Operand(left, index, times_1, 0)); |
6004 __ cmpb(scratch, Operand(right, index, times_1, 0)); | 6146 __ cmpb(scratch, Operand(right, index, times_1, 0)); |
6005 __ j(not_equal, chars_not_equal, chars_not_equal_near); | 6147 __ j(not_equal, chars_not_equal, chars_not_equal_near); |
6006 __ add(Operand(index), Immediate(1)); | 6148 __ add(index, Immediate(1)); |
6007 __ j(not_zero, &loop); | 6149 __ j(not_zero, &loop); |
6008 } | 6150 } |
6009 | 6151 |
6010 | 6152 |
6011 void StringCompareStub::Generate(MacroAssembler* masm) { | 6153 void StringCompareStub::Generate(MacroAssembler* masm) { |
6012 Label runtime; | 6154 Label runtime; |
6013 | 6155 |
6014 // Stack frame on entry. | 6156 // Stack frame on entry. |
6015 // esp[0]: return address | 6157 // esp[0]: return address |
6016 // esp[4]: right string | 6158 // esp[4]: right string |
6017 // esp[8]: left string | 6159 // esp[8]: left string |
6018 | 6160 |
6019 __ mov(edx, Operand(esp, 2 * kPointerSize)); // left | 6161 __ mov(edx, Operand(esp, 2 * kPointerSize)); // left |
6020 __ mov(eax, Operand(esp, 1 * kPointerSize)); // right | 6162 __ mov(eax, Operand(esp, 1 * kPointerSize)); // right |
6021 | 6163 |
6022 Label not_same; | 6164 Label not_same; |
6023 __ cmp(edx, Operand(eax)); | 6165 __ cmp(edx, eax); |
6024 __ j(not_equal, ¬_same, Label::kNear); | 6166 __ j(not_equal, ¬_same, Label::kNear); |
6025 STATIC_ASSERT(EQUAL == 0); | 6167 STATIC_ASSERT(EQUAL == 0); |
6026 STATIC_ASSERT(kSmiTag == 0); | 6168 STATIC_ASSERT(kSmiTag == 0); |
6027 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); | 6169 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); |
6028 __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1); | 6170 __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1); |
6029 __ ret(2 * kPointerSize); | 6171 __ ret(2 * kPointerSize); |
6030 | 6172 |
6031 __ bind(&not_same); | 6173 __ bind(&not_same); |
6032 | 6174 |
6033 // Check that both objects are sequential ascii strings. | 6175 // Check that both objects are sequential ascii strings. |
6034 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime); | 6176 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime); |
6035 | 6177 |
6036 // Compare flat ascii strings. | 6178 // Compare flat ascii strings. |
6037 // Drop arguments from the stack. | 6179 // Drop arguments from the stack. |
6038 __ pop(ecx); | 6180 __ pop(ecx); |
6039 __ add(Operand(esp), Immediate(2 * kPointerSize)); | 6181 __ add(esp, Immediate(2 * kPointerSize)); |
6040 __ push(ecx); | 6182 __ push(ecx); |
6041 GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi); | 6183 GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi); |
6042 | 6184 |
6043 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 6185 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
6044 // tagged as a small integer. | 6186 // tagged as a small integer. |
6045 __ bind(&runtime); | 6187 __ bind(&runtime); |
6046 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 6188 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
6047 } | 6189 } |
6048 | 6190 |
6049 | 6191 |
6050 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 6192 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
6051 ASSERT(state_ == CompareIC::SMIS); | 6193 ASSERT(state_ == CompareIC::SMIS); |
6052 Label miss; | 6194 Label miss; |
6053 __ mov(ecx, Operand(edx)); | 6195 __ mov(ecx, edx); |
6054 __ or_(ecx, Operand(eax)); | 6196 __ or_(ecx, eax); |
6055 __ JumpIfNotSmi(ecx, &miss, Label::kNear); | 6197 __ JumpIfNotSmi(ecx, &miss, Label::kNear); |
6056 | 6198 |
6057 if (GetCondition() == equal) { | 6199 if (GetCondition() == equal) { |
6058 // For equality we do not care about the sign of the result. | 6200 // For equality we do not care about the sign of the result. |
6059 __ sub(eax, Operand(edx)); | 6201 __ sub(eax, edx); |
6060 } else { | 6202 } else { |
6061 Label done; | 6203 Label done; |
6062 __ sub(edx, Operand(eax)); | 6204 __ sub(edx, eax); |
6063 __ j(no_overflow, &done, Label::kNear); | 6205 __ j(no_overflow, &done, Label::kNear); |
6064 // Correct sign of result in case of overflow. | 6206 // Correct sign of result in case of overflow. |
6065 __ not_(edx); | 6207 __ not_(edx); |
6066 __ bind(&done); | 6208 __ bind(&done); |
6067 __ mov(eax, edx); | 6209 __ mov(eax, edx); |
6068 } | 6210 } |
6069 __ ret(0); | 6211 __ ret(0); |
6070 | 6212 |
6071 __ bind(&miss); | 6213 __ bind(&miss); |
6072 GenerateMiss(masm); | 6214 GenerateMiss(masm); |
6073 } | 6215 } |
6074 | 6216 |
6075 | 6217 |
6076 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { | 6218 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { |
6077 ASSERT(state_ == CompareIC::HEAP_NUMBERS); | 6219 ASSERT(state_ == CompareIC::HEAP_NUMBERS); |
6078 | 6220 |
6079 Label generic_stub; | 6221 Label generic_stub; |
6080 Label unordered; | 6222 Label unordered; |
6081 Label miss; | 6223 Label miss; |
6082 __ mov(ecx, Operand(edx)); | 6224 __ mov(ecx, edx); |
6083 __ and_(ecx, Operand(eax)); | 6225 __ and_(ecx, eax); |
6084 __ JumpIfSmi(ecx, &generic_stub, Label::kNear); | 6226 __ JumpIfSmi(ecx, &generic_stub, Label::kNear); |
6085 | 6227 |
6086 __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx); | 6228 __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx); |
6087 __ j(not_equal, &miss, Label::kNear); | 6229 __ j(not_equal, &miss, Label::kNear); |
6088 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx); | 6230 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx); |
6089 __ j(not_equal, &miss, Label::kNear); | 6231 __ j(not_equal, &miss, Label::kNear); |
6090 | 6232 |
6091 // Inlining the double comparison and falling back to the general compare | 6233 // Inlining the double comparison and falling back to the general compare |
6092 // stub if NaN is involved or SSE2 or CMOV is unsupported. | 6234 // stub if NaN is involved or SSE2 or CMOV is unsupported. |
6093 if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) { | 6235 if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) { |
6094 CpuFeatures::Scope scope1(SSE2); | 6236 CpuFeatures::Scope scope1(SSE2); |
6095 CpuFeatures::Scope scope2(CMOV); | 6237 CpuFeatures::Scope scope2(CMOV); |
6096 | 6238 |
6097 // Load left and right operands. | 6239 // Load left and right operands. |
6098 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); | 6240 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); |
6099 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); | 6241 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); |
6100 | 6242 |
6101 // Compare operands | 6243 // Compare operands |
6102 __ ucomisd(xmm0, xmm1); | 6244 __ ucomisd(xmm0, xmm1); |
6103 | 6245 |
6104 // Don't base result on EFLAGS when a NaN is involved. | 6246 // Don't base result on EFLAGS when a NaN is involved. |
6105 __ j(parity_even, &unordered, Label::kNear); | 6247 __ j(parity_even, &unordered, Label::kNear); |
6106 | 6248 |
6107 // Return a result of -1, 0, or 1, based on EFLAGS. | 6249 // Return a result of -1, 0, or 1, based on EFLAGS. |
6108 // Performing mov, because xor would destroy the flag register. | 6250 // Performing mov, because xor would destroy the flag register. |
6109 __ mov(eax, 0); // equal | 6251 __ mov(eax, 0); // equal |
6110 __ mov(ecx, Immediate(Smi::FromInt(1))); | 6252 __ mov(ecx, Immediate(Smi::FromInt(1))); |
6111 __ cmov(above, eax, Operand(ecx)); | 6253 __ cmov(above, eax, ecx); |
6112 __ mov(ecx, Immediate(Smi::FromInt(-1))); | 6254 __ mov(ecx, Immediate(Smi::FromInt(-1))); |
6113 __ cmov(below, eax, Operand(ecx)); | 6255 __ cmov(below, eax, ecx); |
6114 __ ret(0); | 6256 __ ret(0); |
6115 | 6257 |
6116 __ bind(&unordered); | 6258 __ bind(&unordered); |
6117 } | 6259 } |
6118 | 6260 |
6119 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS); | 6261 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS); |
6120 __ bind(&generic_stub); | 6262 __ bind(&generic_stub); |
6121 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); | 6263 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); |
6122 | 6264 |
6123 __ bind(&miss); | 6265 __ bind(&miss); |
6124 GenerateMiss(masm); | 6266 GenerateMiss(masm); |
6125 } | 6267 } |
6126 | 6268 |
6127 | 6269 |
6128 void ICCompareStub::GenerateSymbols(MacroAssembler* masm) { | 6270 void ICCompareStub::GenerateSymbols(MacroAssembler* masm) { |
6129 ASSERT(state_ == CompareIC::SYMBOLS); | 6271 ASSERT(state_ == CompareIC::SYMBOLS); |
6130 ASSERT(GetCondition() == equal); | 6272 ASSERT(GetCondition() == equal); |
6131 | 6273 |
6132 // Registers containing left and right operands respectively. | 6274 // Registers containing left and right operands respectively. |
6133 Register left = edx; | 6275 Register left = edx; |
6134 Register right = eax; | 6276 Register right = eax; |
6135 Register tmp1 = ecx; | 6277 Register tmp1 = ecx; |
6136 Register tmp2 = ebx; | 6278 Register tmp2 = ebx; |
6137 | 6279 |
6138 // Check that both operands are heap objects. | 6280 // Check that both operands are heap objects. |
6139 Label miss; | 6281 Label miss; |
6140 __ mov(tmp1, Operand(left)); | 6282 __ mov(tmp1, left); |
6141 STATIC_ASSERT(kSmiTag == 0); | 6283 STATIC_ASSERT(kSmiTag == 0); |
6142 __ and_(tmp1, Operand(right)); | 6284 __ and_(tmp1, right); |
6143 __ JumpIfSmi(tmp1, &miss, Label::kNear); | 6285 __ JumpIfSmi(tmp1, &miss, Label::kNear); |
6144 | 6286 |
6145 // Check that both operands are symbols. | 6287 // Check that both operands are symbols. |
6146 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 6288 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
6147 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 6289 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
6148 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 6290 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
6149 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 6291 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
6150 STATIC_ASSERT(kSymbolTag != 0); | 6292 STATIC_ASSERT(kSymbolTag != 0); |
6151 __ and_(tmp1, Operand(tmp2)); | 6293 __ and_(tmp1, tmp2); |
6152 __ test(tmp1, Immediate(kIsSymbolMask)); | 6294 __ test(tmp1, Immediate(kIsSymbolMask)); |
6153 __ j(zero, &miss, Label::kNear); | 6295 __ j(zero, &miss, Label::kNear); |
6154 | 6296 |
6155 // Symbols are compared by identity. | 6297 // Symbols are compared by identity. |
6156 Label done; | 6298 Label done; |
6157 __ cmp(left, Operand(right)); | 6299 __ cmp(left, right); |
6158 // Make sure eax is non-zero. At this point input operands are | 6300 // Make sure eax is non-zero. At this point input operands are |
6159 // guaranteed to be non-zero. | 6301 // guaranteed to be non-zero. |
6160 ASSERT(right.is(eax)); | 6302 ASSERT(right.is(eax)); |
6161 __ j(not_equal, &done, Label::kNear); | 6303 __ j(not_equal, &done, Label::kNear); |
6162 STATIC_ASSERT(EQUAL == 0); | 6304 STATIC_ASSERT(EQUAL == 0); |
6163 STATIC_ASSERT(kSmiTag == 0); | 6305 STATIC_ASSERT(kSmiTag == 0); |
6164 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); | 6306 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); |
6165 __ bind(&done); | 6307 __ bind(&done); |
6166 __ ret(0); | 6308 __ ret(0); |
6167 | 6309 |
6168 __ bind(&miss); | 6310 __ bind(&miss); |
6169 GenerateMiss(masm); | 6311 GenerateMiss(masm); |
6170 } | 6312 } |
6171 | 6313 |
6172 | 6314 |
6173 void ICCompareStub::GenerateStrings(MacroAssembler* masm) { | 6315 void ICCompareStub::GenerateStrings(MacroAssembler* masm) { |
6174 ASSERT(state_ == CompareIC::STRINGS); | 6316 ASSERT(state_ == CompareIC::STRINGS); |
6175 ASSERT(GetCondition() == equal); | 6317 ASSERT(GetCondition() == equal); |
6176 Label miss; | 6318 Label miss; |
6177 | 6319 |
6178 // Registers containing left and right operands respectively. | 6320 // Registers containing left and right operands respectively. |
6179 Register left = edx; | 6321 Register left = edx; |
6180 Register right = eax; | 6322 Register right = eax; |
6181 Register tmp1 = ecx; | 6323 Register tmp1 = ecx; |
6182 Register tmp2 = ebx; | 6324 Register tmp2 = ebx; |
6183 Register tmp3 = edi; | 6325 Register tmp3 = edi; |
6184 | 6326 |
6185 // Check that both operands are heap objects. | 6327 // Check that both operands are heap objects. |
6186 __ mov(tmp1, Operand(left)); | 6328 __ mov(tmp1, left); |
6187 STATIC_ASSERT(kSmiTag == 0); | 6329 STATIC_ASSERT(kSmiTag == 0); |
6188 __ and_(tmp1, Operand(right)); | 6330 __ and_(tmp1, right); |
6189 __ JumpIfSmi(tmp1, &miss); | 6331 __ JumpIfSmi(tmp1, &miss); |
6190 | 6332 |
6191 // Check that both operands are strings. This leaves the instance | 6333 // Check that both operands are strings. This leaves the instance |
6192 // types loaded in tmp1 and tmp2. | 6334 // types loaded in tmp1 and tmp2. |
6193 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); | 6335 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); |
6194 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); | 6336 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); |
6195 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); | 6337 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); |
6196 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); | 6338 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); |
6197 __ mov(tmp3, tmp1); | 6339 __ mov(tmp3, tmp1); |
6198 STATIC_ASSERT(kNotStringTag != 0); | 6340 STATIC_ASSERT(kNotStringTag != 0); |
6199 __ or_(tmp3, Operand(tmp2)); | 6341 __ or_(tmp3, tmp2); |
6200 __ test(tmp3, Immediate(kIsNotStringMask)); | 6342 __ test(tmp3, Immediate(kIsNotStringMask)); |
6201 __ j(not_zero, &miss); | 6343 __ j(not_zero, &miss); |
6202 | 6344 |
6203 // Fast check for identical strings. | 6345 // Fast check for identical strings. |
6204 Label not_same; | 6346 Label not_same; |
6205 __ cmp(left, Operand(right)); | 6347 __ cmp(left, right); |
6206 __ j(not_equal, &not_same, Label::kNear); | 6348 __ j(not_equal, &not_same, Label::kNear); |
6207 STATIC_ASSERT(EQUAL == 0); | 6349 STATIC_ASSERT(EQUAL == 0); |
6208 STATIC_ASSERT(kSmiTag == 0); | 6350 STATIC_ASSERT(kSmiTag == 0); |
6209 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); | 6351 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); |
6210 __ ret(0); | 6352 __ ret(0); |
6211 | 6353 |
6212 // Handle not identical strings. | 6354 // Handle not identical strings. |
6213 __ bind(&not_same); | 6355 __ bind(&not_same); |
6214 | 6356 |
6215 // Check that both strings are symbols. If they are, we're done | 6357 // Check that both strings are symbols. If they are, we're done |
6216 // because we already know they are not identical. | 6358 // because we already know they are not identical. |
6217 Label do_compare; | 6359 Label do_compare; |
6218 STATIC_ASSERT(kSymbolTag != 0); | 6360 STATIC_ASSERT(kSymbolTag != 0); |
6219 __ and_(tmp1, Operand(tmp2)); | 6361 __ and_(tmp1, tmp2); |
6220 __ test(tmp1, Immediate(kIsSymbolMask)); | 6362 __ test(tmp1, Immediate(kIsSymbolMask)); |
6221 __ j(zero, &do_compare, Label::kNear); | 6363 __ j(zero, &do_compare, Label::kNear); |
6222 // Make sure eax is non-zero. At this point input operands are | 6364 // Make sure eax is non-zero. At this point input operands are |
6223 // guaranteed to be non-zero. | 6365 // guaranteed to be non-zero. |
6224 ASSERT(right.is(eax)); | 6366 ASSERT(right.is(eax)); |
6225 __ ret(0); | 6367 __ ret(0); |
6226 | 6368 |
6227 // Check that both strings are sequential ASCII. | 6369 // Check that both strings are sequential ASCII. |
6228 Label runtime; | 6370 Label runtime; |
6229 __ bind(&do_compare); | 6371 __ bind(&do_compare); |
(...skipping 12 matching lines...) Expand all Loading... |
6242 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); | 6384 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); |
6243 | 6385 |
6244 __ bind(&miss); | 6386 __ bind(&miss); |
6245 GenerateMiss(masm); | 6387 GenerateMiss(masm); |
6246 } | 6388 } |
6247 | 6389 |
6248 | 6390 |
6249 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { | 6391 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { |
6250 ASSERT(state_ == CompareIC::OBJECTS); | 6392 ASSERT(state_ == CompareIC::OBJECTS); |
6251 Label miss; | 6393 Label miss; |
6252 __ mov(ecx, Operand(edx)); | 6394 __ mov(ecx, edx); |
6253 __ and_(ecx, Operand(eax)); | 6395 __ and_(ecx, eax); |
6254 __ JumpIfSmi(ecx, &miss, Label::kNear); | 6396 __ JumpIfSmi(ecx, &miss, Label::kNear); |
6255 | 6397 |
6256 __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx); | 6398 __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx); |
6257 __ j(not_equal, &miss, Label::kNear); | 6399 __ j(not_equal, &miss, Label::kNear); |
6258 __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx); | 6400 __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx); |
6259 __ j(not_equal, &miss, Label::kNear); | 6401 __ j(not_equal, &miss, Label::kNear); |
6260 | 6402 |
6261 ASSERT(GetCondition() == equal); | 6403 ASSERT(GetCondition() == equal); |
6262 __ sub(eax, Operand(edx)); | 6404 __ sub(eax, edx); |
6263 __ ret(0); | 6405 __ ret(0); |
6264 | 6406 |
6265 __ bind(&miss); | 6407 __ bind(&miss); |
6266 GenerateMiss(masm); | 6408 GenerateMiss(masm); |
6267 } | 6409 } |
6268 | 6410 |
6269 | 6411 |
6270 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { | 6412 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { |
6271 // Save the registers. | 6413 // Save the registers. |
6272 __ pop(ecx); | 6414 __ pop(ecx); |
6273 __ push(edx); | 6415 __ push(edx); |
6274 __ push(eax); | 6416 __ push(eax); |
6275 __ push(ecx); | 6417 __ push(ecx); |
6276 | 6418 |
6277 // Call the runtime system in a fresh internal frame. | 6419 { |
6278 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss), | 6420 // Call the runtime system in a fresh internal frame. |
6279 masm->isolate()); | 6421 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss), |
6280 __ EnterInternalFrame(); | 6422 masm->isolate()); |
6281 __ push(edx); | 6423 FrameScope scope(masm, StackFrame::INTERNAL); |
6282 __ push(eax); | 6424 __ push(edx); |
6283 __ push(Immediate(Smi::FromInt(op_))); | 6425 __ push(eax); |
6284 __ CallExternalReference(miss, 3); | 6426 __ push(Immediate(Smi::FromInt(op_))); |
6285 __ LeaveInternalFrame(); | 6427 __ CallExternalReference(miss, 3); |
| 6428 } |
6286 | 6429 |
6287 // Compute the entry point of the rewritten stub. | 6430 // Compute the entry point of the rewritten stub. |
6288 __ lea(edi, FieldOperand(eax, Code::kHeaderSize)); | 6431 __ lea(edi, FieldOperand(eax, Code::kHeaderSize)); |
6289 | 6432 |
6290 // Restore registers. | 6433 // Restore registers. |
6291 __ pop(ecx); | 6434 __ pop(ecx); |
6292 __ pop(eax); | 6435 __ pop(eax); |
6293 __ pop(edx); | 6436 __ pop(edx); |
6294 __ push(ecx); | 6437 __ push(ecx); |
6295 | 6438 |
6296 // Do a tail call to the rewritten stub. | 6439 // Do a tail call to the rewritten stub. |
6297 __ jmp(Operand(edi)); | 6440 __ jmp(edi); |
6298 } | 6441 } |
6299 | 6442 |
6300 | 6443 |
6301 // Helper function used to check that the dictionary doesn't contain | 6444 // Helper function used to check that the dictionary doesn't contain |
6302 // the property. This function may return false negatives, so miss_label | 6445 // the property. This function may return false negatives, so miss_label |
6303 // must always call a backup property check that is complete. | 6446 // must always call a backup property check that is complete. |
6304 // This function is safe to call if the receiver has fast properties. | 6447 // This function is safe to call if the receiver has fast properties. |
6305 // Name must be a symbol and receiver must be a heap object. | 6448 // Name must be a symbol and receiver must be a heap object. |
6306 MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup( | 6449 MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup( |
6307 MacroAssembler* masm, | 6450 MacroAssembler* masm, |
6308 Label* miss, | 6451 Label* miss, |
6309 Label* done, | 6452 Label* done, |
6310 Register properties, | 6453 Register properties, |
6311 String* name, | 6454 String* name, |
6312 Register r0) { | 6455 Register r0) { |
6313 ASSERT(name->IsSymbol()); | 6456 ASSERT(name->IsSymbol()); |
6314 | 6457 |
6315 // If names of slots in range from 1 to kProbes - 1 for the hash value are | 6458 // If names of slots in range from 1 to kProbes - 1 for the hash value are |
6316 // not equal to the name and kProbes-th slot is not used (its name is the | 6459 // not equal to the name and kProbes-th slot is not used (its name is the |
6317 // undefined value), it guarantees the hash table doesn't contain the | 6460 // undefined value), it guarantees the hash table doesn't contain the |
6318 // property. It's true even if some slots represent deleted properties | 6461 // property. It's true even if some slots represent deleted properties |
6319 // (their names are the null value). | 6462 // (their names are the null value). |
6320 for (int i = 0; i < kInlinedProbes; i++) { | 6463 for (int i = 0; i < kInlinedProbes; i++) { |
6321 // Compute the masked index: (hash + i + i * i) & mask. | 6464 // Compute the masked index: (hash + i + i * i) & mask. |
6322 Register index = r0; | 6465 Register index = r0; |
6323 // Capacity is smi 2^n. | 6466 // Capacity is smi 2^n. |
6324 __ mov(index, FieldOperand(properties, kCapacityOffset)); | 6467 __ mov(index, FieldOperand(properties, kCapacityOffset)); |
6325 __ dec(index); | 6468 __ dec(index); |
6326 __ and_(Operand(index), | 6469 __ and_(index, |
6327 Immediate(Smi::FromInt(name->Hash() + | 6470 Immediate(Smi::FromInt(name->Hash() + |
6328 StringDictionary::GetProbeOffset(i)))); | 6471 StringDictionary::GetProbeOffset(i)))); |
6329 | 6472 |
6330 // Scale the index by multiplying by the entry size. | 6473 // Scale the index by multiplying by the entry size. |
6331 ASSERT(StringDictionary::kEntrySize == 3); | 6474 ASSERT(StringDictionary::kEntrySize == 3); |
6332 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. | 6475 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. |
6333 Register entity_name = r0; | 6476 Register entity_name = r0; |
6334 // Having undefined at this place means the name is not contained. | 6477 // Having undefined at this place means the name is not contained. |
6335 ASSERT_EQ(kSmiTagSize, 1); | 6478 ASSERT_EQ(kSmiTagSize, 1); |
6336 __ mov(entity_name, Operand(properties, index, times_half_pointer_size, | 6479 __ mov(entity_name, Operand(properties, index, times_half_pointer_size, |
6337 kElementsStartOffset - kHeapObjectTag)); | 6480 kElementsStartOffset - kHeapObjectTag)); |
(...skipping 12 matching lines...) Expand all Loading... |
6350 } | 6493 } |
6351 | 6494 |
6352 StringDictionaryLookupStub stub(properties, | 6495 StringDictionaryLookupStub stub(properties, |
6353 r0, | 6496 r0, |
6354 r0, | 6497 r0, |
6355 StringDictionaryLookupStub::NEGATIVE_LOOKUP); | 6498 StringDictionaryLookupStub::NEGATIVE_LOOKUP); |
6356 __ push(Immediate(Handle<Object>(name))); | 6499 __ push(Immediate(Handle<Object>(name))); |
6357 __ push(Immediate(name->Hash())); | 6500 __ push(Immediate(name->Hash())); |
6358 MaybeObject* result = masm->TryCallStub(&stub); | 6501 MaybeObject* result = masm->TryCallStub(&stub); |
6359 if (result->IsFailure()) return result; | 6502 if (result->IsFailure()) return result; |
6360 __ test(r0, Operand(r0)); | 6503 __ test(r0, r0); |
6361 __ j(not_zero, miss); | 6504 __ j(not_zero, miss); |
6362 __ jmp(done); | 6505 __ jmp(done); |
6363 return result; | 6506 return result; |
6364 } | 6507 } |
6365 | 6508 |
6366 | 6509 |
6367 // Probe the string dictionary in the |elements| register. Jump to the | 6510 // Probe the string dictionary in the |elements| register. Jump to the |
6368 // |done| label if a property with the given name is found leaving the | 6511 // |done| label if a property with the given name is found leaving the |
6369 // index into the dictionary in |r0|. Jump to the |miss| label | 6512 // index into the dictionary in |r0|. Jump to the |miss| label |
6370 // otherwise. | 6513 // otherwise. |
(...skipping 12 matching lines...) Expand all Loading... |
6383 __ dec(r1); | 6526 __ dec(r1); |
6384 | 6527 |
6385 // Generate an unrolled loop that performs a few probes before | 6528 // Generate an unrolled loop that performs a few probes before |
6386 // giving up. Measurements done on Gmail indicate that 2 probes | 6529 // giving up. Measurements done on Gmail indicate that 2 probes |
6387 // cover ~93% of loads from dictionaries. | 6530 // cover ~93% of loads from dictionaries. |
6388 for (int i = 0; i < kInlinedProbes; i++) { | 6531 for (int i = 0; i < kInlinedProbes; i++) { |
6389 // Compute the masked index: (hash + i + i * i) & mask. | 6532 // Compute the masked index: (hash + i + i * i) & mask. |
6390 __ mov(r0, FieldOperand(name, String::kHashFieldOffset)); | 6533 __ mov(r0, FieldOperand(name, String::kHashFieldOffset)); |
6391 __ shr(r0, String::kHashShift); | 6534 __ shr(r0, String::kHashShift); |
6392 if (i > 0) { | 6535 if (i > 0) { |
6393 __ add(Operand(r0), Immediate(StringDictionary::GetProbeOffset(i))); | 6536 __ add(r0, Immediate(StringDictionary::GetProbeOffset(i))); |
6394 } | 6537 } |
6395 __ and_(r0, Operand(r1)); | 6538 __ and_(r0, r1); |
6396 | 6539 |
6397 // Scale the index by multiplying by the entry size. | 6540 // Scale the index by multiplying by the entry size. |
6398 ASSERT(StringDictionary::kEntrySize == 3); | 6541 ASSERT(StringDictionary::kEntrySize == 3); |
6399 __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3 | 6542 __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3 |
6400 | 6543 |
6401 // Check if the key is identical to the name. | 6544 // Check if the key is identical to the name. |
6402 __ cmp(name, Operand(elements, | 6545 __ cmp(name, Operand(elements, |
6403 r0, | 6546 r0, |
6404 times_4, | 6547 times_4, |
6405 kElementsStartOffset - kHeapObjectTag)); | 6548 kElementsStartOffset - kHeapObjectTag)); |
6406 __ j(equal, done); | 6549 __ j(equal, done); |
6407 } | 6550 } |
6408 | 6551 |
6409 StringDictionaryLookupStub stub(elements, | 6552 StringDictionaryLookupStub stub(elements, |
6410 r1, | 6553 r1, |
6411 r0, | 6554 r0, |
6412 POSITIVE_LOOKUP); | 6555 POSITIVE_LOOKUP); |
6413 __ push(name); | 6556 __ push(name); |
6414 __ mov(r0, FieldOperand(name, String::kHashFieldOffset)); | 6557 __ mov(r0, FieldOperand(name, String::kHashFieldOffset)); |
6415 __ shr(r0, String::kHashShift); | 6558 __ shr(r0, String::kHashShift); |
6416 __ push(r0); | 6559 __ push(r0); |
6417 __ CallStub(&stub); | 6560 __ CallStub(&stub); |
6418 | 6561 |
6419 __ test(r1, Operand(r1)); | 6562 __ test(r1, r1); |
6420 __ j(zero, miss); | 6563 __ j(zero, miss); |
6421 __ jmp(done); | 6564 __ jmp(done); |
6422 } | 6565 } |
6423 | 6566 |
6424 | 6567 |
6425 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { | 6568 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { |
| 6569 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
| 6570 // we cannot call anything that could cause a GC from this stub. |
6426 // Stack frame on entry: | 6571 // Stack frame on entry: |
6427 // esp[0 * kPointerSize]: return address. | 6572 // esp[0 * kPointerSize]: return address. |
6428 // esp[1 * kPointerSize]: key's hash. | 6573 // esp[1 * kPointerSize]: key's hash. |
6429 // esp[2 * kPointerSize]: key. | 6574 // esp[2 * kPointerSize]: key. |
6430 // Registers: | 6575 // Registers: |
6431 // dictionary_: StringDictionary to probe. | 6576 // dictionary_: StringDictionary to probe. |
6432 // result_: used as scratch. | 6577 // result_: used as scratch. |
6433 // index_: will hold an index of entry if lookup is successful. | 6578 // index_: will hold an index of entry if lookup is successful. |
6434 // might alias with result_. | 6579 // might alias with result_. |
6435 // Returns: | 6580 // Returns: |
(...skipping 10 matching lines...) Expand all Loading... |
6446 | 6591 |
6447 // If names of slots in range from 1 to kProbes - 1 for the hash value are | 6592 // If names of slots in range from 1 to kProbes - 1 for the hash value are |
6448 // not equal to the name and kProbes-th slot is not used (its name is the | 6593 // not equal to the name and kProbes-th slot is not used (its name is the |
6449 // undefined value), it guarantees the hash table doesn't contain the | 6594 // undefined value), it guarantees the hash table doesn't contain the |
6450 // property. It's true even if some slots represent deleted properties | 6595 // property. It's true even if some slots represent deleted properties |
6451 // (their names are the null value). | 6596 // (their names are the null value). |
6452 for (int i = kInlinedProbes; i < kTotalProbes; i++) { | 6597 for (int i = kInlinedProbes; i < kTotalProbes; i++) { |
6453 // Compute the masked index: (hash + i + i * i) & mask. | 6598 // Compute the masked index: (hash + i + i * i) & mask. |
6454 __ mov(scratch, Operand(esp, 2 * kPointerSize)); | 6599 __ mov(scratch, Operand(esp, 2 * kPointerSize)); |
6455 if (i > 0) { | 6600 if (i > 0) { |
6456 __ add(Operand(scratch), | 6601 __ add(scratch, Immediate(StringDictionary::GetProbeOffset(i))); |
6457 Immediate(StringDictionary::GetProbeOffset(i))); | |
6458 } | 6602 } |
6459 __ and_(scratch, Operand(esp, 0)); | 6603 __ and_(scratch, Operand(esp, 0)); |
6460 | 6604 |
6461 // Scale the index by multiplying by the entry size. | 6605 // Scale the index by multiplying by the entry size. |
6462 ASSERT(StringDictionary::kEntrySize == 3); | 6606 ASSERT(StringDictionary::kEntrySize == 3); |
6463 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. | 6607 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. |
6464 | 6608 |
6465 // Having undefined at this place means the name is not contained. | 6609 // Having undefined at this place means the name is not contained. |
6466 ASSERT_EQ(kSmiTagSize, 1); | 6610 ASSERT_EQ(kSmiTagSize, 1); |
6467 __ mov(scratch, Operand(dictionary_, | 6611 __ mov(scratch, Operand(dictionary_, |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6503 __ Drop(1); | 6647 __ Drop(1); |
6504 __ ret(2 * kPointerSize); | 6648 __ ret(2 * kPointerSize); |
6505 | 6649 |
6506 __ bind(&not_in_dictionary); | 6650 __ bind(&not_in_dictionary); |
6507 __ mov(result_, Immediate(0)); | 6651 __ mov(result_, Immediate(0)); |
6508 __ Drop(1); | 6652 __ Drop(1); |
6509 __ ret(2 * kPointerSize); | 6653 __ ret(2 * kPointerSize); |
6510 } | 6654 } |
6511 | 6655 |
6512 | 6656 |
| 6657 struct AheadOfTimeWriteBarrierStubList { |
| 6658 Register object, value, address; |
| 6659 RememberedSetAction action; |
| 6660 }; |
| 6661 |
| 6662 |
| 6663 struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = { |
| 6664 // Used in RegExpExecStub. |
| 6665 { ebx, eax, edi, EMIT_REMEMBERED_SET }, |
| 6666 // Used in CompileArrayPushCall. |
| 6667 { ebx, ecx, edx, EMIT_REMEMBERED_SET }, |
| 6668 { ebx, edi, edx, OMIT_REMEMBERED_SET }, |
| 6669 // Used in CompileStoreGlobal and CallFunctionStub. |
| 6670 { ebx, ecx, edx, OMIT_REMEMBERED_SET }, |
| 6671 // Used in StoreStubCompiler::CompileStoreField and |
| 6672 // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField. |
| 6673 { edx, ecx, ebx, EMIT_REMEMBERED_SET }, |
| 6674 // GenerateStoreField calls the stub with two different permutations of |
| 6675 // registers. This is the second. |
| 6676 { ebx, ecx, edx, EMIT_REMEMBERED_SET }, |
| 6677 // StoreIC::GenerateNormal via GenerateDictionaryStore |
| 6678 { ebx, edi, edx, EMIT_REMEMBERED_SET }, |
| 6679 // KeyedStoreIC::GenerateGeneric. |
| 6680 { ebx, edx, ecx, EMIT_REMEMBERED_SET}, |
| 6681 // KeyedStoreStubCompiler::GenerateStoreFastElement. |
| 6682 { edi, edx, ecx, EMIT_REMEMBERED_SET}, |
| 6683 // Null termination. |
| 6684 { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET} |
| 6685 }; |
| 6686 |
| 6687 |
| 6688 bool RecordWriteStub::IsPregenerated() { |
| 6689 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; |
| 6690 !entry->object.is(no_reg); |
| 6691 entry++) { |
| 6692 if (object_.is(entry->object) && |
| 6693 value_.is(entry->value) && |
| 6694 address_.is(entry->address) && |
| 6695 remembered_set_action_ == entry->action && |
| 6696 save_fp_regs_mode_ == kDontSaveFPRegs) { |
| 6697 return true; |
| 6698 } |
| 6699 } |
| 6700 return false; |
| 6701 } |
| 6702 |
| 6703 |
| 6704 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() { |
| 6705 StoreBufferOverflowStub stub1(kDontSaveFPRegs); |
| 6706 stub1.GetCode()->set_is_pregenerated(true); |
| 6707 |
| 6708 CpuFeatures::TryForceFeatureScope scope(SSE2); |
| 6709 if (CpuFeatures::IsSupported(SSE2)) { |
| 6710 StoreBufferOverflowStub stub2(kSaveFPRegs); |
| 6711 stub2.GetCode()->set_is_pregenerated(true); |
| 6712 } |
| 6713 } |
| 6714 |
| 6715 |
| 6716 void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() { |
| 6717 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; |
| 6718 !entry->object.is(no_reg); |
| 6719 entry++) { |
| 6720 RecordWriteStub stub(entry->object, |
| 6721 entry->value, |
| 6722 entry->address, |
| 6723 entry->action, |
| 6724 kDontSaveFPRegs); |
| 6725 stub.GetCode()->set_is_pregenerated(true); |
| 6726 } |
| 6727 } |
| 6728 |
| 6729 |
// Takes the input in 3 registers: address_ value_ and object_. A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed. The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call. We patch it back and
  // forth between a compare instructions (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  // STORE_BUFFER_ONLY mode: when the two jumps above are patched to nops,
  // execution falls through to here and only the remembered set (store
  // buffer) is updated — the incremental marker is not consulted.
  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  // Target of the first (near) patchable jump: incremental marking without
  // compaction.
  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  // Target of the second (far) patchable jump: incremental marking with
  // evacuation/compaction.
  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  // Overwrite the two jumps emitted at offsets 0 and 2 with same-sized nops
  // (per the kTwoByteNopInstruction/kFiveByteNopInstruction constants) so a
  // freshly generated stub starts out in STORE_BUFFER_ONLY mode.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}
| 6766 |
| 6767 |
// Emits the incremental-marking arm of the record-write stub: decides
// whether the slot must be recorded in the remembered set and whether the
// incremental marker needs to hear about the written value, then does both
// as required before returning.
void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    // Load the just-written value out of the slot.
    __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
    // Only pointers into new space need a remembered-set entry.
    __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    // Pages flagged SCAN_ON_SCAVENGE are scanned in their entirety, so
    // individual slots on them do not need to be recorded.
    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     not_zero,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm,
        kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
        mode);
    InformIncrementalMarker(masm, mode);
    regs_.Restore(masm);
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  // No remembered-set entry is needed (or none was requested): only inform
  // the incremental marker, then return.
  CheckNeedsToInformIncrementalMarker(
      masm,
      kReturnOnNoNeedToInformIncrementalMarker,
      mode);
  InformIncrementalMarker(masm, mode);
  regs_.Restore(masm);
  __ ret(0);
}
| 6810 |
| 6811 |
// Emits a call to the C++ record-write function that informs the
// incremental marker of the write. Caller-saved registers (and, depending
// on save_fp_regs_mode_, FP registers) are preserved around the call.
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  // Argument 0: the object containing the slot.
  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
  if (mode == INCREMENTAL_COMPACTION) {
    // The evacuation function takes the slot address itself so the slot can
    // be updated when its target moves.
    __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot.
  } else {
    ASSERT(mode == INCREMENTAL);
    // The marking function takes the value stored in the slot.
    __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
    __ mov(Operand(esp, 1 * kPointerSize), regs_.scratch0()); // Value.
  }
  // Argument 2: the current isolate.
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address()));

  // Both record-write functions are declared unable to trigger a GC.
  AllowExternalCallThatCantCauseGC scope(masm);
  if (mode == INCREMENTAL_COMPACTION) {
    __ CallCFunction(
        ExternalReference::incremental_evacuation_record_write_function(
            masm->isolate()),
        argument_count);
  } else {
    ASSERT(mode == INCREMENTAL);
    __ CallCFunction(
        ExternalReference::incremental_marking_record_write_function(
            masm->isolate()),
        argument_count);
  }
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}
| 6842 |
| 6843 |
// Emits the checks that decide whether the incremental marker must be told
// about this write. When no notification is needed, either updates the
// remembered set or returns, as selected by on_no_need. When notification
// IS needed, control falls off the end of the emitted code so the caller
// can emit the marker call immediately after.
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label object_is_black, need_incremental, need_incremental_pop_object;

  // Let's look at the color of the object: If it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &object_is_black,
                 Label::kNear);

  // Object is not black: no marker notification needed; finish per
  // on_no_need.
  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&object_is_black);

  // Get the value from the slot.
  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    // If the value does not live on an evacuation-candidate page, the slot
    // need not be recorded for compaction; only its color matters.
    __ CheckPageFlag(regs_.scratch0(), // Contains value.
                     regs_.scratch1(), // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    // Likewise when slot recording is suppressed for the object's page.
    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(), // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     not_zero,
                     &ensure_not_white,
                     Label::kNear);

    // Value is on an evacuation candidate and the slot must be recorded:
    // the marker has to be informed.
    __ jmp(&need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ push(regs_.object());
  // Branches to need_incremental_pop_object when the value still needed
  // marking work; otherwise falls through with nothing more to report.
  __ EnsureNotWhite(regs_.scratch0(), // The value.
                    regs_.scratch1(), // Scratch.
                    regs_.object(), // Scratch.
                    &need_incremental_pop_object,
                    Label::kNear);
  __ pop(regs_.object());

  // EnsureNotWhite did not branch: the marker does not need to be informed;
  // finish per on_no_need.
  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  // Restore the object register pushed above before taking the
  // need-incremental path.
  __ bind(&need_incremental_pop_object);
  __ pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}
| 6924 |
| 6925 |
6513 #undef __ | 6926 #undef __ |
6514 | 6927 |
6515 } } // namespace v8::internal | 6928 } } // namespace v8::internal |
6516 | 6929 |
6517 #endif // V8_TARGET_ARCH_IA32 | 6930 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |