Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(2)

Side by Side Diff: src/ia32/code-stubs-ia32.cc

Issue 7060010: Merge bleeding edge into the GC branch up to 7948. The asserts (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 9 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/ia32/code-stubs-ia32.h ('k') | src/ia32/frames-ia32.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 11 matching lines...) Expand all
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #if defined(V8_TARGET_ARCH_IA32) 30 #if defined(V8_TARGET_ARCH_IA32)
31 31
32 #include "bootstrapper.h"
32 #include "code-stubs.h" 33 #include "code-stubs.h"
33 #include "bootstrapper.h"
34 #include "isolate.h" 34 #include "isolate.h"
35 #include "jsregexp.h" 35 #include "jsregexp.h"
36 #include "macro-assembler-ia32-inl.h" 36 #include "macro-assembler-ia32-inl.h"
37 #include "regexp-macro-assembler.h" 37 #include "regexp-macro-assembler.h"
38 38
39 namespace v8 { 39 namespace v8 {
40 namespace internal { 40 namespace internal {
41 41
42 #define __ ACCESS_MASM(masm) 42 #define __ ACCESS_MASM(masm)
43 43
44 void ToNumberStub::Generate(MacroAssembler* masm) { 44 void ToNumberStub::Generate(MacroAssembler* masm) {
45 // The ToNumber stub takes one argument in eax. 45 // The ToNumber stub takes one argument in eax.
46 NearLabel check_heap_number, call_builtin; 46 Label check_heap_number, call_builtin;
47 __ test(eax, Immediate(kSmiTagMask)); 47 __ test(eax, Immediate(kSmiTagMask));
48 __ j(not_zero, &check_heap_number); 48 __ j(not_zero, &check_heap_number, Label::kNear);
49 __ ret(0); 49 __ ret(0);
50 50
51 __ bind(&check_heap_number); 51 __ bind(&check_heap_number);
52 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 52 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
53 Factory* factory = masm->isolate()->factory(); 53 Factory* factory = masm->isolate()->factory();
54 __ cmp(Operand(ebx), Immediate(factory->heap_number_map())); 54 __ cmp(Operand(ebx), Immediate(factory->heap_number_map()));
55 __ j(not_equal, &call_builtin); 55 __ j(not_equal, &call_builtin, Label::kNear);
56 __ ret(0); 56 __ ret(0);
57 57
58 __ bind(&call_builtin); 58 __ bind(&call_builtin);
59 __ pop(ecx); // Pop return address. 59 __ pop(ecx); // Pop return address.
60 __ push(eax); 60 __ push(eax);
61 __ push(ecx); // Push return address. 61 __ push(ecx); // Push return address.
62 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); 62 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
63 } 63 }
64 64
65 65
(...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after
236 // Return and remove the on-stack parameters. 236 // Return and remove the on-stack parameters.
237 __ ret(3 * kPointerSize); 237 __ ret(3 * kPointerSize);
238 238
239 __ bind(&slow_case); 239 __ bind(&slow_case);
240 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); 240 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
241 } 241 }
242 242
243 243
244 // NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined). 244 // NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
245 void ToBooleanStub::Generate(MacroAssembler* masm) { 245 void ToBooleanStub::Generate(MacroAssembler* masm) {
246 NearLabel false_result, true_result, not_string; 246 Label false_result, true_result, not_string;
247 __ mov(eax, Operand(esp, 1 * kPointerSize)); 247 __ mov(eax, Operand(esp, 1 * kPointerSize));
248 Factory* factory = masm->isolate()->factory();
249
250 // undefined -> false
251 __ cmp(eax, factory->undefined_value());
252 __ j(equal, &false_result);
253
254 // Boolean -> its value
255 __ cmp(eax, factory->true_value());
256 __ j(equal, &true_result);
257 __ cmp(eax, factory->false_value());
258 __ j(equal, &false_result);
259
260 // Smis: 0 -> false, all other -> true
261 __ test(eax, Operand(eax));
262 __ j(zero, &false_result);
263 __ test(eax, Immediate(kSmiTagMask));
264 __ j(zero, &true_result);
248 265
249 // 'null' => false. 266 // 'null' => false.
250 Factory* factory = masm->isolate()->factory();
251 __ cmp(eax, factory->null_value()); 267 __ cmp(eax, factory->null_value());
252 __ j(equal, &false_result); 268 __ j(equal, &false_result, Label::kNear);
253 269
254 // Get the map and type of the heap object. 270 // Get the map and type of the heap object.
255 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); 271 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
256 __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset)); 272 __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset));
257 273
258 // Undetectable => false. 274 // Undetectable => false.
259 __ test_b(FieldOperand(edx, Map::kBitFieldOffset), 275 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
260 1 << Map::kIsUndetectable); 276 1 << Map::kIsUndetectable);
261 __ j(not_zero, &false_result); 277 __ j(not_zero, &false_result, Label::kNear);
262 278
263 // JavaScript object => true. 279 // JavaScript object => true.
264 __ CmpInstanceType(edx, FIRST_JS_OBJECT_TYPE); 280 __ CmpInstanceType(edx, FIRST_JS_OBJECT_TYPE);
265 __ j(above_equal, &true_result); 281 __ j(above_equal, &true_result, Label::kNear);
266 282
267 // String value => false iff empty. 283 // String value => false iff empty.
268 __ CmpInstanceType(edx, FIRST_NONSTRING_TYPE); 284 __ CmpInstanceType(edx, FIRST_NONSTRING_TYPE);
269 __ j(above_equal, &not_string); 285 __ j(above_equal, &not_string, Label::kNear);
270 STATIC_ASSERT(kSmiTag == 0); 286 STATIC_ASSERT(kSmiTag == 0);
271 __ cmp(FieldOperand(eax, String::kLengthOffset), Immediate(0)); 287 __ cmp(FieldOperand(eax, String::kLengthOffset), Immediate(0));
272 __ j(zero, &false_result); 288 __ j(zero, &false_result, Label::kNear);
273 __ jmp(&true_result); 289 __ jmp(&true_result, Label::kNear);
274 290
275 __ bind(&not_string); 291 __ bind(&not_string);
276 // HeapNumber => false iff +0, -0, or NaN. 292 // HeapNumber => false iff +0, -0, or NaN.
277 __ cmp(edx, factory->heap_number_map()); 293 __ cmp(edx, factory->heap_number_map());
278 __ j(not_equal, &true_result); 294 __ j(not_equal, &true_result, Label::kNear);
279 __ fldz(); 295 __ fldz();
280 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset)); 296 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
281 __ FCmp(); 297 __ FCmp();
282 __ j(zero, &false_result); 298 __ j(zero, &false_result, Label::kNear);
283 // Fall through to |true_result|. 299 // Fall through to |true_result|.
284 300
285 // Return 1/0 for true/false in eax. 301 // Return 1/0 for true/false in eax.
286 __ bind(&true_result); 302 __ bind(&true_result);
287 __ mov(eax, 1); 303 __ mov(eax, 1);
288 __ ret(1 * kPointerSize); 304 __ ret(1 * kPointerSize);
289 __ bind(&false_result); 305 __ bind(&false_result);
290 __ mov(eax, 0); 306 __ mov(eax, 0);
291 __ ret(1 * kPointerSize); 307 __ ret(1 * kPointerSize);
292 } 308 }
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
358 Label* non_float, 374 Label* non_float,
359 Register scratch); 375 Register scratch);
360 376
361 // Checks that the two floating point numbers on top of the FPU stack 377 // Checks that the two floating point numbers on top of the FPU stack
362 // have int32 values. 378 // have int32 values.
363 static void CheckFloatOperandsAreInt32(MacroAssembler* masm, 379 static void CheckFloatOperandsAreInt32(MacroAssembler* masm,
364 Label* non_int32); 380 Label* non_int32);
365 381
366 // Takes the operands in edx and eax and loads them as integers in eax 382 // Takes the operands in edx and eax and loads them as integers in eax
367 // and ecx. 383 // and ecx.
368 static void LoadAsIntegers(MacroAssembler* masm,
369 TypeInfo type_info,
370 bool use_sse3,
371 Label* operand_conversion_failure);
372 static void LoadNumbersAsIntegers(MacroAssembler* masm,
373 TypeInfo type_info,
374 bool use_sse3,
375 Label* operand_conversion_failure);
376 static void LoadUnknownsAsIntegers(MacroAssembler* masm, 384 static void LoadUnknownsAsIntegers(MacroAssembler* masm,
377 bool use_sse3, 385 bool use_sse3,
378 Label* operand_conversion_failure); 386 Label* operand_conversion_failure);
379 387
380 // Must only be called after LoadUnknownsAsIntegers. Assumes that the 388 // Must only be called after LoadUnknownsAsIntegers. Assumes that the
381 // operands are pushed on the stack, and that their conversions to int32 389 // operands are pushed on the stack, and that their conversions to int32
382 // are in eax and ecx. Checks that the original numbers were in the int32 390 // are in eax and ecx. Checks that the original numbers were in the int32
383 // range. 391 // range.
384 static void CheckLoadedIntegersWereInt32(MacroAssembler* masm, 392 static void CheckLoadedIntegersWereInt32(MacroAssembler* masm,
385 bool use_sse3, 393 bool use_sse3,
(...skipping 21 matching lines...) Expand all
407 Register scratch); 415 Register scratch);
408 }; 416 };
409 417
410 418
411 // Get the integer part of a heap number. Surprisingly, all this bit twiddling 419 // Get the integer part of a heap number. Surprisingly, all this bit twiddling
412 // is faster than using the built-in instructions on floating point registers. 420 // is faster than using the built-in instructions on floating point registers.
413 // Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the 421 // Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the
414 // trashed registers. 422 // trashed registers.
415 static void IntegerConvert(MacroAssembler* masm, 423 static void IntegerConvert(MacroAssembler* masm,
416 Register source, 424 Register source,
417 TypeInfo type_info,
418 bool use_sse3, 425 bool use_sse3,
419 Label* conversion_failure) { 426 Label* conversion_failure) {
420 ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx)); 427 ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx));
421 Label done, right_exponent, normal_exponent; 428 Label done, right_exponent, normal_exponent;
422 Register scratch = ebx; 429 Register scratch = ebx;
423 Register scratch2 = edi; 430 Register scratch2 = edi;
424 if (type_info.IsInteger32() && CpuFeatures::IsSupported(SSE2)) { 431 // Get exponent word.
425 CpuFeatures::Scope scope(SSE2); 432 __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
426 __ cvttsd2si(ecx, FieldOperand(source, HeapNumber::kValueOffset)); 433 // Get exponent alone in scratch2.
427 return; 434 __ mov(scratch2, scratch);
428 } 435 __ and_(scratch2, HeapNumber::kExponentMask);
429 if (!type_info.IsInteger32() || !use_sse3) {
430 // Get exponent word.
431 __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
432 // Get exponent alone in scratch2.
433 __ mov(scratch2, scratch);
434 __ and_(scratch2, HeapNumber::kExponentMask);
435 }
436 if (use_sse3) { 436 if (use_sse3) {
437 CpuFeatures::Scope scope(SSE3); 437 CpuFeatures::Scope scope(SSE3);
438 if (!type_info.IsInteger32()) { 438 // Check whether the exponent is too big for a 64 bit signed integer.
439 // Check whether the exponent is too big for a 64 bit signed integer. 439 static const uint32_t kTooBigExponent =
440 static const uint32_t kTooBigExponent = 440 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
441 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift; 441 __ cmp(Operand(scratch2), Immediate(kTooBigExponent));
442 __ cmp(Operand(scratch2), Immediate(kTooBigExponent)); 442 __ j(greater_equal, conversion_failure);
443 __ j(greater_equal, conversion_failure);
444 }
445 // Load x87 register with heap number. 443 // Load x87 register with heap number.
446 __ fld_d(FieldOperand(source, HeapNumber::kValueOffset)); 444 __ fld_d(FieldOperand(source, HeapNumber::kValueOffset));
447 // Reserve space for 64 bit answer. 445 // Reserve space for 64 bit answer.
448 __ sub(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint. 446 __ sub(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint.
449 // Do conversion, which cannot fail because we checked the exponent. 447 // Do conversion, which cannot fail because we checked the exponent.
450 __ fisttp_d(Operand(esp, 0)); 448 __ fisttp_d(Operand(esp, 0));
451 __ mov(ecx, Operand(esp, 0)); // Load low word of answer into ecx. 449 __ mov(ecx, Operand(esp, 0)); // Load low word of answer into ecx.
452 __ add(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint. 450 __ add(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint.
453 } else { 451 } else {
454 // Load ecx with zero. We use this either for the final shift or 452 // Load ecx with zero. We use this either for the final shift or
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after
533 // it's probably slower to test than just to do it. 531 // it's probably slower to test than just to do it.
534 __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset)); 532 __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset));
535 // Shift down 22 bits to get the most significant 10 bits or the low 533 // Shift down 22 bits to get the most significant 10 bits or the low
536 // mantissa word. 534 // mantissa word.
537 __ shr(scratch2, 32 - shift_distance); 535 __ shr(scratch2, 32 - shift_distance);
538 __ or_(scratch2, Operand(scratch)); 536 __ or_(scratch2, Operand(scratch));
539 // Move down according to the exponent. 537 // Move down according to the exponent.
540 __ shr_cl(scratch2); 538 __ shr_cl(scratch2);
541 // Now the unsigned answer is in scratch2. We need to move it to ecx and 539 // Now the unsigned answer is in scratch2. We need to move it to ecx and
542 // we may need to fix the sign. 540 // we may need to fix the sign.
543 NearLabel negative; 541 Label negative;
544 __ xor_(ecx, Operand(ecx)); 542 __ xor_(ecx, Operand(ecx));
545 __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset)); 543 __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset));
546 __ j(greater, &negative); 544 __ j(greater, &negative, Label::kNear);
547 __ mov(ecx, scratch2); 545 __ mov(ecx, scratch2);
548 __ jmp(&done); 546 __ jmp(&done, Label::kNear);
549 __ bind(&negative); 547 __ bind(&negative);
550 __ sub(ecx, Operand(scratch2)); 548 __ sub(ecx, Operand(scratch2));
551 __ bind(&done); 549 __ bind(&done);
552 } 550 }
553 } 551 }
554 552
555 553
556 Handle<Code> GetTypeRecordingUnaryOpStub(int key, 554 Handle<Code> GetTypeRecordingUnaryOpStub(int key,
557 TRUnaryOpIC::TypeInfo type_info) { 555 TRUnaryOpIC::TypeInfo type_info) {
558 TypeRecordingUnaryOpStub stub(key, type_info); 556 TypeRecordingUnaryOpStub stub(key, type_info);
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after
632 case Token::BIT_NOT: 630 case Token::BIT_NOT:
633 GenerateSmiStubBitNot(masm); 631 GenerateSmiStubBitNot(masm);
634 break; 632 break;
635 default: 633 default:
636 UNREACHABLE(); 634 UNREACHABLE();
637 } 635 }
638 } 636 }
639 637
640 638
641 void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { 639 void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
642 NearLabel non_smi; 640 Label non_smi, undo, slow;
643 Label undo, slow; 641 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow,
644 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow); 642 Label::kNear, Label::kNear, Label::kNear);
645 __ bind(&undo); 643 __ bind(&undo);
646 GenerateSmiCodeUndo(masm); 644 GenerateSmiCodeUndo(masm);
647 __ bind(&non_smi); 645 __ bind(&non_smi);
648 __ bind(&slow); 646 __ bind(&slow);
649 GenerateTypeTransition(masm); 647 GenerateTypeTransition(masm);
650 } 648 }
651 649
652 650
653 void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { 651 void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
654 NearLabel non_smi; 652 Label non_smi;
655 GenerateSmiCodeBitNot(masm, &non_smi); 653 GenerateSmiCodeBitNot(masm, &non_smi);
656 __ bind(&non_smi); 654 __ bind(&non_smi);
657 GenerateTypeTransition(masm); 655 GenerateTypeTransition(masm);
658 } 656 }
659 657
660 658
661 void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, 659 void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
662 NearLabel* non_smi, 660 Label* non_smi,
663 Label* undo, 661 Label* undo,
664 Label* slow) { 662 Label* slow,
663 Label::Distance non_smi_near,
664 Label::Distance undo_near,
665 Label::Distance slow_near) {
665 // Check whether the value is a smi. 666 // Check whether the value is a smi.
666 __ test(eax, Immediate(kSmiTagMask)); 667 __ test(eax, Immediate(kSmiTagMask));
667 __ j(not_zero, non_smi); 668 __ j(not_zero, non_smi, non_smi_near);
668 669
669 // We can't handle -0 with smis, so use a type transition for that case. 670 // We can't handle -0 with smis, so use a type transition for that case.
670 __ test(eax, Operand(eax)); 671 __ test(eax, Operand(eax));
671 __ j(zero, slow); 672 __ j(zero, slow, slow_near);
672 673
673 // Try optimistic subtraction '0 - value', saving operand in eax for undo. 674 // Try optimistic subtraction '0 - value', saving operand in eax for undo.
674 __ mov(edx, Operand(eax)); 675 __ mov(edx, Operand(eax));
675 __ Set(eax, Immediate(0)); 676 __ Set(eax, Immediate(0));
676 __ sub(eax, Operand(edx)); 677 __ sub(eax, Operand(edx));
677 __ j(overflow, undo); 678 __ j(overflow, undo, undo_near);
678 __ ret(0); 679 __ ret(0);
679 } 680 }
680 681
681 682
682 void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm, 683 void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot(
683 NearLabel* non_smi) { 684 MacroAssembler* masm,
685 Label* non_smi,
686 Label::Distance non_smi_near) {
684 // Check whether the value is a smi. 687 // Check whether the value is a smi.
685 __ test(eax, Immediate(kSmiTagMask)); 688 __ test(eax, Immediate(kSmiTagMask));
686 __ j(not_zero, non_smi); 689 __ j(not_zero, non_smi, non_smi_near);
687 690
688 // Flip bits and revert inverted smi-tag. 691 // Flip bits and revert inverted smi-tag.
689 __ not_(eax); 692 __ not_(eax);
690 __ and_(eax, ~kSmiTagMask); 693 __ and_(eax, ~kSmiTagMask);
691 __ ret(0); 694 __ ret(0);
692 } 695 }
693 696
694 697
695 void TypeRecordingUnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) { 698 void TypeRecordingUnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) {
696 __ mov(eax, Operand(edx)); 699 __ mov(eax, Operand(edx));
697 } 700 }
698 701
699 702
700 // TODO(svenpanne): Use virtual functions instead of switch. 703 // TODO(svenpanne): Use virtual functions instead of switch.
701 void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { 704 void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
702 switch (op_) { 705 switch (op_) {
703 case Token::SUB: 706 case Token::SUB:
704 GenerateHeapNumberStubSub(masm); 707 GenerateHeapNumberStubSub(masm);
705 break; 708 break;
706 case Token::BIT_NOT: 709 case Token::BIT_NOT:
707 GenerateHeapNumberStubBitNot(masm); 710 GenerateHeapNumberStubBitNot(masm);
708 break; 711 break;
709 default: 712 default:
710 UNREACHABLE(); 713 UNREACHABLE();
711 } 714 }
712 } 715 }
713 716
714 717
715 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { 718 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
716 NearLabel non_smi; 719 Label non_smi, undo, slow, call_builtin;
717 Label undo, slow; 720 GenerateSmiCodeSub(masm, &non_smi, &undo, &call_builtin, Label::kNear);
718 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow);
719 __ bind(&non_smi); 721 __ bind(&non_smi);
720 GenerateHeapNumberCodeSub(masm, &slow); 722 GenerateHeapNumberCodeSub(masm, &slow);
721 __ bind(&undo); 723 __ bind(&undo);
722 GenerateSmiCodeUndo(masm); 724 GenerateSmiCodeUndo(masm);
723 __ bind(&slow); 725 __ bind(&slow);
724 GenerateTypeTransition(masm); 726 GenerateTypeTransition(masm);
727 __ bind(&call_builtin);
728 GenerateGenericCodeFallback(masm);
725 } 729 }
726 730
727 731
728 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( 732 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot(
729 MacroAssembler* masm) { 733 MacroAssembler* masm) {
730 NearLabel non_smi; 734 Label non_smi, slow;
731 Label slow; 735 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
732 GenerateSmiCodeBitNot(masm, &non_smi);
733 __ bind(&non_smi); 736 __ bind(&non_smi);
734 GenerateHeapNumberCodeBitNot(masm, &slow); 737 GenerateHeapNumberCodeBitNot(masm, &slow);
735 __ bind(&slow); 738 __ bind(&slow);
736 GenerateTypeTransition(masm); 739 GenerateTypeTransition(masm);
737 } 740 }
738 741
739 742
740 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, 743 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
741 Label* slow) { 744 Label* slow) {
742 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); 745 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
774 777
775 778
776 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( 779 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot(
777 MacroAssembler* masm, 780 MacroAssembler* masm,
778 Label* slow) { 781 Label* slow) {
779 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); 782 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
780 __ cmp(edx, masm->isolate()->factory()->heap_number_map()); 783 __ cmp(edx, masm->isolate()->factory()->heap_number_map());
781 __ j(not_equal, slow); 784 __ j(not_equal, slow);
782 785
783 // Convert the heap number in eax to an untagged integer in ecx. 786 // Convert the heap number in eax to an untagged integer in ecx.
784 IntegerConvert(masm, eax, TypeInfo::Unknown(), CpuFeatures::IsSupported(SSE3), 787 IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), slow);
785 slow);
786 788
787 // Do the bitwise operation and check if the result fits in a smi. 789 // Do the bitwise operation and check if the result fits in a smi.
788 NearLabel try_float; 790 Label try_float;
789 __ not_(ecx); 791 __ not_(ecx);
790 __ cmp(ecx, 0xc0000000); 792 __ cmp(ecx, 0xc0000000);
791 __ j(sign, &try_float); 793 __ j(sign, &try_float, Label::kNear);
792 794
793 // Tag the result as a smi and we're done. 795 // Tag the result as a smi and we're done.
794 STATIC_ASSERT(kSmiTagSize == 1); 796 STATIC_ASSERT(kSmiTagSize == 1);
795 __ lea(eax, Operand(ecx, times_2, kSmiTag)); 797 __ lea(eax, Operand(ecx, times_2, kSmiTag));
796 __ ret(0); 798 __ ret(0);
797 799
798 // Try to store the result in a heap number. 800 // Try to store the result in a heap number.
799 __ bind(&try_float); 801 __ bind(&try_float);
800 if (mode_ == UNARY_NO_OVERWRITE) { 802 if (mode_ == UNARY_NO_OVERWRITE) {
801 Label slow_allocate_heapnumber, heapnumber_allocated; 803 Label slow_allocate_heapnumber, heapnumber_allocated;
802 __ mov(ebx, eax); 804 __ mov(ebx, eax);
803 __ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber); 805 __ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber);
804 __ jmp(&heapnumber_allocated); 806 __ jmp(&heapnumber_allocated);
805 807
806 __ bind(&slow_allocate_heapnumber); 808 __ bind(&slow_allocate_heapnumber);
807 __ EnterInternalFrame(); 809 __ EnterInternalFrame();
808 // Push the original HeapNumber on the stack. The integer value can't 810 // Push the original HeapNumber on the stack. The integer value can't
809 // be stored since it's untagged and not in the smi range (so we can't 811 // be stored since it's untagged and not in the smi range (so we can't
810 // smi-tag it). We'll recalculate the value after the GC instead. 812 // smi-tag it). We'll recalculate the value after the GC instead.
811 __ push(ebx); 813 __ push(ebx);
812 __ CallRuntime(Runtime::kNumberAlloc, 0); 814 __ CallRuntime(Runtime::kNumberAlloc, 0);
813 // New HeapNumber is in eax. 815 // New HeapNumber is in eax.
814 __ pop(edx); 816 __ pop(edx);
815 __ LeaveInternalFrame(); 817 __ LeaveInternalFrame();
816 // IntegerConvert uses ebx and edi as scratch registers. 818 // IntegerConvert uses ebx and edi as scratch registers.
817 // This conversion won't go slow-case. 819 // This conversion won't go slow-case.
818 IntegerConvert(masm, edx, TypeInfo::Unknown(), 820 IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow);
819 CpuFeatures::IsSupported(SSE3), slow);
820 __ not_(ecx); 821 __ not_(ecx);
821 822
822 __ bind(&heapnumber_allocated); 823 __ bind(&heapnumber_allocated);
823 } 824 }
824 if (CpuFeatures::IsSupported(SSE2)) { 825 if (CpuFeatures::IsSupported(SSE2)) {
825 CpuFeatures::Scope use_sse2(SSE2); 826 CpuFeatures::Scope use_sse2(SSE2);
826 __ cvtsi2sd(xmm0, Operand(ecx)); 827 __ cvtsi2sd(xmm0, Operand(ecx));
827 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); 828 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
828 } else { 829 } else {
829 __ push(ecx); 830 __ push(ecx);
(...skipping 14 matching lines...) Expand all
844 case Token::BIT_NOT: 845 case Token::BIT_NOT:
845 GenerateGenericStubBitNot(masm); 846 GenerateGenericStubBitNot(masm);
846 break; 847 break;
847 default: 848 default:
848 UNREACHABLE(); 849 UNREACHABLE();
849 } 850 }
850 } 851 }
851 852
852 853
853 void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { 854 void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
854 NearLabel non_smi; 855 Label non_smi, undo, slow;
855 Label undo, slow; 856 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear);
856 GenerateSmiCodeSub(masm, &non_smi, &undo, &slow);
857 __ bind(&non_smi); 857 __ bind(&non_smi);
858 GenerateHeapNumberCodeSub(masm, &slow); 858 GenerateHeapNumberCodeSub(masm, &slow);
859 __ bind(&undo); 859 __ bind(&undo);
860 GenerateSmiCodeUndo(masm); 860 GenerateSmiCodeUndo(masm);
861 __ bind(&slow); 861 __ bind(&slow);
862 GenerateGenericCodeFallback(masm); 862 GenerateGenericCodeFallback(masm);
863 } 863 }
864 864
865 865
866 void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { 866 void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
867 NearLabel non_smi; 867 Label non_smi, slow;
868 Label slow; 868 GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
869 GenerateSmiCodeBitNot(masm, &non_smi);
870 __ bind(&non_smi); 869 __ bind(&non_smi);
871 GenerateHeapNumberCodeBitNot(masm, &slow); 870 GenerateHeapNumberCodeBitNot(masm, &slow);
872 __ bind(&slow); 871 __ bind(&slow);
873 GenerateGenericCodeFallback(masm); 872 GenerateGenericCodeFallback(masm);
874 } 873 }
875 874
876 875
877 void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( 876 void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback(
878 MacroAssembler* masm) { 877 MacroAssembler* masm) {
879 // Handle the slow case by jumping to the corresponding JavaScript builtin. 878 // Handle the slow case by jumping to the corresponding JavaScript builtin.
(...skipping 177 matching lines...) Expand 10 before | Expand all | Expand 10 after
1057 combined = right; 1056 combined = right;
1058 break; 1057 break;
1059 1058
1060 default: 1059 default:
1061 break; 1060 break;
1062 } 1061 }
1063 1062
1064 // 3. Perform the smi check of the operands. 1063 // 3. Perform the smi check of the operands.
1065 STATIC_ASSERT(kSmiTag == 0); // Adjust zero check if not the case. 1064 STATIC_ASSERT(kSmiTag == 0); // Adjust zero check if not the case.
1066 __ test(combined, Immediate(kSmiTagMask)); 1065 __ test(combined, Immediate(kSmiTagMask));
1067 __ j(not_zero, &not_smis, not_taken); 1066 __ j(not_zero, &not_smis);
1068 1067
1069 // 4. Operands are both smis, perform the operation leaving the result in 1068 // 4. Operands are both smis, perform the operation leaving the result in
1070 // eax and check the result if necessary. 1069 // eax and check the result if necessary.
1071 Comment perform_smi(masm, "-- Perform smi operation"); 1070 Comment perform_smi(masm, "-- Perform smi operation");
1072 Label use_fp_on_smis; 1071 Label use_fp_on_smis;
1073 switch (op_) { 1072 switch (op_) {
1074 case Token::BIT_OR: 1073 case Token::BIT_OR:
1075 // Nothing to do. 1074 // Nothing to do.
1076 break; 1075 break;
1077 1076
1078 case Token::BIT_XOR: 1077 case Token::BIT_XOR:
1079 ASSERT(right.is(eax)); 1078 ASSERT(right.is(eax));
1080 __ xor_(right, Operand(left)); // Bitwise xor is commutative. 1079 __ xor_(right, Operand(left)); // Bitwise xor is commutative.
1081 break; 1080 break;
1082 1081
1083 case Token::BIT_AND: 1082 case Token::BIT_AND:
1084 ASSERT(right.is(eax)); 1083 ASSERT(right.is(eax));
1085 __ and_(right, Operand(left)); // Bitwise and is commutative. 1084 __ and_(right, Operand(left)); // Bitwise and is commutative.
1086 break; 1085 break;
1087 1086
1088 case Token::SHL: 1087 case Token::SHL:
1089 // Remove tags from operands (but keep sign). 1088 // Remove tags from operands (but keep sign).
1090 __ SmiUntag(left); 1089 __ SmiUntag(left);
1091 __ SmiUntag(ecx); 1090 __ SmiUntag(ecx);
1092 // Perform the operation. 1091 // Perform the operation.
1093 __ shl_cl(left); 1092 __ shl_cl(left);
1094 // Check that the *signed* result fits in a smi. 1093 // Check that the *signed* result fits in a smi.
1095 __ cmp(left, 0xc0000000); 1094 __ cmp(left, 0xc0000000);
1096 __ j(sign, &use_fp_on_smis, not_taken); 1095 __ j(sign, &use_fp_on_smis);
1097 // Tag the result and store it in register eax. 1096 // Tag the result and store it in register eax.
1098 __ SmiTag(left); 1097 __ SmiTag(left);
1099 __ mov(eax, left); 1098 __ mov(eax, left);
1100 break; 1099 break;
1101 1100
1102 case Token::SAR: 1101 case Token::SAR:
1103 // Remove tags from operands (but keep sign). 1102 // Remove tags from operands (but keep sign).
1104 __ SmiUntag(left); 1103 __ SmiUntag(left);
1105 __ SmiUntag(ecx); 1104 __ SmiUntag(ecx);
1106 // Perform the operation. 1105 // Perform the operation.
1107 __ sar_cl(left); 1106 __ sar_cl(left);
1108 // Tag the result and store it in register eax. 1107 // Tag the result and store it in register eax.
1109 __ SmiTag(left); 1108 __ SmiTag(left);
1110 __ mov(eax, left); 1109 __ mov(eax, left);
1111 break; 1110 break;
1112 1111
1113 case Token::SHR: 1112 case Token::SHR:
1114 // Remove tags from operands (but keep sign). 1113 // Remove tags from operands (but keep sign).
1115 __ SmiUntag(left); 1114 __ SmiUntag(left);
1116 __ SmiUntag(ecx); 1115 __ SmiUntag(ecx);
1117 // Perform the operation. 1116 // Perform the operation.
1118 __ shr_cl(left); 1117 __ shr_cl(left);
1119 // Check that the *unsigned* result fits in a smi. 1118 // Check that the *unsigned* result fits in a smi.
1120 // Neither of the two high-order bits can be set: 1119 // Neither of the two high-order bits can be set:
1121 // - 0x80000000: high bit would be lost when smi tagging. 1120 // - 0x80000000: high bit would be lost when smi tagging.
1122 // - 0x40000000: this number would convert to negative when 1121 // - 0x40000000: this number would convert to negative when
1123 // Smi tagging these two cases can only happen with shifts 1122 // Smi tagging these two cases can only happen with shifts
1124 // by 0 or 1 when handed a valid smi. 1123 // by 0 or 1 when handed a valid smi.
1125 __ test(left, Immediate(0xc0000000)); 1124 __ test(left, Immediate(0xc0000000));
1126 __ j(not_zero, slow, not_taken); 1125 __ j(not_zero, &use_fp_on_smis);
1127 // Tag the result and store it in register eax. 1126 // Tag the result and store it in register eax.
1128 __ SmiTag(left); 1127 __ SmiTag(left);
1129 __ mov(eax, left); 1128 __ mov(eax, left);
1130 break; 1129 break;
1131 1130
1132 case Token::ADD: 1131 case Token::ADD:
1133 ASSERT(right.is(eax)); 1132 ASSERT(right.is(eax));
1134 __ add(right, Operand(left)); // Addition is commutative. 1133 __ add(right, Operand(left)); // Addition is commutative.
1135 __ j(overflow, &use_fp_on_smis, not_taken); 1134 __ j(overflow, &use_fp_on_smis);
1136 break; 1135 break;
1137 1136
1138 case Token::SUB: 1137 case Token::SUB:
1139 __ sub(left, Operand(right)); 1138 __ sub(left, Operand(right));
1140 __ j(overflow, &use_fp_on_smis, not_taken); 1139 __ j(overflow, &use_fp_on_smis);
1141 __ mov(eax, left); 1140 __ mov(eax, left);
1142 break; 1141 break;
1143 1142
1144 case Token::MUL: 1143 case Token::MUL:
1145 // If the smi tag is 0 we can just leave the tag on one operand. 1144 // If the smi tag is 0 we can just leave the tag on one operand.
1146 STATIC_ASSERT(kSmiTag == 0); // Adjust code below if not the case. 1145 STATIC_ASSERT(kSmiTag == 0); // Adjust code below if not the case.
1147 // We can't revert the multiplication if the result is not a smi 1146 // We can't revert the multiplication if the result is not a smi
1148 // so save the right operand. 1147 // so save the right operand.
1149 __ mov(ebx, right); 1148 __ mov(ebx, right);
1150 // Remove tag from one of the operands (but keep sign). 1149 // Remove tag from one of the operands (but keep sign).
1151 __ SmiUntag(right); 1150 __ SmiUntag(right);
1152 // Do multiplication. 1151 // Do multiplication.
1153 __ imul(right, Operand(left)); // Multiplication is commutative. 1152 __ imul(right, Operand(left)); // Multiplication is commutative.
1154 __ j(overflow, &use_fp_on_smis, not_taken); 1153 __ j(overflow, &use_fp_on_smis);
1155 // Check for negative zero result. Use combined = left | right. 1154 // Check for negative zero result. Use combined = left | right.
1156 __ NegativeZeroTest(right, combined, &use_fp_on_smis); 1155 __ NegativeZeroTest(right, combined, &use_fp_on_smis);
1157 break; 1156 break;
1158 1157
1159 case Token::DIV: 1158 case Token::DIV:
1160 // We can't revert the division if the result is not a smi so 1159 // We can't revert the division if the result is not a smi so
1161 // save the left operand. 1160 // save the left operand.
1162 __ mov(edi, left); 1161 __ mov(edi, left);
1163 // Check for 0 divisor. 1162 // Check for 0 divisor.
1164 __ test(right, Operand(right)); 1163 __ test(right, Operand(right));
1165 __ j(zero, &use_fp_on_smis, not_taken); 1164 __ j(zero, &use_fp_on_smis);
1166 // Sign extend left into edx:eax. 1165 // Sign extend left into edx:eax.
1167 ASSERT(left.is(eax)); 1166 ASSERT(left.is(eax));
1168 __ cdq(); 1167 __ cdq();
1169 // Divide edx:eax by right. 1168 // Divide edx:eax by right.
1170 __ idiv(right); 1169 __ idiv(right);
1171 // Check for the corner case of dividing the most negative smi by 1170 // Check for the corner case of dividing the most negative smi by
1172 // -1. We cannot use the overflow flag, since it is not set by idiv 1171 // -1. We cannot use the overflow flag, since it is not set by idiv
1173 // instruction. 1172 // instruction.
1174 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); 1173 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
1175 __ cmp(eax, 0x40000000); 1174 __ cmp(eax, 0x40000000);
1176 __ j(equal, &use_fp_on_smis); 1175 __ j(equal, &use_fp_on_smis);
1177 // Check for negative zero result. Use combined = left | right. 1176 // Check for negative zero result. Use combined = left | right.
1178 __ NegativeZeroTest(eax, combined, &use_fp_on_smis); 1177 __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
1179 // Check that the remainder is zero. 1178 // Check that the remainder is zero.
1180 __ test(edx, Operand(edx)); 1179 __ test(edx, Operand(edx));
1181 __ j(not_zero, &use_fp_on_smis); 1180 __ j(not_zero, &use_fp_on_smis);
1182 // Tag the result and store it in register eax. 1181 // Tag the result and store it in register eax.
1183 __ SmiTag(eax); 1182 __ SmiTag(eax);
1184 break; 1183 break;
1185 1184
1186 case Token::MOD: 1185 case Token::MOD:
1187 // Check for 0 divisor. 1186 // Check for 0 divisor.
1188 __ test(right, Operand(right)); 1187 __ test(right, Operand(right));
1189 __ j(zero, &not_smis, not_taken); 1188 __ j(zero, &not_smis);
1190 1189
1191 // Sign extend left into edx:eax. 1190 // Sign extend left into edx:eax.
1192 ASSERT(left.is(eax)); 1191 ASSERT(left.is(eax));
1193 __ cdq(); 1192 __ cdq();
1194 // Divide edx:eax by right. 1193 // Divide edx:eax by right.
1195 __ idiv(right); 1194 __ idiv(right);
1196 // Check for negative zero result. Use combined = left | right. 1195 // Check for negative zero result. Use combined = left | right.
1197 __ NegativeZeroTest(edx, combined, slow); 1196 __ NegativeZeroTest(edx, combined, slow);
1198 // Move remainder to register eax. 1197 // Move remainder to register eax.
1199 __ mov(eax, edx); 1198 __ mov(eax, edx);
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
1252 __ mov(eax, edi); 1251 __ mov(eax, edi);
1253 break; 1252 break;
1254 default: 1253 default:
1255 // No other operators jump to use_fp_on_smis. 1254 // No other operators jump to use_fp_on_smis.
1256 break; 1255 break;
1257 } 1256 }
1258 __ jmp(&not_smis); 1257 __ jmp(&not_smis);
1259 } else { 1258 } else {
1260 ASSERT(allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS); 1259 ASSERT(allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS);
1261 switch (op_) { 1260 switch (op_) {
1262 case Token::SHL: { 1261 case Token::SHL:
1262 case Token::SHR: {
1263 Comment perform_float(masm, "-- Perform float operation on smis"); 1263 Comment perform_float(masm, "-- Perform float operation on smis");
1264 __ bind(&use_fp_on_smis); 1264 __ bind(&use_fp_on_smis);
1265 // Result we want is in left == edx, so we can put the allocated heap 1265 // Result we want is in left == edx, so we can put the allocated heap
1266 // number in eax. 1266 // number in eax.
1267 __ AllocateHeapNumber(eax, ecx, ebx, slow); 1267 __ AllocateHeapNumber(eax, ecx, ebx, slow);
1268 // Store the result in the HeapNumber and return. 1268 // Store the result in the HeapNumber and return.
1269 if (CpuFeatures::IsSupported(SSE2)) { 1269 // It's OK to overwrite the arguments on the stack because we
1270 CpuFeatures::Scope use_sse2(SSE2); 1270 // are about to return.
1271 __ cvtsi2sd(xmm0, Operand(left)); 1271 if (op_ == Token::SHR) {
1272 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); 1272 __ mov(Operand(esp, 1 * kPointerSize), left);
1273 __ mov(Operand(esp, 2 * kPointerSize), Immediate(0));
1274 __ fild_d(Operand(esp, 1 * kPointerSize));
1275 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1273 } else { 1276 } else {
1274 // It's OK to overwrite the right argument on the stack because we 1277 ASSERT_EQ(Token::SHL, op_);
1275 // are about to return. 1278 if (CpuFeatures::IsSupported(SSE2)) {
1276 __ mov(Operand(esp, 1 * kPointerSize), left); 1279 CpuFeatures::Scope use_sse2(SSE2);
1277 __ fild_s(Operand(esp, 1 * kPointerSize)); 1280 __ cvtsi2sd(xmm0, Operand(left));
1278 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); 1281 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
1282 } else {
1283 __ mov(Operand(esp, 1 * kPointerSize), left);
1284 __ fild_s(Operand(esp, 1 * kPointerSize));
1285 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
1286 }
1279 } 1287 }
1280 __ ret(2 * kPointerSize); 1288 __ ret(2 * kPointerSize);
1281 break; 1289 break;
1282 } 1290 }
1283 1291
1284 case Token::ADD: 1292 case Token::ADD:
1285 case Token::SUB: 1293 case Token::SUB:
1286 case Token::MUL: 1294 case Token::MUL:
1287 case Token::DIV: { 1295 case Token::DIV: {
1288 Comment perform_float(masm, "-- Perform float operation on smis"); 1296 Comment perform_float(masm, "-- Perform float operation on smis");
1289 __ bind(&use_fp_on_smis); 1297 __ bind(&use_fp_on_smis);
1290 // Restore arguments to edx, eax. 1298 // Restore arguments to edx, eax.
1291 switch (op_) { 1299 switch (op_) {
(...skipping 279 matching lines...) Expand 10 before | Expand all | Expand 10 after
1571 // Tag smi result and return. 1579 // Tag smi result and return.
1572 __ SmiTag(eax); 1580 __ SmiTag(eax);
1573 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. 1581 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
1574 1582
1575 // All ops except SHR return a signed int32 that we load in 1583 // All ops except SHR return a signed int32 that we load in
1576 // a HeapNumber. 1584 // a HeapNumber.
1577 if (op_ != Token::SHR) { 1585 if (op_ != Token::SHR) {
1578 __ bind(&non_smi_result); 1586 __ bind(&non_smi_result);
1579 // Allocate a heap number if needed. 1587 // Allocate a heap number if needed.
1580 __ mov(ebx, Operand(eax)); // ebx: result 1588 __ mov(ebx, Operand(eax)); // ebx: result
1581 NearLabel skip_allocation; 1589 Label skip_allocation;
1582 switch (mode_) { 1590 switch (mode_) {
1583 case OVERWRITE_LEFT: 1591 case OVERWRITE_LEFT:
1584 case OVERWRITE_RIGHT: 1592 case OVERWRITE_RIGHT:
1585 // If the operand was an object, we skip the 1593 // If the operand was an object, we skip the
1586 // allocation of a heap number. 1594 // allocation of a heap number.
1587 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ? 1595 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
1588 1 * kPointerSize : 2 * kPointerSize)); 1596 1 * kPointerSize : 2 * kPointerSize));
1589 __ test(eax, Immediate(kSmiTagMask)); 1597 __ test(eax, Immediate(kSmiTagMask));
1590 __ j(not_zero, &skip_allocation, not_taken); 1598 __ j(not_zero, &skip_allocation, Label::kNear);
1591 // Fall through! 1599 // Fall through!
1592 case NO_OVERWRITE: 1600 case NO_OVERWRITE:
1593 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); 1601 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
1594 __ bind(&skip_allocation); 1602 __ bind(&skip_allocation);
1595 break; 1603 break;
1596 default: UNREACHABLE(); 1604 default: UNREACHABLE();
1597 } 1605 }
1598 // Store the result in the HeapNumber and return. 1606 // Store the result in the HeapNumber and return.
1599 if (CpuFeatures::IsSupported(SSE2)) { 1607 if (CpuFeatures::IsSupported(SSE2)) {
1600 CpuFeatures::Scope use_sse2(SSE2); 1608 CpuFeatures::Scope use_sse2(SSE2);
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after
1659 case Token::SHR: 1667 case Token::SHR:
1660 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); 1668 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
1661 break; 1669 break;
1662 default: 1670 default:
1663 UNREACHABLE(); 1671 UNREACHABLE();
1664 } 1672 }
1665 } 1673 }
1666 1674
1667 1675
1668 void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { 1676 void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
1669 Label call_runtime;
1670
1671 if (op_ == Token::ADD) { 1677 if (op_ == Token::ADD) {
1672 // Handle string addition here, because it is the only operation 1678 // Handle string addition here, because it is the only operation
1673 // that does not do a ToNumber conversion on the operands. 1679 // that does not do a ToNumber conversion on the operands.
1674 GenerateAddStrings(masm); 1680 GenerateAddStrings(masm);
1675 } 1681 }
1676 1682
1677 Factory* factory = masm->isolate()->factory(); 1683 Factory* factory = masm->isolate()->factory();
1678 1684
1679 // Convert odd ball arguments to numbers. 1685 // Convert odd ball arguments to numbers.
1680 NearLabel check, done; 1686 Label check, done;
1681 __ cmp(edx, factory->undefined_value()); 1687 __ cmp(edx, factory->undefined_value());
1682 __ j(not_equal, &check); 1688 __ j(not_equal, &check, Label::kNear);
1683 if (Token::IsBitOp(op_)) { 1689 if (Token::IsBitOp(op_)) {
1684 __ xor_(edx, Operand(edx)); 1690 __ xor_(edx, Operand(edx));
1685 } else { 1691 } else {
1686 __ mov(edx, Immediate(factory->nan_value())); 1692 __ mov(edx, Immediate(factory->nan_value()));
1687 } 1693 }
1688 __ jmp(&done); 1694 __ jmp(&done, Label::kNear);
1689 __ bind(&check); 1695 __ bind(&check);
1690 __ cmp(eax, factory->undefined_value()); 1696 __ cmp(eax, factory->undefined_value());
1691 __ j(not_equal, &done); 1697 __ j(not_equal, &done, Label::kNear);
1692 if (Token::IsBitOp(op_)) { 1698 if (Token::IsBitOp(op_)) {
1693 __ xor_(eax, Operand(eax)); 1699 __ xor_(eax, Operand(eax));
1694 } else { 1700 } else {
1695 __ mov(eax, Immediate(factory->nan_value())); 1701 __ mov(eax, Immediate(factory->nan_value()));
1696 } 1702 }
1697 __ bind(&done); 1703 __ bind(&done);
1698 1704
1699 GenerateHeapNumberStub(masm); 1705 GenerateHeapNumberStub(masm);
1700 } 1706 }
1701 1707
(...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after
1788 // Tag smi result and return. 1794 // Tag smi result and return.
1789 __ SmiTag(eax); 1795 __ SmiTag(eax);
1790 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack. 1796 __ ret(2 * kPointerSize); // Drop two pushed arguments from the stack.
1791 1797
1792 // All ops except SHR return a signed int32 that we load in 1798 // All ops except SHR return a signed int32 that we load in
1793 // a HeapNumber. 1799 // a HeapNumber.
1794 if (op_ != Token::SHR) { 1800 if (op_ != Token::SHR) {
1795 __ bind(&non_smi_result); 1801 __ bind(&non_smi_result);
1796 // Allocate a heap number if needed. 1802 // Allocate a heap number if needed.
1797 __ mov(ebx, Operand(eax)); // ebx: result 1803 __ mov(ebx, Operand(eax)); // ebx: result
1798 NearLabel skip_allocation; 1804 Label skip_allocation;
1799 switch (mode_) { 1805 switch (mode_) {
1800 case OVERWRITE_LEFT: 1806 case OVERWRITE_LEFT:
1801 case OVERWRITE_RIGHT: 1807 case OVERWRITE_RIGHT:
1802 // If the operand was an object, we skip the 1808 // If the operand was an object, we skip the
1803 // allocation of a heap number. 1809 // allocation of a heap number.
1804 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ? 1810 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
1805 1 * kPointerSize : 2 * kPointerSize)); 1811 1 * kPointerSize : 2 * kPointerSize));
1806 __ test(eax, Immediate(kSmiTagMask)); 1812 __ test(eax, Immediate(kSmiTagMask));
1807 __ j(not_zero, &skip_allocation, not_taken); 1813 __ j(not_zero, &skip_allocation, Label::kNear);
1808 // Fall through! 1814 // Fall through!
1809 case NO_OVERWRITE: 1815 case NO_OVERWRITE:
1810 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); 1816 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
1811 __ bind(&skip_allocation); 1817 __ bind(&skip_allocation);
1812 break; 1818 break;
1813 default: UNREACHABLE(); 1819 default: UNREACHABLE();
1814 } 1820 }
1815 // Store the result in the HeapNumber and return. 1821 // Store the result in the HeapNumber and return.
1816 if (CpuFeatures::IsSupported(SSE2)) { 1822 if (CpuFeatures::IsSupported(SSE2)) {
1817 CpuFeatures::Scope use_sse2(SSE2); 1823 CpuFeatures::Scope use_sse2(SSE2);
(...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after
1988 // Tag smi result and return. 1994 // Tag smi result and return.
1989 __ SmiTag(eax); 1995 __ SmiTag(eax);
1990 __ ret(2 * kPointerSize); // Drop the arguments from the stack. 1996 __ ret(2 * kPointerSize); // Drop the arguments from the stack.
1991 1997
1992 // All ops except SHR return a signed int32 that we load in 1998 // All ops except SHR return a signed int32 that we load in
1993 // a HeapNumber. 1999 // a HeapNumber.
1994 if (op_ != Token::SHR) { 2000 if (op_ != Token::SHR) {
1995 __ bind(&non_smi_result); 2001 __ bind(&non_smi_result);
1996 // Allocate a heap number if needed. 2002 // Allocate a heap number if needed.
1997 __ mov(ebx, Operand(eax)); // ebx: result 2003 __ mov(ebx, Operand(eax)); // ebx: result
1998 NearLabel skip_allocation; 2004 Label skip_allocation;
1999 switch (mode_) { 2005 switch (mode_) {
2000 case OVERWRITE_LEFT: 2006 case OVERWRITE_LEFT:
2001 case OVERWRITE_RIGHT: 2007 case OVERWRITE_RIGHT:
2002 // If the operand was an object, we skip the 2008 // If the operand was an object, we skip the
2003 // allocation of a heap number. 2009 // allocation of a heap number.
2004 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ? 2010 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
2005 1 * kPointerSize : 2 * kPointerSize)); 2011 1 * kPointerSize : 2 * kPointerSize));
2006 __ test(eax, Immediate(kSmiTagMask)); 2012 __ test(eax, Immediate(kSmiTagMask));
2007 __ j(not_zero, &skip_allocation, not_taken); 2013 __ j(not_zero, &skip_allocation, Label::kNear);
2008 // Fall through! 2014 // Fall through!
2009 case NO_OVERWRITE: 2015 case NO_OVERWRITE:
2010 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime); 2016 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
2011 __ bind(&skip_allocation); 2017 __ bind(&skip_allocation);
2012 break; 2018 break;
2013 default: UNREACHABLE(); 2019 default: UNREACHABLE();
2014 } 2020 }
2015 // Store the result in the HeapNumber and return. 2021 // Store the result in the HeapNumber and return.
2016 if (CpuFeatures::IsSupported(SSE2)) { 2022 if (CpuFeatures::IsSupported(SSE2)) {
2017 CpuFeatures::Scope use_sse2(SSE2); 2023 CpuFeatures::Scope use_sse2(SSE2);
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after
2073 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); 2079 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
2074 break; 2080 break;
2075 default: 2081 default:
2076 UNREACHABLE(); 2082 UNREACHABLE();
2077 } 2083 }
2078 } 2084 }
2079 2085
2080 2086
2081 void TypeRecordingBinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { 2087 void TypeRecordingBinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
2082 ASSERT(op_ == Token::ADD); 2088 ASSERT(op_ == Token::ADD);
2083 NearLabel left_not_string, call_runtime; 2089 Label left_not_string, call_runtime;
2084 2090
2085 // Registers containing left and right operands respectively. 2091 // Registers containing left and right operands respectively.
2086 Register left = edx; 2092 Register left = edx;
2087 Register right = eax; 2093 Register right = eax;
2088 2094
2089 // Test if left operand is a string. 2095 // Test if left operand is a string.
2090 __ test(left, Immediate(kSmiTagMask)); 2096 __ test(left, Immediate(kSmiTagMask));
2091 __ j(zero, &left_not_string); 2097 __ j(zero, &left_not_string, Label::kNear);
2092 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx); 2098 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
2093 __ j(above_equal, &left_not_string); 2099 __ j(above_equal, &left_not_string, Label::kNear);
2094 2100
2095 StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB); 2101 StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB);
2096 GenerateRegisterArgsPush(masm); 2102 GenerateRegisterArgsPush(masm);
2097 __ TailCallStub(&string_add_left_stub); 2103 __ TailCallStub(&string_add_left_stub);
2098 2104
2099 // Left operand is not a string, test right. 2105 // Left operand is not a string, test right.
2100 __ bind(&left_not_string); 2106 __ bind(&left_not_string);
2101 __ test(right, Immediate(kSmiTagMask)); 2107 __ test(right, Immediate(kSmiTagMask));
2102 __ j(zero, &call_runtime); 2108 __ j(zero, &call_runtime, Label::kNear);
2103 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx); 2109 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
2104 __ j(above_equal, &call_runtime); 2110 __ j(above_equal, &call_runtime, Label::kNear);
2105 2111
2106 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); 2112 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
2107 GenerateRegisterArgsPush(masm); 2113 GenerateRegisterArgsPush(masm);
2108 __ TailCallStub(&string_add_right_stub); 2114 __ TailCallStub(&string_add_right_stub);
2109 2115
2110 // Neither argument is a string. 2116 // Neither argument is a string.
2111 __ bind(&call_runtime); 2117 __ bind(&call_runtime);
2112 } 2118 }
2113 2119
2114 2120
2115 void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation( 2121 void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation(
2116 MacroAssembler* masm, 2122 MacroAssembler* masm,
2117 Label* alloc_failure) { 2123 Label* alloc_failure) {
2118 Label skip_allocation; 2124 Label skip_allocation;
2119 OverwriteMode mode = mode_; 2125 OverwriteMode mode = mode_;
2120 switch (mode) { 2126 switch (mode) {
2121 case OVERWRITE_LEFT: { 2127 case OVERWRITE_LEFT: {
2122 // If the argument in edx is already an object, we skip the 2128 // If the argument in edx is already an object, we skip the
2123 // allocation of a heap number. 2129 // allocation of a heap number.
2124 __ test(edx, Immediate(kSmiTagMask)); 2130 __ test(edx, Immediate(kSmiTagMask));
2125 __ j(not_zero, &skip_allocation, not_taken); 2131 __ j(not_zero, &skip_allocation);
2126 // Allocate a heap number for the result. Keep eax and edx intact 2132 // Allocate a heap number for the result. Keep eax and edx intact
2127 // for the possible runtime call. 2133 // for the possible runtime call.
2128 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure); 2134 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
2129 // Now edx can be overwritten losing one of the arguments as we are 2135 // Now edx can be overwritten losing one of the arguments as we are
2130 // now done and will not need it any more. 2136 // now done and will not need it any more.
2131 __ mov(edx, Operand(ebx)); 2137 __ mov(edx, Operand(ebx));
2132 __ bind(&skip_allocation); 2138 __ bind(&skip_allocation);
2133 // Use object in edx as a result holder 2139 // Use object in edx as a result holder
2134 __ mov(eax, Operand(edx)); 2140 __ mov(eax, Operand(edx));
2135 break; 2141 break;
2136 } 2142 }
2137 case OVERWRITE_RIGHT: 2143 case OVERWRITE_RIGHT:
2138 // If the argument in eax is already an object, we skip the 2144 // If the argument in eax is already an object, we skip the
2139 // allocation of a heap number. 2145 // allocation of a heap number.
2140 __ test(eax, Immediate(kSmiTagMask)); 2146 __ test(eax, Immediate(kSmiTagMask));
2141 __ j(not_zero, &skip_allocation, not_taken); 2147 __ j(not_zero, &skip_allocation);
2142 // Fall through! 2148 // Fall through!
2143 case NO_OVERWRITE: 2149 case NO_OVERWRITE:
2144 // Allocate a heap number for the result. Keep eax and edx intact 2150 // Allocate a heap number for the result. Keep eax and edx intact
2145 // for the possible runtime call. 2151 // for the possible runtime call.
2146 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure); 2152 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
2147 // Now eax can be overwritten losing one of the arguments as we are 2153 // Now eax can be overwritten losing one of the arguments as we are
2148 // now done and will not need it any more. 2154 // now done and will not need it any more.
2149 __ mov(eax, ebx); 2155 __ mov(eax, ebx);
2150 __ bind(&skip_allocation); 2156 __ bind(&skip_allocation);
2151 break; 2157 break;
(...skipping 23 matching lines...) Expand all
2175 // xmm1: untagged double input argument 2181 // xmm1: untagged double input argument
2176 // Output: 2182 // Output:
2177 // xmm1: untagged double result. 2183 // xmm1: untagged double result.
2178 2184
2179 Label runtime_call; 2185 Label runtime_call;
2180 Label runtime_call_clear_stack; 2186 Label runtime_call_clear_stack;
2181 Label skip_cache; 2187 Label skip_cache;
2182 const bool tagged = (argument_type_ == TAGGED); 2188 const bool tagged = (argument_type_ == TAGGED);
2183 if (tagged) { 2189 if (tagged) {
2184 // Test that eax is a number. 2190 // Test that eax is a number.
2185 NearLabel input_not_smi; 2191 Label input_not_smi;
2186 NearLabel loaded; 2192 Label loaded;
2187 __ mov(eax, Operand(esp, kPointerSize)); 2193 __ mov(eax, Operand(esp, kPointerSize));
2188 __ test(eax, Immediate(kSmiTagMask)); 2194 __ test(eax, Immediate(kSmiTagMask));
2189 __ j(not_zero, &input_not_smi); 2195 __ j(not_zero, &input_not_smi, Label::kNear);
2190 // Input is a smi. Untag and load it onto the FPU stack. 2196 // Input is a smi. Untag and load it onto the FPU stack.
2191 // Then load the low and high words of the double into ebx, edx. 2197 // Then load the low and high words of the double into ebx, edx.
2192 STATIC_ASSERT(kSmiTagSize == 1); 2198 STATIC_ASSERT(kSmiTagSize == 1);
2193 __ sar(eax, 1); 2199 __ sar(eax, 1);
2194 __ sub(Operand(esp), Immediate(2 * kPointerSize)); 2200 __ sub(Operand(esp), Immediate(2 * kPointerSize));
2195 __ mov(Operand(esp, 0), eax); 2201 __ mov(Operand(esp, 0), eax);
2196 __ fild_s(Operand(esp, 0)); 2202 __ fild_s(Operand(esp, 0));
2197 __ fst_d(Operand(esp, 0)); 2203 __ fst_d(Operand(esp, 0));
2198 __ pop(edx); 2204 __ pop(edx);
2199 __ pop(ebx); 2205 __ pop(ebx);
2200 __ jmp(&loaded); 2206 __ jmp(&loaded, Label::kNear);
2201 __ bind(&input_not_smi); 2207 __ bind(&input_not_smi);
2202 // Check if input is a HeapNumber. 2208 // Check if input is a HeapNumber.
2203 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 2209 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2204 Factory* factory = masm->isolate()->factory(); 2210 Factory* factory = masm->isolate()->factory();
2205 __ cmp(Operand(ebx), Immediate(factory->heap_number_map())); 2211 __ cmp(Operand(ebx), Immediate(factory->heap_number_map()));
2206 __ j(not_equal, &runtime_call); 2212 __ j(not_equal, &runtime_call);
2207 // Input is a HeapNumber. Push it on the FPU stack and load its 2213 // Input is a HeapNumber. Push it on the FPU stack and load its
2208 // low and high words into ebx, edx. 2214 // low and high words into ebx, edx.
2209 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset)); 2215 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
2210 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset)); 2216 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after
2264 CHECK_EQ(12, elem2_start - elem_start); // Two uint_32's and a pointer. 2270 CHECK_EQ(12, elem2_start - elem_start); // Two uint_32's and a pointer.
2265 CHECK_EQ(0, elem_in0 - elem_start); 2271 CHECK_EQ(0, elem_in0 - elem_start);
2266 CHECK_EQ(kIntSize, elem_in1 - elem_start); 2272 CHECK_EQ(kIntSize, elem_in1 - elem_start);
2267 CHECK_EQ(2 * kIntSize, elem_out - elem_start); 2273 CHECK_EQ(2 * kIntSize, elem_out - elem_start);
2268 } 2274 }
2269 #endif 2275 #endif
2270 // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12]. 2276 // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12].
2271 __ lea(ecx, Operand(ecx, ecx, times_2, 0)); 2277 __ lea(ecx, Operand(ecx, ecx, times_2, 0));
2272 __ lea(ecx, Operand(eax, ecx, times_4, 0)); 2278 __ lea(ecx, Operand(eax, ecx, times_4, 0));
2273 // Check if cache matches: Double value is stored in uint32_t[2] array. 2279 // Check if cache matches: Double value is stored in uint32_t[2] array.
2274 NearLabel cache_miss; 2280 Label cache_miss;
2275 __ cmp(ebx, Operand(ecx, 0)); 2281 __ cmp(ebx, Operand(ecx, 0));
2276 __ j(not_equal, &cache_miss); 2282 __ j(not_equal, &cache_miss, Label::kNear);
2277 __ cmp(edx, Operand(ecx, kIntSize)); 2283 __ cmp(edx, Operand(ecx, kIntSize));
2278 __ j(not_equal, &cache_miss); 2284 __ j(not_equal, &cache_miss, Label::kNear);
2279 // Cache hit! 2285 // Cache hit!
2280 __ mov(eax, Operand(ecx, 2 * kIntSize)); 2286 __ mov(eax, Operand(ecx, 2 * kIntSize));
2281 if (tagged) { 2287 if (tagged) {
2282 __ fstp(0); 2288 __ fstp(0);
2283 __ ret(kPointerSize); 2289 __ ret(kPointerSize);
2284 } else { // UNTAGGED. 2290 } else { // UNTAGGED.
2285 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); 2291 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2286 __ Ret(); 2292 __ Ret();
2287 } 2293 }
2288 2294
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after
2366 2372
2367 void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) { 2373 void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
2368 // Only free register is edi. 2374 // Only free register is edi.
2369 // Input value is on FP stack, and also in ebx/edx. 2375 // Input value is on FP stack, and also in ebx/edx.
2370 // Input value is possibly in xmm1. 2376 // Input value is possibly in xmm1.
2371 // Address of result (a newly allocated HeapNumber) may be in eax. 2377 // Address of result (a newly allocated HeapNumber) may be in eax.
2372 if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) { 2378 if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) {
2373 // Both fsin and fcos require arguments in the range +/-2^63 and 2379 // Both fsin and fcos require arguments in the range +/-2^63 and
2374 // return NaN for infinities and NaN. They can share all code except 2380 // return NaN for infinities and NaN. They can share all code except
2375 // the actual fsin/fcos operation. 2381 // the actual fsin/fcos operation.
2376 NearLabel in_range, done; 2382 Label in_range, done;
2377 // If argument is outside the range -2^63..2^63, fsin/cos doesn't 2383 // If argument is outside the range -2^63..2^63, fsin/cos doesn't
2378 // work. We must reduce it to the appropriate range. 2384 // work. We must reduce it to the appropriate range.
2379 __ mov(edi, edx); 2385 __ mov(edi, edx);
2380 __ and_(Operand(edi), Immediate(0x7ff00000)); // Exponent only. 2386 __ and_(Operand(edi), Immediate(0x7ff00000)); // Exponent only.
2381 int supported_exponent_limit = 2387 int supported_exponent_limit =
2382 (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift; 2388 (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
2383 __ cmp(Operand(edi), Immediate(supported_exponent_limit)); 2389 __ cmp(Operand(edi), Immediate(supported_exponent_limit));
2384 __ j(below, &in_range, taken); 2390 __ j(below, &in_range, Label::kNear);
2385 // Check for infinity and NaN. Both return NaN for sin. 2391 // Check for infinity and NaN. Both return NaN for sin.
2386 __ cmp(Operand(edi), Immediate(0x7ff00000)); 2392 __ cmp(Operand(edi), Immediate(0x7ff00000));
2387 NearLabel non_nan_result; 2393 Label non_nan_result;
2388 __ j(not_equal, &non_nan_result, taken); 2394 __ j(not_equal, &non_nan_result, Label::kNear);
2389 // Input is +/-Infinity or NaN. Result is NaN. 2395 // Input is +/-Infinity or NaN. Result is NaN.
2390 __ fstp(0); 2396 __ fstp(0);
2391 // NaN is represented by 0x7ff8000000000000. 2397 // NaN is represented by 0x7ff8000000000000.
2392 __ push(Immediate(0x7ff80000)); 2398 __ push(Immediate(0x7ff80000));
2393 __ push(Immediate(0)); 2399 __ push(Immediate(0));
2394 __ fld_d(Operand(esp, 0)); 2400 __ fld_d(Operand(esp, 0));
2395 __ add(Operand(esp), Immediate(2 * kPointerSize)); 2401 __ add(Operand(esp), Immediate(2 * kPointerSize));
2396 __ jmp(&done); 2402 __ jmp(&done, Label::kNear);
2397 2403
2398 __ bind(&non_nan_result); 2404 __ bind(&non_nan_result);
2399 2405
2400 // Use fpmod to restrict argument to the range +/-2*PI. 2406 // Use fpmod to restrict argument to the range +/-2*PI.
2401 __ mov(edi, eax); // Save eax before using fnstsw_ax. 2407 __ mov(edi, eax); // Save eax before using fnstsw_ax.
2402 __ fldpi(); 2408 __ fldpi();
2403 __ fadd(0); 2409 __ fadd(0);
2404 __ fld(1); 2410 __ fld(1);
2405 // FPU Stack: input, 2*pi, input. 2411 // FPU Stack: input, 2*pi, input.
2406 { 2412 {
2407 NearLabel no_exceptions; 2413 Label no_exceptions;
2408 __ fwait(); 2414 __ fwait();
2409 __ fnstsw_ax(); 2415 __ fnstsw_ax();
2410 // Clear if Illegal Operand or Zero Division exceptions are set. 2416 // Clear if Illegal Operand or Zero Division exceptions are set.
2411 __ test(Operand(eax), Immediate(5)); 2417 __ test(Operand(eax), Immediate(5));
2412 __ j(zero, &no_exceptions); 2418 __ j(zero, &no_exceptions, Label::kNear);
2413 __ fnclex(); 2419 __ fnclex();
2414 __ bind(&no_exceptions); 2420 __ bind(&no_exceptions);
2415 } 2421 }
2416 2422
2417 // Compute st(0) % st(1) 2423 // Compute st(0) % st(1)
2418 { 2424 {
2419 NearLabel partial_remainder_loop; 2425 Label partial_remainder_loop;
2420 __ bind(&partial_remainder_loop); 2426 __ bind(&partial_remainder_loop);
2421 __ fprem1(); 2427 __ fprem1();
2422 __ fwait(); 2428 __ fwait();
2423 __ fnstsw_ax(); 2429 __ fnstsw_ax();
2424 __ test(Operand(eax), Immediate(0x400 /* C2 */)); 2430 __ test(Operand(eax), Immediate(0x400 /* C2 */));
2425 // If C2 is set, computation only has partial result. Loop to 2431 // If C2 is set, computation only has partial result. Loop to
2426 // continue computation. 2432 // continue computation.
2427 __ j(not_zero, &partial_remainder_loop); 2433 __ j(not_zero, &partial_remainder_loop);
2428 } 2434 }
2429 // FPU Stack: input, 2*pi, input % 2*pi 2435 // FPU Stack: input, 2*pi, input % 2*pi
(...skipping 18 matching lines...) Expand all
2448 ASSERT(type_ == TranscendentalCache::LOG); 2454 ASSERT(type_ == TranscendentalCache::LOG);
2449 __ fldln2(); 2455 __ fldln2();
2450 __ fxch(); 2456 __ fxch();
2451 __ fyl2x(); 2457 __ fyl2x();
2452 } 2458 }
2453 } 2459 }
2454 2460
2455 2461
2456 // Input: edx, eax are the left and right objects of a bit op. 2462 // Input: edx, eax are the left and right objects of a bit op.
2457 // Output: eax, ecx are left and right integers for a bit op. 2463 // Output: eax, ecx are left and right integers for a bit op.
2458 void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm,
2459 TypeInfo type_info,
2460 bool use_sse3,
2461 Label* conversion_failure) {
2462 // Check float operands.
2463 Label arg1_is_object, check_undefined_arg1;
2464 Label arg2_is_object, check_undefined_arg2;
2465 Label load_arg2, done;
2466
2467 if (!type_info.IsDouble()) {
2468 if (!type_info.IsSmi()) {
2469 __ test(edx, Immediate(kSmiTagMask));
2470 __ j(not_zero, &arg1_is_object);
2471 } else {
2472 if (FLAG_debug_code) __ AbortIfNotSmi(edx);
2473 }
2474 __ SmiUntag(edx);
2475 __ jmp(&load_arg2);
2476 }
2477
2478 __ bind(&arg1_is_object);
2479
2480 // Get the untagged integer version of the edx heap number in ecx.
2481 IntegerConvert(masm, edx, type_info, use_sse3, conversion_failure);
2482 __ mov(edx, ecx);
2483
2484 // Here edx has the untagged integer, eax has a Smi or a heap number.
2485 __ bind(&load_arg2);
2486 if (!type_info.IsDouble()) {
2487 // Test if arg2 is a Smi.
2488 if (!type_info.IsSmi()) {
2489 __ test(eax, Immediate(kSmiTagMask));
2490 __ j(not_zero, &arg2_is_object);
2491 } else {
2492 if (FLAG_debug_code) __ AbortIfNotSmi(eax);
2493 }
2494 __ SmiUntag(eax);
2495 __ mov(ecx, eax);
2496 __ jmp(&done);
2497 }
2498
2499 __ bind(&arg2_is_object);
2500
2501 // Get the untagged integer version of the eax heap number in ecx.
2502 IntegerConvert(masm, eax, type_info, use_sse3, conversion_failure);
2503 __ bind(&done);
2504 __ mov(eax, edx);
2505 }
2506
2507
2508 // Input: edx, eax are the left and right objects of a bit op.
2509 // Output: eax, ecx are left and right integers for a bit op.
2510 void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm, 2464 void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
2511 bool use_sse3, 2465 bool use_sse3,
2512 Label* conversion_failure) { 2466 Label* conversion_failure) {
2513 // Check float operands. 2467 // Check float operands.
2514 Label arg1_is_object, check_undefined_arg1; 2468 Label arg1_is_object, check_undefined_arg1;
2515 Label arg2_is_object, check_undefined_arg2; 2469 Label arg2_is_object, check_undefined_arg2;
2516 Label load_arg2, done; 2470 Label load_arg2, done;
2517 2471
2518 // Test if arg1 is a Smi. 2472 // Test if arg1 is a Smi.
2519 __ test(edx, Immediate(kSmiTagMask)); 2473 __ test(edx, Immediate(kSmiTagMask));
2520 __ j(not_zero, &arg1_is_object); 2474 __ j(not_zero, &arg1_is_object);
2521 2475
2522 __ SmiUntag(edx); 2476 __ SmiUntag(edx);
2523 __ jmp(&load_arg2); 2477 __ jmp(&load_arg2);
2524 2478
2525 // If the argument is undefined it converts to zero (ECMA-262, section 9.5). 2479 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
2526 __ bind(&check_undefined_arg1); 2480 __ bind(&check_undefined_arg1);
2527 Factory* factory = masm->isolate()->factory(); 2481 Factory* factory = masm->isolate()->factory();
2528 __ cmp(edx, factory->undefined_value()); 2482 __ cmp(edx, factory->undefined_value());
2529 __ j(not_equal, conversion_failure); 2483 __ j(not_equal, conversion_failure);
2530 __ mov(edx, Immediate(0)); 2484 __ mov(edx, Immediate(0));
2531 __ jmp(&load_arg2); 2485 __ jmp(&load_arg2);
2532 2486
2533 __ bind(&arg1_is_object); 2487 __ bind(&arg1_is_object);
2534 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset)); 2488 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
2535 __ cmp(ebx, factory->heap_number_map()); 2489 __ cmp(ebx, factory->heap_number_map());
2536 __ j(not_equal, &check_undefined_arg1); 2490 __ j(not_equal, &check_undefined_arg1);
2537 2491
2538 // Get the untagged integer version of the edx heap number in ecx. 2492 // Get the untagged integer version of the edx heap number in ecx.
2539 IntegerConvert(masm, 2493 IntegerConvert(masm, edx, use_sse3, conversion_failure);
2540 edx,
2541 TypeInfo::Unknown(),
2542 use_sse3,
2543 conversion_failure);
2544 __ mov(edx, ecx); 2494 __ mov(edx, ecx);
2545 2495
2546 // Here edx has the untagged integer, eax has a Smi or a heap number. 2496 // Here edx has the untagged integer, eax has a Smi or a heap number.
2547 __ bind(&load_arg2); 2497 __ bind(&load_arg2);
2548 2498
2549 // Test if arg2 is a Smi. 2499 // Test if arg2 is a Smi.
2550 __ test(eax, Immediate(kSmiTagMask)); 2500 __ test(eax, Immediate(kSmiTagMask));
2551 __ j(not_zero, &arg2_is_object); 2501 __ j(not_zero, &arg2_is_object);
2552 2502
2553 __ SmiUntag(eax); 2503 __ SmiUntag(eax);
2554 __ mov(ecx, eax); 2504 __ mov(ecx, eax);
2555 __ jmp(&done); 2505 __ jmp(&done);
2556 2506
2557 // If the argument is undefined it converts to zero (ECMA-262, section 9.5). 2507 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
2558 __ bind(&check_undefined_arg2); 2508 __ bind(&check_undefined_arg2);
2559 __ cmp(eax, factory->undefined_value()); 2509 __ cmp(eax, factory->undefined_value());
2560 __ j(not_equal, conversion_failure); 2510 __ j(not_equal, conversion_failure);
2561 __ mov(ecx, Immediate(0)); 2511 __ mov(ecx, Immediate(0));
2562 __ jmp(&done); 2512 __ jmp(&done);
2563 2513
2564 __ bind(&arg2_is_object); 2514 __ bind(&arg2_is_object);
2565 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 2515 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2566 __ cmp(ebx, factory->heap_number_map()); 2516 __ cmp(ebx, factory->heap_number_map());
2567 __ j(not_equal, &check_undefined_arg2); 2517 __ j(not_equal, &check_undefined_arg2);
2568 2518
2569 // Get the untagged integer version of the eax heap number in ecx. 2519 // Get the untagged integer version of the eax heap number in ecx.
2570 IntegerConvert(masm, 2520 IntegerConvert(masm, eax, use_sse3, conversion_failure);
2571 eax,
2572 TypeInfo::Unknown(),
2573 use_sse3,
2574 conversion_failure);
2575 __ bind(&done); 2521 __ bind(&done);
2576 __ mov(eax, edx); 2522 __ mov(eax, edx);
2577 } 2523 }
2578 2524
2579 2525
2580 void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
2581 TypeInfo type_info,
2582 bool use_sse3,
2583 Label* conversion_failure) {
2584 if (type_info.IsNumber()) {
2585 LoadNumbersAsIntegers(masm, type_info, use_sse3, conversion_failure);
2586 } else {
2587 LoadUnknownsAsIntegers(masm, use_sse3, conversion_failure);
2588 }
2589 }
2590
2591
2592 void FloatingPointHelper::CheckLoadedIntegersWereInt32(MacroAssembler* masm, 2526 void FloatingPointHelper::CheckLoadedIntegersWereInt32(MacroAssembler* masm,
2593 bool use_sse3, 2527 bool use_sse3,
2594 Label* not_int32) { 2528 Label* not_int32) {
2595 return; 2529 return;
2596 } 2530 }
2597 2531
2598 2532
2599 void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm, 2533 void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
2600 Register number) { 2534 Register number) {
2601 NearLabel load_smi, done; 2535 Label load_smi, done;
2602 2536
2603 __ test(number, Immediate(kSmiTagMask)); 2537 __ test(number, Immediate(kSmiTagMask));
2604 __ j(zero, &load_smi, not_taken); 2538 __ j(zero, &load_smi, Label::kNear);
2605 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset)); 2539 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
2606 __ jmp(&done); 2540 __ jmp(&done, Label::kNear);
2607 2541
2608 __ bind(&load_smi); 2542 __ bind(&load_smi);
2609 __ SmiUntag(number); 2543 __ SmiUntag(number);
2610 __ push(number); 2544 __ push(number);
2611 __ fild_s(Operand(esp, 0)); 2545 __ fild_s(Operand(esp, 0));
2612 __ pop(number); 2546 __ pop(number);
2613 2547
2614 __ bind(&done); 2548 __ bind(&done);
2615 } 2549 }
2616 2550
2617 2551
2618 void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) { 2552 void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
2619 NearLabel load_smi_edx, load_eax, load_smi_eax, done; 2553 Label load_smi_edx, load_eax, load_smi_eax, done;
2620 // Load operand in edx into xmm0. 2554 // Load operand in edx into xmm0.
2621 __ test(edx, Immediate(kSmiTagMask)); 2555 __ test(edx, Immediate(kSmiTagMask));
2622 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi. 2556 // Argument in edx is a smi.
2557 __ j(zero, &load_smi_edx, Label::kNear);
2623 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); 2558 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
2624 2559
2625 __ bind(&load_eax); 2560 __ bind(&load_eax);
2626 // Load operand in eax into xmm1. 2561 // Load operand in eax into xmm1.
2627 __ test(eax, Immediate(kSmiTagMask)); 2562 __ test(eax, Immediate(kSmiTagMask));
2628 __ j(zero, &load_smi_eax, not_taken); // Argument in eax is a smi. 2563 // Argument in eax is a smi.
2564 __ j(zero, &load_smi_eax, Label::kNear);
2629 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); 2565 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2630 __ jmp(&done); 2566 __ jmp(&done, Label::kNear);
2631 2567
2632 __ bind(&load_smi_edx); 2568 __ bind(&load_smi_edx);
2633 __ SmiUntag(edx); // Untag smi before converting to float. 2569 __ SmiUntag(edx); // Untag smi before converting to float.
2634 __ cvtsi2sd(xmm0, Operand(edx)); 2570 __ cvtsi2sd(xmm0, Operand(edx));
2635 __ SmiTag(edx); // Retag smi for heap number overwriting test. 2571 __ SmiTag(edx); // Retag smi for heap number overwriting test.
2636 __ jmp(&load_eax); 2572 __ jmp(&load_eax);
2637 2573
2638 __ bind(&load_smi_eax); 2574 __ bind(&load_smi_eax);
2639 __ SmiUntag(eax); // Untag smi before converting to float. 2575 __ SmiUntag(eax); // Untag smi before converting to float.
2640 __ cvtsi2sd(xmm1, Operand(eax)); 2576 __ cvtsi2sd(xmm1, Operand(eax));
2641 __ SmiTag(eax); // Retag smi for heap number overwriting test. 2577 __ SmiTag(eax); // Retag smi for heap number overwriting test.
2642 2578
2643 __ bind(&done); 2579 __ bind(&done);
2644 } 2580 }
2645 2581
2646 2582
2647 void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm, 2583 void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
2648 Label* not_numbers) { 2584 Label* not_numbers) {
2649 NearLabel load_smi_edx, load_eax, load_smi_eax, load_float_eax, done; 2585 Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
2650 // Load operand in edx into xmm0, or branch to not_numbers. 2586 // Load operand in edx into xmm0, or branch to not_numbers.
2651 __ test(edx, Immediate(kSmiTagMask)); 2587 __ test(edx, Immediate(kSmiTagMask));
2652 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi. 2588 // Argument in edx is a smi.
2589 __ j(zero, &load_smi_edx, Label::kNear);
2653 Factory* factory = masm->isolate()->factory(); 2590 Factory* factory = masm->isolate()->factory();
2654 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map()); 2591 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
2655 __ j(not_equal, not_numbers); // Argument in edx is not a number. 2592 __ j(not_equal, not_numbers); // Argument in edx is not a number.
2656 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); 2593 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
2657 __ bind(&load_eax); 2594 __ bind(&load_eax);
2658 // Load operand in eax into xmm1, or branch to not_numbers. 2595 // Load operand in eax into xmm1, or branch to not_numbers.
2659 __ test(eax, Immediate(kSmiTagMask)); 2596 __ test(eax, Immediate(kSmiTagMask));
2660 __ j(zero, &load_smi_eax, not_taken); // Argument in eax is a smi. 2597 // Argument in eax is a smi.
2598 __ j(zero, &load_smi_eax, Label::kNear);
2661 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map()); 2599 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
2662 __ j(equal, &load_float_eax); 2600 __ j(equal, &load_float_eax, Label::kNear);
2663 __ jmp(not_numbers); // Argument in eax is not a number. 2601 __ jmp(not_numbers); // Argument in eax is not a number.
2664 __ bind(&load_smi_edx); 2602 __ bind(&load_smi_edx);
2665 __ SmiUntag(edx); // Untag smi before converting to float. 2603 __ SmiUntag(edx); // Untag smi before converting to float.
2666 __ cvtsi2sd(xmm0, Operand(edx)); 2604 __ cvtsi2sd(xmm0, Operand(edx));
2667 __ SmiTag(edx); // Retag smi for heap number overwriting test. 2605 __ SmiTag(edx); // Retag smi for heap number overwriting test.
2668 __ jmp(&load_eax); 2606 __ jmp(&load_eax);
2669 __ bind(&load_smi_eax); 2607 __ bind(&load_smi_eax);
2670 __ SmiUntag(eax); // Untag smi before converting to float. 2608 __ SmiUntag(eax); // Untag smi before converting to float.
2671 __ cvtsi2sd(xmm1, Operand(eax)); 2609 __ cvtsi2sd(xmm1, Operand(eax));
2672 __ SmiTag(eax); // Retag smi for heap number overwriting test. 2610 __ SmiTag(eax); // Retag smi for heap number overwriting test.
2673 __ jmp(&done); 2611 __ jmp(&done, Label::kNear);
2674 __ bind(&load_float_eax); 2612 __ bind(&load_float_eax);
2675 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); 2613 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2676 __ bind(&done); 2614 __ bind(&done);
2677 } 2615 }
2678 2616
2679 2617
2680 void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm, 2618 void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm,
2681 Register scratch) { 2619 Register scratch) {
2682 const Register left = edx; 2620 const Register left = edx;
2683 const Register right = eax; 2621 const Register right = eax;
(...skipping 20 matching lines...) Expand all
2704 __ cvtsi2sd(xmm2, Operand(scratch)); 2642 __ cvtsi2sd(xmm2, Operand(scratch));
2705 __ ucomisd(xmm1, xmm2); 2643 __ ucomisd(xmm1, xmm2);
2706 __ j(not_zero, non_int32); 2644 __ j(not_zero, non_int32);
2707 __ j(carry, non_int32); 2645 __ j(carry, non_int32);
2708 } 2646 }
2709 2647
2710 2648
2711 void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm, 2649 void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
2712 Register scratch, 2650 Register scratch,
2713 ArgLocation arg_location) { 2651 ArgLocation arg_location) {
2714 NearLabel load_smi_1, load_smi_2, done_load_1, done; 2652 Label load_smi_1, load_smi_2, done_load_1, done;
2715 if (arg_location == ARGS_IN_REGISTERS) { 2653 if (arg_location == ARGS_IN_REGISTERS) {
2716 __ mov(scratch, edx); 2654 __ mov(scratch, edx);
2717 } else { 2655 } else {
2718 __ mov(scratch, Operand(esp, 2 * kPointerSize)); 2656 __ mov(scratch, Operand(esp, 2 * kPointerSize));
2719 } 2657 }
2720 __ test(scratch, Immediate(kSmiTagMask)); 2658 __ test(scratch, Immediate(kSmiTagMask));
2721 __ j(zero, &load_smi_1, not_taken); 2659 __ j(zero, &load_smi_1, Label::kNear);
2722 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset)); 2660 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
2723 __ bind(&done_load_1); 2661 __ bind(&done_load_1);
2724 2662
2725 if (arg_location == ARGS_IN_REGISTERS) { 2663 if (arg_location == ARGS_IN_REGISTERS) {
2726 __ mov(scratch, eax); 2664 __ mov(scratch, eax);
2727 } else { 2665 } else {
2728 __ mov(scratch, Operand(esp, 1 * kPointerSize)); 2666 __ mov(scratch, Operand(esp, 1 * kPointerSize));
2729 } 2667 }
2730 __ test(scratch, Immediate(kSmiTagMask)); 2668 __ test(scratch, Immediate(kSmiTagMask));
2731 __ j(zero, &load_smi_2, not_taken); 2669 __ j(zero, &load_smi_2, Label::kNear);
2732 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset)); 2670 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
2733 __ jmp(&done); 2671 __ jmp(&done, Label::kNear);
2734 2672
2735 __ bind(&load_smi_1); 2673 __ bind(&load_smi_1);
2736 __ SmiUntag(scratch); 2674 __ SmiUntag(scratch);
2737 __ push(scratch); 2675 __ push(scratch);
2738 __ fild_s(Operand(esp, 0)); 2676 __ fild_s(Operand(esp, 0));
2739 __ pop(scratch); 2677 __ pop(scratch);
2740 __ jmp(&done_load_1); 2678 __ jmp(&done_load_1);
2741 2679
2742 __ bind(&load_smi_2); 2680 __ bind(&load_smi_2);
2743 __ SmiUntag(scratch); 2681 __ SmiUntag(scratch);
(...skipping 19 matching lines...) Expand all
2763 __ SmiUntag(scratch); 2701 __ SmiUntag(scratch);
2764 __ mov(Operand(esp, 0), scratch); 2702 __ mov(Operand(esp, 0), scratch);
2765 __ fild_s(Operand(esp, 0)); 2703 __ fild_s(Operand(esp, 0));
2766 __ pop(scratch); 2704 __ pop(scratch);
2767 } 2705 }
2768 2706
2769 2707
2770 void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm, 2708 void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
2771 Label* non_float, 2709 Label* non_float,
2772 Register scratch) { 2710 Register scratch) {
2773 NearLabel test_other, done; 2711 Label test_other, done;
2774 // Test if both operands are floats or smi -> scratch=k_is_float; 2712 // Test if both operands are floats or smi -> scratch=k_is_float;
2775 // Otherwise scratch = k_not_float. 2713 // Otherwise scratch = k_not_float.
2776 __ test(edx, Immediate(kSmiTagMask)); 2714 __ test(edx, Immediate(kSmiTagMask));
2777 __ j(zero, &test_other, not_taken); // argument in edx is OK 2715 __ j(zero, &test_other, Label::kNear); // argument in edx is OK
2778 __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset)); 2716 __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
2779 Factory* factory = masm->isolate()->factory(); 2717 Factory* factory = masm->isolate()->factory();
2780 __ cmp(scratch, factory->heap_number_map()); 2718 __ cmp(scratch, factory->heap_number_map());
2781 __ j(not_equal, non_float); // argument in edx is not a number -> NaN 2719 __ j(not_equal, non_float); // argument in edx is not a number -> NaN
2782 2720
2783 __ bind(&test_other); 2721 __ bind(&test_other);
2784 __ test(eax, Immediate(kSmiTagMask)); 2722 __ test(eax, Immediate(kSmiTagMask));
2785 __ j(zero, &done); // argument in eax is OK 2723 __ j(zero, &done, Label::kNear); // argument in eax is OK
2786 __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset)); 2724 __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
2787 __ cmp(scratch, factory->heap_number_map()); 2725 __ cmp(scratch, factory->heap_number_map());
2788 __ j(not_equal, non_float); // argument in eax is not a number -> NaN 2726 __ j(not_equal, non_float); // argument in eax is not a number -> NaN
2789 2727
2790 // Fall-through: Both operands are numbers. 2728 // Fall-through: Both operands are numbers.
2791 __ bind(&done); 2729 __ bind(&done);
2792 } 2730 }
2793 2731
2794 2732
2795 void FloatingPointHelper::CheckFloatOperandsAreInt32(MacroAssembler* masm, 2733 void FloatingPointHelper::CheckFloatOperandsAreInt32(MacroAssembler* masm,
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after
2840 // Optimized version of pow if exponent is a smi. 2778 // Optimized version of pow if exponent is a smi.
2841 // xmm0 contains the base. 2779 // xmm0 contains the base.
2842 __ bind(&powi); 2780 __ bind(&powi);
2843 __ SmiUntag(eax); 2781 __ SmiUntag(eax);
2844 2782
2845 // Save exponent in base as we need to check if exponent is negative later. 2783 // Save exponent in base as we need to check if exponent is negative later.
2846 // We know that base and exponent are in different registers. 2784 // We know that base and exponent are in different registers.
2847 __ mov(edx, eax); 2785 __ mov(edx, eax);
2848 2786
2849 // Get absolute value of exponent. 2787 // Get absolute value of exponent.
2850 NearLabel no_neg; 2788 Label no_neg;
2851 __ cmp(eax, 0); 2789 __ cmp(eax, 0);
2852 __ j(greater_equal, &no_neg); 2790 __ j(greater_equal, &no_neg, Label::kNear);
2853 __ neg(eax); 2791 __ neg(eax);
2854 __ bind(&no_neg); 2792 __ bind(&no_neg);
2855 2793
2856 // Load xmm1 with 1. 2794 // Load xmm1 with 1.
2857 __ movsd(xmm1, xmm3); 2795 __ movsd(xmm1, xmm3);
2858 NearLabel while_true; 2796 Label while_true;
2859 NearLabel no_multiply; 2797 Label no_multiply;
2860 2798
2861 __ bind(&while_true); 2799 __ bind(&while_true);
2862 __ shr(eax, 1); 2800 __ shr(eax, 1);
2863 __ j(not_carry, &no_multiply); 2801 __ j(not_carry, &no_multiply, Label::kNear);
2864 __ mulsd(xmm1, xmm0); 2802 __ mulsd(xmm1, xmm0);
2865 __ bind(&no_multiply); 2803 __ bind(&no_multiply);
2866 __ mulsd(xmm0, xmm0); 2804 __ mulsd(xmm0, xmm0);
2867 __ j(not_zero, &while_true); 2805 __ j(not_zero, &while_true);
2868 2806
2869 // base has the original value of the exponent - if the exponent is 2807 // base has the original value of the exponent - if the exponent is
2870 // negative return 1/result. 2808 // negative return 1/result.
2871 __ test(edx, Operand(edx)); 2809 __ test(edx, Operand(edx));
2872 __ j(positive, &allocate_return); 2810 __ j(positive, &allocate_return);
2873 // Special case if xmm1 has reached infinity. 2811 // Special case if xmm1 has reached infinity.
(...skipping 10 matching lines...) Expand all
2884 // on doubles. 2822 // on doubles.
2885 __ bind(&exponent_nonsmi); 2823 __ bind(&exponent_nonsmi);
2886 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), 2824 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
2887 factory->heap_number_map()); 2825 factory->heap_number_map());
2888 __ j(not_equal, &call_runtime); 2826 __ j(not_equal, &call_runtime);
2889 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); 2827 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
2890 // Test if exponent is nan. 2828 // Test if exponent is nan.
2891 __ ucomisd(xmm1, xmm1); 2829 __ ucomisd(xmm1, xmm1);
2892 __ j(parity_even, &call_runtime); 2830 __ j(parity_even, &call_runtime);
2893 2831
2894 NearLabel base_not_smi; 2832 Label base_not_smi;
2895 NearLabel handle_special_cases; 2833 Label handle_special_cases;
2896 __ test(edx, Immediate(kSmiTagMask)); 2834 __ test(edx, Immediate(kSmiTagMask));
2897 __ j(not_zero, &base_not_smi); 2835 __ j(not_zero, &base_not_smi, Label::kNear);
2898 __ SmiUntag(edx); 2836 __ SmiUntag(edx);
2899 __ cvtsi2sd(xmm0, Operand(edx)); 2837 __ cvtsi2sd(xmm0, Operand(edx));
2900 __ jmp(&handle_special_cases); 2838 __ jmp(&handle_special_cases, Label::kNear);
2901 2839
2902 __ bind(&base_not_smi); 2840 __ bind(&base_not_smi);
2903 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), 2841 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
2904 factory->heap_number_map()); 2842 factory->heap_number_map());
2905 __ j(not_equal, &call_runtime); 2843 __ j(not_equal, &call_runtime);
2906 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset)); 2844 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
2907 __ and_(ecx, HeapNumber::kExponentMask); 2845 __ and_(ecx, HeapNumber::kExponentMask);
2908 __ cmp(Operand(ecx), Immediate(HeapNumber::kExponentMask)); 2846 __ cmp(Operand(ecx), Immediate(HeapNumber::kExponentMask));
2909 // base is NaN or +/-Infinity 2847 // base is NaN or +/-Infinity
2910 __ j(greater_equal, &call_runtime); 2848 __ j(greater_equal, &call_runtime);
2911 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); 2849 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
2912 2850
2913 // base is in xmm0 and exponent is in xmm1. 2851 // base is in xmm0 and exponent is in xmm1.
2914 __ bind(&handle_special_cases); 2852 __ bind(&handle_special_cases);
2915 NearLabel not_minus_half; 2853 Label not_minus_half;
2916 // Test for -0.5. 2854 // Test for -0.5.
2917 // Load xmm2 with -0.5. 2855 // Load xmm2 with -0.5.
2918 __ mov(ecx, Immediate(0xBF000000)); 2856 __ mov(ecx, Immediate(0xBF000000));
2919 __ movd(xmm2, Operand(ecx)); 2857 __ movd(xmm2, Operand(ecx));
2920 __ cvtss2sd(xmm2, xmm2); 2858 __ cvtss2sd(xmm2, xmm2);
2921 // xmm2 now has -0.5. 2859 // xmm2 now has -0.5.
2922 __ ucomisd(xmm2, xmm1); 2860 __ ucomisd(xmm2, xmm1);
2923 __ j(not_equal, &not_minus_half); 2861 __ j(not_equal, &not_minus_half, Label::kNear);
2924 2862
2925 // Calculates reciprocal of square root. 2863 // Calculates reciprocal of square root.
2926 // sqrtsd returns -0 when input is -0. ECMA spec requires +0. 2864 // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
2927 __ xorps(xmm1, xmm1); 2865 __ xorps(xmm1, xmm1);
2928 __ addsd(xmm1, xmm0); 2866 __ addsd(xmm1, xmm0);
2929 __ sqrtsd(xmm1, xmm1); 2867 __ sqrtsd(xmm1, xmm1);
2930 __ divsd(xmm3, xmm1); 2868 __ divsd(xmm3, xmm1);
2931 __ movsd(xmm1, xmm3); 2869 __ movsd(xmm1, xmm3);
2932 __ jmp(&allocate_return); 2870 __ jmp(&allocate_return);
2933 2871
(...skipping 26 matching lines...) Expand all
2960 // The key is in edx and the parameter count is in eax. 2898 // The key is in edx and the parameter count is in eax.
2961 2899
2962 // The displacement is used for skipping the frame pointer on the 2900 // The displacement is used for skipping the frame pointer on the
2963 // stack. It is the offset of the last parameter (if any) relative 2901 // stack. It is the offset of the last parameter (if any) relative
2964 // to the frame pointer. 2902 // to the frame pointer.
2965 static const int kDisplacement = 1 * kPointerSize; 2903 static const int kDisplacement = 1 * kPointerSize;
2966 2904
2967 // Check that the key is a smi. 2905 // Check that the key is a smi.
2968 Label slow; 2906 Label slow;
2969 __ test(edx, Immediate(kSmiTagMask)); 2907 __ test(edx, Immediate(kSmiTagMask));
2970 __ j(not_zero, &slow, not_taken); 2908 __ j(not_zero, &slow);
2971 2909
2972 // Check if the calling frame is an arguments adaptor frame. 2910 // Check if the calling frame is an arguments adaptor frame.
2973 NearLabel adaptor; 2911 Label adaptor;
2974 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); 2912 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2975 __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset)); 2913 __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
2976 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2914 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2977 __ j(equal, &adaptor); 2915 __ j(equal, &adaptor, Label::kNear);
2978 2916
2979 // Check index against formal parameters count limit passed in 2917 // Check index against formal parameters count limit passed in
2980 // through register eax. Use unsigned comparison to get negative 2918 // through register eax. Use unsigned comparison to get negative
2981 // check for free. 2919 // check for free.
2982 __ cmp(edx, Operand(eax)); 2920 __ cmp(edx, Operand(eax));
2983 __ j(above_equal, &slow, not_taken); 2921 __ j(above_equal, &slow);
2984 2922
2985 // Read the argument from the stack and return it. 2923 // Read the argument from the stack and return it.
2986 STATIC_ASSERT(kSmiTagSize == 1); 2924 STATIC_ASSERT(kSmiTagSize == 1);
2987 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these. 2925 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
2988 __ lea(ebx, Operand(ebp, eax, times_2, 0)); 2926 __ lea(ebx, Operand(ebp, eax, times_2, 0));
2989 __ neg(edx); 2927 __ neg(edx);
2990 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement)); 2928 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
2991 __ ret(0); 2929 __ ret(0);
2992 2930
2993 // Arguments adaptor case: Check index against actual arguments 2931 // Arguments adaptor case: Check index against actual arguments
2994 // limit found in the arguments adaptor frame. Use unsigned 2932 // limit found in the arguments adaptor frame. Use unsigned
2995 // comparison to get negative check for free. 2933 // comparison to get negative check for free.
2996 __ bind(&adaptor); 2934 __ bind(&adaptor);
2997 __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2935 __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2998 __ cmp(edx, Operand(ecx)); 2936 __ cmp(edx, Operand(ecx));
2999 __ j(above_equal, &slow, not_taken); 2937 __ j(above_equal, &slow);
3000 2938
3001 // Read the argument from the stack and return it. 2939 // Read the argument from the stack and return it.
3002 STATIC_ASSERT(kSmiTagSize == 1); 2940 STATIC_ASSERT(kSmiTagSize == 1);
3003 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these. 2941 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
3004 __ lea(ebx, Operand(ebx, ecx, times_2, 0)); 2942 __ lea(ebx, Operand(ebx, ecx, times_2, 0));
3005 __ neg(edx); 2943 __ neg(edx);
3006 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement)); 2944 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
3007 __ ret(0); 2945 __ ret(0);
3008 2946
3009 // Slow-case: Handle non-smi or out-of-bounds access to arguments 2947 // Slow-case: Handle non-smi or out-of-bounds access to arguments
(...skipping 30 matching lines...) Expand all
3040 2978
3041 // Patch the arguments.length and the parameters pointer. 2979 // Patch the arguments.length and the parameters pointer.
3042 __ bind(&adaptor_frame); 2980 __ bind(&adaptor_frame);
3043 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2981 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3044 __ mov(Operand(esp, 1 * kPointerSize), ecx); 2982 __ mov(Operand(esp, 1 * kPointerSize), ecx);
3045 __ lea(edx, Operand(edx, ecx, times_2, kDisplacement)); 2983 __ lea(edx, Operand(edx, ecx, times_2, kDisplacement));
3046 __ mov(Operand(esp, 2 * kPointerSize), edx); 2984 __ mov(Operand(esp, 2 * kPointerSize), edx);
3047 2985
3048 // Try the new space allocation. Start out with computing the size of 2986 // Try the new space allocation. Start out with computing the size of
3049 // the arguments object and the elements array. 2987 // the arguments object and the elements array.
3050 NearLabel add_arguments_object; 2988 Label add_arguments_object;
3051 __ bind(&try_allocate); 2989 __ bind(&try_allocate);
3052 __ test(ecx, Operand(ecx)); 2990 __ test(ecx, Operand(ecx));
3053 __ j(zero, &add_arguments_object); 2991 __ j(zero, &add_arguments_object, Label::kNear);
3054 __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize)); 2992 __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
3055 __ bind(&add_arguments_object); 2993 __ bind(&add_arguments_object);
3056 __ add(Operand(ecx), Immediate(GetArgumentsObjectSize())); 2994 __ add(Operand(ecx), Immediate(GetArgumentsObjectSize()));
3057 2995
3058 // Do the allocation of both objects in one go. 2996 // Do the allocation of both objects in one go.
3059 __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT); 2997 __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);
3060 2998
3061 // Get the arguments boilerplate from the current (global) context. 2999 // Get the arguments boilerplate from the current (global) context.
3062 __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); 3000 __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
3063 __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset)); 3001 __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
3099 __ lea(edi, Operand(eax, GetArgumentsObjectSize())); 3037 __ lea(edi, Operand(eax, GetArgumentsObjectSize()));
3100 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); 3038 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
3101 __ mov(FieldOperand(edi, FixedArray::kMapOffset), 3039 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3102 Immediate(masm->isolate()->factory()->fixed_array_map())); 3040 Immediate(masm->isolate()->factory()->fixed_array_map()));
3103 3041
3104 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); 3042 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
3105 // Untag the length for the loop below. 3043 // Untag the length for the loop below.
3106 __ SmiUntag(ecx); 3044 __ SmiUntag(ecx);
3107 3045
3108 // Copy the fixed array slots. 3046 // Copy the fixed array slots.
3109 NearLabel loop; 3047 Label loop;
3110 __ bind(&loop); 3048 __ bind(&loop);
3111 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver. 3049 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver.
3112 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx); 3050 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
3113 __ add(Operand(edi), Immediate(kPointerSize)); 3051 __ add(Operand(edi), Immediate(kPointerSize));
3114 __ sub(Operand(edx), Immediate(kPointerSize)); 3052 __ sub(Operand(edx), Immediate(kPointerSize));
3115 __ dec(ecx); 3053 __ dec(ecx);
3116 __ j(not_zero, &loop); 3054 __ j(not_zero, &loop);
3117 3055
3118 // Return and remove the on-stack parameters. 3056 // Return and remove the on-stack parameters.
3119 __ bind(&done); 3057 __ bind(&done);
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
3152 Label runtime, invoke_regexp; 3090 Label runtime, invoke_regexp;
3153 3091
3154 // Ensure that a RegExp stack is allocated. 3092 // Ensure that a RegExp stack is allocated.
3155 ExternalReference address_of_regexp_stack_memory_address = 3093 ExternalReference address_of_regexp_stack_memory_address =
3156 ExternalReference::address_of_regexp_stack_memory_address( 3094 ExternalReference::address_of_regexp_stack_memory_address(
3157 masm->isolate()); 3095 masm->isolate());
3158 ExternalReference address_of_regexp_stack_memory_size = 3096 ExternalReference address_of_regexp_stack_memory_size =
3159 ExternalReference::address_of_regexp_stack_memory_size(masm->isolate()); 3097 ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
3160 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); 3098 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3161 __ test(ebx, Operand(ebx)); 3099 __ test(ebx, Operand(ebx));
3162 __ j(zero, &runtime, not_taken); 3100 __ j(zero, &runtime);
3163 3101
3164 // Check that the first argument is a JSRegExp object. 3102 // Check that the first argument is a JSRegExp object.
3165 __ mov(eax, Operand(esp, kJSRegExpOffset)); 3103 __ mov(eax, Operand(esp, kJSRegExpOffset));
3166 STATIC_ASSERT(kSmiTag == 0); 3104 STATIC_ASSERT(kSmiTag == 0);
3167 __ test(eax, Immediate(kSmiTagMask)); 3105 __ test(eax, Immediate(kSmiTagMask));
3168 __ j(zero, &runtime); 3106 __ j(zero, &runtime);
3169 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx); 3107 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
3170 __ j(not_equal, &runtime); 3108 __ j(not_equal, &runtime);
3171 // Check that the RegExp has been compiled (data contains a fixed array). 3109 // Check that the RegExp has been compiled (data contains a fixed array).
3172 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); 3110 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
(...skipping 163 matching lines...) Expand 10 before | Expand all | Expand 10 after
3336 __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); 3274 __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
3337 __ mov(Operand(esp, 5 * kPointerSize), ecx); 3275 __ mov(Operand(esp, 5 * kPointerSize), ecx);
3338 3276
3339 // Argument 5: static offsets vector buffer. 3277 // Argument 5: static offsets vector buffer.
3340 __ mov(Operand(esp, 4 * kPointerSize), 3278 __ mov(Operand(esp, 4 * kPointerSize),
3341 Immediate(ExternalReference::address_of_static_offsets_vector( 3279 Immediate(ExternalReference::address_of_static_offsets_vector(
3342 masm->isolate()))); 3280 masm->isolate())));
3343 3281
3344 // Argument 4: End of string data 3282 // Argument 4: End of string data
3345 // Argument 3: Start of string data 3283 // Argument 3: Start of string data
3346 NearLabel setup_two_byte, setup_rest; 3284 Label setup_two_byte, setup_rest;
3347 __ test(edi, Operand(edi)); 3285 __ test(edi, Operand(edi));
3348 __ mov(edi, FieldOperand(eax, String::kLengthOffset)); 3286 __ mov(edi, FieldOperand(eax, String::kLengthOffset));
3349 __ j(zero, &setup_two_byte); 3287 __ j(zero, &setup_two_byte, Label::kNear);
3350 __ SmiUntag(edi); 3288 __ SmiUntag(edi);
3351 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize)); 3289 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize));
3352 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. 3290 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3353 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize)); 3291 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
3354 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. 3292 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
3355 __ jmp(&setup_rest); 3293 __ jmp(&setup_rest, Label::kNear);
3356 3294
3357 __ bind(&setup_two_byte); 3295 __ bind(&setup_two_byte);
3358 STATIC_ASSERT(kSmiTag == 0); 3296 STATIC_ASSERT(kSmiTag == 0);
3359 STATIC_ASSERT(kSmiTagSize == 1); // edi is smi (powered by 2). 3297 STATIC_ASSERT(kSmiTagSize == 1); // edi is smi (powered by 2).
3360 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqTwoByteString::kHeaderSize)); 3298 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqTwoByteString::kHeaderSize));
3361 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. 3299 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
3362 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize)); 3300 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
3363 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. 3301 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
3364 3302
3365 __ bind(&setup_rest); 3303 __ bind(&setup_rest);
3366 3304
3367 // Argument 2: Previous index. 3305 // Argument 2: Previous index.
3368 __ mov(Operand(esp, 1 * kPointerSize), ebx); 3306 __ mov(Operand(esp, 1 * kPointerSize), ebx);
3369 3307
3370 // Argument 1: Subject string. 3308 // Argument 1: Subject string.
3371 __ mov(Operand(esp, 0 * kPointerSize), eax); 3309 __ mov(Operand(esp, 0 * kPointerSize), eax);
3372 3310
3373 // Locate the code entry and call it. 3311 // Locate the code entry and call it.
3374 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag)); 3312 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
3375 __ call(Operand(edx)); 3313 __ call(Operand(edx));
3376 3314
3377 // Drop arguments and come back to JS mode. 3315 // Drop arguments and come back to JS mode.
3378 __ LeaveApiExitFrame(); 3316 __ LeaveApiExitFrame();
3379 3317
3380 // Check the result. 3318 // Check the result.
3381 Label success; 3319 Label success;
3382 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS); 3320 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
3383 __ j(equal, &success, taken); 3321 __ j(equal, &success);
3384 Label failure; 3322 Label failure;
3385 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE); 3323 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
3386 __ j(equal, &failure, taken); 3324 __ j(equal, &failure);
3387 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION); 3325 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
3388 // If not exception it can only be retry. Handle that in the runtime system. 3326 // If not exception it can only be retry. Handle that in the runtime system.
3389 __ j(not_equal, &runtime); 3327 __ j(not_equal, &runtime);
3390 // Result must now be exception. If there is no pending exception already a 3328 // Result must now be exception. If there is no pending exception already a
3391 // stack overflow (on the backtrack stack) was detected in RegExp code but 3329 // stack overflow (on the backtrack stack) was detected in RegExp code but
3392 // haven't created the exception yet. Handle that in the runtime system. 3330 // haven't created the exception yet. Handle that in the runtime system.
3393 // TODO(592): Rerunning the RegExp to get the stack overflow exception. 3331 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
3394 ExternalReference pending_exception(Isolate::k_pending_exception_address, 3332 ExternalReference pending_exception(Isolate::k_pending_exception_address,
3395 masm->isolate()); 3333 masm->isolate());
3396 __ mov(edx, 3334 __ mov(edx,
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
3459 kDontSaveFPRegs); 3397 kDontSaveFPRegs);
3460 3398
3461 // Get the static offsets vector filled by the native regexp code. 3399 // Get the static offsets vector filled by the native regexp code.
3462 ExternalReference address_of_static_offsets_vector = 3400 ExternalReference address_of_static_offsets_vector =
3463 ExternalReference::address_of_static_offsets_vector(masm->isolate()); 3401 ExternalReference::address_of_static_offsets_vector(masm->isolate());
3464 __ mov(ecx, Immediate(address_of_static_offsets_vector)); 3402 __ mov(ecx, Immediate(address_of_static_offsets_vector));
3465 3403
3466 // ebx: last_match_info backing store (FixedArray) 3404 // ebx: last_match_info backing store (FixedArray)
3467 // ecx: offsets vector 3405 // ecx: offsets vector
3468 // edx: number of capture registers 3406 // edx: number of capture registers
3469 NearLabel next_capture, done; 3407 Label next_capture, done;
3470 // Capture register counter starts from number of capture registers and 3408 // Capture register counter starts from number of capture registers and
3471 // counts down until wraping after zero. 3409 // counts down until wraping after zero.
3472 __ bind(&next_capture); 3410 __ bind(&next_capture);
3473 __ sub(Operand(edx), Immediate(1)); 3411 __ sub(Operand(edx), Immediate(1));
3474 __ j(negative, &done); 3412 __ j(negative, &done, Label::kNear);
3475 // Read the value from the static offsets vector buffer. 3413 // Read the value from the static offsets vector buffer.
3476 __ mov(edi, Operand(ecx, edx, times_int_size, 0)); 3414 __ mov(edi, Operand(ecx, edx, times_int_size, 0));
3477 __ SmiTag(edi); 3415 __ SmiTag(edi);
3478 // Store the smi value in the last match info. 3416 // Store the smi value in the last match info.
3479 __ mov(FieldOperand(ebx, 3417 __ mov(FieldOperand(ebx,
3480 edx, 3418 edx,
3481 times_pointer_size, 3419 times_pointer_size,
3482 RegExpImpl::kFirstCaptureOffset), 3420 RegExpImpl::kFirstCaptureOffset),
3483 edi); 3421 edi);
3484 __ jmp(&next_capture); 3422 __ jmp(&next_capture);
3485 __ bind(&done); 3423 __ bind(&done);
3486 3424
3487 // Return last match info. 3425 // Return last match info.
3488 __ mov(eax, Operand(esp, kLastMatchInfoOffset)); 3426 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
3489 __ ret(4 * kPointerSize); 3427 __ ret(4 * kPointerSize);
3490 3428
3491 // Do the runtime call to execute the regexp. 3429 // Do the runtime call to execute the regexp.
3492 __ bind(&runtime); 3430 __ bind(&runtime);
3493 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); 3431 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
3494 #endif // V8_INTERPRETED_REGEXP 3432 #endif // V8_INTERPRETED_REGEXP
3495 } 3433 }
3496 3434
3497 3435
3498 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { 3436 void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
3499 const int kMaxInlineLength = 100; 3437 const int kMaxInlineLength = 100;
3500 Label slowcase; 3438 Label slowcase;
3501 NearLabel done; 3439 Label done;
3502 __ mov(ebx, Operand(esp, kPointerSize * 3)); 3440 __ mov(ebx, Operand(esp, kPointerSize * 3));
3503 __ test(ebx, Immediate(kSmiTagMask)); 3441 __ test(ebx, Immediate(kSmiTagMask));
3504 __ j(not_zero, &slowcase); 3442 __ j(not_zero, &slowcase);
3505 __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength))); 3443 __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength)));
3506 __ j(above, &slowcase); 3444 __ j(above, &slowcase);
3507 // Smi-tagging is equivalent to multiplying by 2. 3445 // Smi-tagging is equivalent to multiplying by 2.
3508 STATIC_ASSERT(kSmiTag == 0); 3446 STATIC_ASSERT(kSmiTag == 0);
3509 STATIC_ASSERT(kSmiTagSize == 1); 3447 STATIC_ASSERT(kSmiTagSize == 1);
3510 // Allocate RegExpResult followed by FixedArray with size in ebx. 3448 // Allocate RegExpResult followed by FixedArray with size in ebx.
3511 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] 3449 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
3557 __ mov(edx, Immediate(factory->the_hole_value())); 3495 __ mov(edx, Immediate(factory->the_hole_value()));
3558 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize)); 3496 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
3559 // Fill fixed array elements with hole. 3497 // Fill fixed array elements with hole.
3560 // eax: JSArray. 3498 // eax: JSArray.
3561 // ecx: Number of elements to fill. 3499 // ecx: Number of elements to fill.
3562 // ebx: Start of elements in FixedArray. 3500 // ebx: Start of elements in FixedArray.
3563 // edx: the hole. 3501 // edx: the hole.
3564 Label loop; 3502 Label loop;
3565 __ test(ecx, Operand(ecx)); 3503 __ test(ecx, Operand(ecx));
3566 __ bind(&loop); 3504 __ bind(&loop);
3567 __ j(less_equal, &done); // Jump if ecx is negative or zero. 3505 __ j(less_equal, &done, Label::kNear); // Jump if ecx is negative or zero.
3568 __ sub(Operand(ecx), Immediate(1)); 3506 __ sub(Operand(ecx), Immediate(1));
3569 __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx); 3507 __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
3570 __ jmp(&loop); 3508 __ jmp(&loop);
3571 3509
3572 __ bind(&done); 3510 __ bind(&done);
3573 __ ret(3 * kPointerSize); 3511 __ ret(3 * kPointerSize);
3574 3512
3575 __ bind(&slowcase); 3513 __ bind(&slowcase);
3576 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); 3514 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
3577 } 3515 }
(...skipping 20 matching lines...) Expand all
3598 // Make the hash mask from the length of the number string cache. It 3536 // Make the hash mask from the length of the number string cache. It
3599 // contains two elements (number and string) for each cache entry. 3537 // contains two elements (number and string) for each cache entry.
3600 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); 3538 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
3601 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two. 3539 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two.
3602 __ sub(Operand(mask), Immediate(1)); // Make mask. 3540 __ sub(Operand(mask), Immediate(1)); // Make mask.
3603 3541
3604 // Calculate the entry in the number string cache. The hash value in the 3542 // Calculate the entry in the number string cache. The hash value in the
3605 // number string cache for smis is just the smi value, and the hash for 3543 // number string cache for smis is just the smi value, and the hash for
3606 // doubles is the xor of the upper and lower words. See 3544 // doubles is the xor of the upper and lower words. See
3607 // Heap::GetNumberStringCache. 3545 // Heap::GetNumberStringCache.
3608 NearLabel smi_hash_calculated; 3546 Label smi_hash_calculated;
3609 NearLabel load_result_from_cache; 3547 Label load_result_from_cache;
3610 if (object_is_smi) { 3548 if (object_is_smi) {
3611 __ mov(scratch, object); 3549 __ mov(scratch, object);
3612 __ SmiUntag(scratch); 3550 __ SmiUntag(scratch);
3613 } else { 3551 } else {
3614 NearLabel not_smi, hash_calculated; 3552 Label not_smi;
3615 STATIC_ASSERT(kSmiTag == 0); 3553 STATIC_ASSERT(kSmiTag == 0);
3616 __ test(object, Immediate(kSmiTagMask)); 3554 __ test(object, Immediate(kSmiTagMask));
3617 __ j(not_zero, &not_smi); 3555 __ j(not_zero, &not_smi, Label::kNear);
3618 __ mov(scratch, object); 3556 __ mov(scratch, object);
3619 __ SmiUntag(scratch); 3557 __ SmiUntag(scratch);
3620 __ jmp(&smi_hash_calculated); 3558 __ jmp(&smi_hash_calculated, Label::kNear);
3621 __ bind(&not_smi); 3559 __ bind(&not_smi);
3622 __ cmp(FieldOperand(object, HeapObject::kMapOffset), 3560 __ cmp(FieldOperand(object, HeapObject::kMapOffset),
3623 masm->isolate()->factory()->heap_number_map()); 3561 masm->isolate()->factory()->heap_number_map());
3624 __ j(not_equal, not_found); 3562 __ j(not_equal, not_found);
3625 STATIC_ASSERT(8 == kDoubleSize); 3563 STATIC_ASSERT(8 == kDoubleSize);
3626 __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset)); 3564 __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
3627 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4)); 3565 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
3628 // Object is heap number and hash is now in scratch. Calculate cache index. 3566 // Object is heap number and hash is now in scratch. Calculate cache index.
3629 __ and_(scratch, Operand(mask)); 3567 __ and_(scratch, Operand(mask));
3630 Register index = scratch; 3568 Register index = scratch;
(...skipping 10 matching lines...) Expand all
3641 __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset)); 3579 __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
3642 __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset)); 3580 __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
3643 __ ucomisd(xmm0, xmm1); 3581 __ ucomisd(xmm0, xmm1);
3644 } else { 3582 } else {
3645 __ fld_d(FieldOperand(object, HeapNumber::kValueOffset)); 3583 __ fld_d(FieldOperand(object, HeapNumber::kValueOffset));
3646 __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset)); 3584 __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
3647 __ FCmp(); 3585 __ FCmp();
3648 } 3586 }
3649 __ j(parity_even, not_found); // Bail out if NaN is involved. 3587 __ j(parity_even, not_found); // Bail out if NaN is involved.
3650 __ j(not_equal, not_found); // The cache did not contain this value. 3588 __ j(not_equal, not_found); // The cache did not contain this value.
3651 __ jmp(&load_result_from_cache); 3589 __ jmp(&load_result_from_cache, Label::kNear);
3652 } 3590 }
3653 3591
3654 __ bind(&smi_hash_calculated); 3592 __ bind(&smi_hash_calculated);
3655 // Object is smi and hash is now in scratch. Calculate cache index. 3593 // Object is smi and hash is now in scratch. Calculate cache index.
3656 __ and_(scratch, Operand(mask)); 3594 __ and_(scratch, Operand(mask));
3657 Register index = scratch; 3595 Register index = scratch;
3658 // Check if the entry is the smi we are looking for. 3596 // Check if the entry is the smi we are looking for.
3659 __ cmp(object, 3597 __ cmp(object,
3660 FieldOperand(number_string_cache, 3598 FieldOperand(number_string_cache,
3661 index, 3599 index,
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after
3701 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); 3639 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
3702 3640
3703 Label check_unequal_objects, done; 3641 Label check_unequal_objects, done;
3704 3642
3705 // Compare two smis if required. 3643 // Compare two smis if required.
3706 if (include_smi_compare_) { 3644 if (include_smi_compare_) {
3707 Label non_smi, smi_done; 3645 Label non_smi, smi_done;
3708 __ mov(ecx, Operand(edx)); 3646 __ mov(ecx, Operand(edx));
3709 __ or_(ecx, Operand(eax)); 3647 __ or_(ecx, Operand(eax));
3710 __ test(ecx, Immediate(kSmiTagMask)); 3648 __ test(ecx, Immediate(kSmiTagMask));
3711 __ j(not_zero, &non_smi, not_taken); 3649 __ j(not_zero, &non_smi);
3712 __ sub(edx, Operand(eax)); // Return on the result of the subtraction. 3650 __ sub(edx, Operand(eax)); // Return on the result of the subtraction.
3713 __ j(no_overflow, &smi_done); 3651 __ j(no_overflow, &smi_done);
3714 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here. 3652 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
3715 __ bind(&smi_done); 3653 __ bind(&smi_done);
3716 __ mov(eax, edx); 3654 __ mov(eax, edx);
3717 __ ret(0); 3655 __ ret(0);
3718 __ bind(&non_smi); 3656 __ bind(&non_smi);
3719 } else if (FLAG_debug_code) { 3657 } else if (FLAG_debug_code) {
3720 __ mov(ecx, Operand(edx)); 3658 __ mov(ecx, Operand(edx));
3721 __ or_(ecx, Operand(eax)); 3659 __ or_(ecx, Operand(eax));
3722 __ test(ecx, Immediate(kSmiTagMask)); 3660 __ test(ecx, Immediate(kSmiTagMask));
3723 __ Assert(not_zero, "Unexpected smi operands."); 3661 __ Assert(not_zero, "Unexpected smi operands.");
3724 } 3662 }
3725 3663
3726 // NOTICE! This code is only reached after a smi-fast-case check, so 3664 // NOTICE! This code is only reached after a smi-fast-case check, so
3727 // it is certain that at least one operand isn't a smi. 3665 // it is certain that at least one operand isn't a smi.
3728 3666
3729 // Identical objects can be compared fast, but there are some tricky cases 3667 // Identical objects can be compared fast, but there are some tricky cases
3730 // for NaN and undefined. 3668 // for NaN and undefined.
3731 { 3669 {
3732 Label not_identical; 3670 Label not_identical;
3733 __ cmp(eax, Operand(edx)); 3671 __ cmp(eax, Operand(edx));
3734 __ j(not_equal, &not_identical); 3672 __ j(not_equal, &not_identical);
3735 3673
3736 if (cc_ != equal) { 3674 if (cc_ != equal) {
3737 // Check for undefined. undefined OP undefined is false even though 3675 // Check for undefined. undefined OP undefined is false even though
3738 // undefined == undefined. 3676 // undefined == undefined.
3739 NearLabel check_for_nan; 3677 Label check_for_nan;
3740 __ cmp(edx, masm->isolate()->factory()->undefined_value()); 3678 __ cmp(edx, masm->isolate()->factory()->undefined_value());
3741 __ j(not_equal, &check_for_nan); 3679 __ j(not_equal, &check_for_nan, Label::kNear);
3742 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_)))); 3680 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
3743 __ ret(0); 3681 __ ret(0);
3744 __ bind(&check_for_nan); 3682 __ bind(&check_for_nan);
3745 } 3683 }
3746 3684
3747 // Test for NaN. Sadly, we can't just compare to factory->nan_value(), 3685 // Test for NaN. Sadly, we can't just compare to factory->nan_value(),
3748 // so we do the second best thing - test it ourselves. 3686 // so we do the second best thing - test it ourselves.
3749 // Note: if cc_ != equal, never_nan_nan_ is not used. 3687 // Note: if cc_ != equal, never_nan_nan_ is not used.
3750 if (never_nan_nan_ && (cc_ == equal)) { 3688 if (never_nan_nan_ && (cc_ == equal)) {
3751 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); 3689 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
3752 __ ret(0); 3690 __ ret(0);
3753 } else { 3691 } else {
3754 NearLabel heap_number; 3692 Label heap_number;
3755 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), 3693 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
3756 Immediate(masm->isolate()->factory()->heap_number_map())); 3694 Immediate(masm->isolate()->factory()->heap_number_map()));
3757 __ j(equal, &heap_number); 3695 __ j(equal, &heap_number, Label::kNear);
3758 if (cc_ != equal) { 3696 if (cc_ != equal) {
3759 // Call runtime on identical JSObjects. Otherwise return equal. 3697 // Call runtime on identical JSObjects. Otherwise return equal.
3760 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx); 3698 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
3761 __ j(above_equal, &not_identical); 3699 __ j(above_equal, &not_identical);
3762 } 3700 }
3763 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); 3701 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
3764 __ ret(0); 3702 __ ret(0);
3765 3703
3766 __ bind(&heap_number); 3704 __ bind(&heap_number);
3767 // It is a heap number, so return non-equal if it's NaN and equal if 3705 // It is a heap number, so return non-equal if it's NaN and equal if
(...skipping 11 matching lines...) Expand all
3779 __ Set(eax, Immediate(0)); 3717 __ Set(eax, Immediate(0));
3780 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost 3718 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost
3781 // bits. 3719 // bits.
3782 __ add(edx, Operand(edx)); 3720 __ add(edx, Operand(edx));
3783 __ cmp(edx, kQuietNaNHighBitsMask << 1); 3721 __ cmp(edx, kQuietNaNHighBitsMask << 1);
3784 if (cc_ == equal) { 3722 if (cc_ == equal) {
3785 STATIC_ASSERT(EQUAL != 1); 3723 STATIC_ASSERT(EQUAL != 1);
3786 __ setcc(above_equal, eax); 3724 __ setcc(above_equal, eax);
3787 __ ret(0); 3725 __ ret(0);
3788 } else { 3726 } else {
3789 NearLabel nan; 3727 Label nan;
3790 __ j(above_equal, &nan); 3728 __ j(above_equal, &nan, Label::kNear);
3791 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); 3729 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
3792 __ ret(0); 3730 __ ret(0);
3793 __ bind(&nan); 3731 __ bind(&nan);
3794 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_)))); 3732 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
3795 __ ret(0); 3733 __ ret(0);
3796 } 3734 }
3797 } 3735 }
3798 3736
3799 __ bind(&not_identical); 3737 __ bind(&not_identical);
3800 } 3738 }
3801 3739
3802 // Strict equality can quickly decide whether objects are equal. 3740 // Strict equality can quickly decide whether objects are equal.
3803 // Non-strict object equality is slower, so it is handled later in the stub. 3741 // Non-strict object equality is slower, so it is handled later in the stub.
3804 if (cc_ == equal && strict_) { 3742 if (cc_ == equal && strict_) {
3805 Label slow; // Fallthrough label. 3743 Label slow; // Fallthrough label.
3806 NearLabel not_smis; 3744 Label not_smis;
3807 // If we're doing a strict equality comparison, we don't have to do 3745 // If we're doing a strict equality comparison, we don't have to do
3808 // type conversion, so we generate code to do fast comparison for objects 3746 // type conversion, so we generate code to do fast comparison for objects
3809 // and oddballs. Non-smi numbers and strings still go through the usual 3747 // and oddballs. Non-smi numbers and strings still go through the usual
3810 // slow-case code. 3748 // slow-case code.
3811 // If either is a Smi (we know that not both are), then they can only 3749 // If either is a Smi (we know that not both are), then they can only
3812 // be equal if the other is a HeapNumber. If so, use the slow case. 3750 // be equal if the other is a HeapNumber. If so, use the slow case.
3813 STATIC_ASSERT(kSmiTag == 0); 3751 STATIC_ASSERT(kSmiTag == 0);
3814 ASSERT_EQ(0, Smi::FromInt(0)); 3752 ASSERT_EQ(0, Smi::FromInt(0));
3815 __ mov(ecx, Immediate(kSmiTagMask)); 3753 __ mov(ecx, Immediate(kSmiTagMask));
3816 __ and_(ecx, Operand(eax)); 3754 __ and_(ecx, Operand(eax));
3817 __ test(ecx, Operand(edx)); 3755 __ test(ecx, Operand(edx));
3818 __ j(not_zero, &not_smis); 3756 __ j(not_zero, &not_smis, Label::kNear);
3819 // One operand is a smi. 3757 // One operand is a smi.
3820 3758
3821 // Check whether the non-smi is a heap number. 3759 // Check whether the non-smi is a heap number.
3822 STATIC_ASSERT(kSmiTagMask == 1); 3760 STATIC_ASSERT(kSmiTagMask == 1);
3823 // ecx still holds eax & kSmiTag, which is either zero or one. 3761 // ecx still holds eax & kSmiTag, which is either zero or one.
3824 __ sub(Operand(ecx), Immediate(0x01)); 3762 __ sub(Operand(ecx), Immediate(0x01));
3825 __ mov(ebx, edx); 3763 __ mov(ebx, edx);
3826 __ xor_(ebx, Operand(eax)); 3764 __ xor_(ebx, Operand(eax));
3827 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx. 3765 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx.
3828 __ xor_(ebx, Operand(eax)); 3766 __ xor_(ebx, Operand(eax));
3829 // if eax was smi, ebx is now edx, else eax. 3767 // if eax was smi, ebx is now edx, else eax.
3830 3768
3831 // Check if the non-smi operand is a heap number. 3769 // Check if the non-smi operand is a heap number.
3832 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), 3770 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
3833 Immediate(masm->isolate()->factory()->heap_number_map())); 3771 Immediate(masm->isolate()->factory()->heap_number_map()));
3834 // If heap number, handle it in the slow case. 3772 // If heap number, handle it in the slow case.
3835 __ j(equal, &slow); 3773 __ j(equal, &slow);
3836 // Return non-equal (ebx is not zero) 3774 // Return non-equal (ebx is not zero)
3837 __ mov(eax, ebx); 3775 __ mov(eax, ebx);
3838 __ ret(0); 3776 __ ret(0);
3839 3777
3840 __ bind(&not_smis); 3778 __ bind(&not_smis);
3841 // If either operand is a JSObject or an oddball value, then they are not 3779 // If either operand is a JSObject or an oddball value, then they are not
3842 // equal since their pointers are different 3780 // equal since their pointers are different
3843 // There is no test for undetectability in strict equality. 3781 // There is no test for undetectability in strict equality.
3844 3782
3845 // Get the type of the first operand. 3783 // Get the type of the first operand.
3846 // If the first object is a JS object, we have done pointer comparison. 3784 // If the first object is a JS object, we have done pointer comparison.
3847 NearLabel first_non_object; 3785 Label first_non_object;
3848 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 3786 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
3849 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx); 3787 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
3850 __ j(below, &first_non_object); 3788 __ j(below, &first_non_object, Label::kNear);
3851 3789
3852 // Return non-zero (eax is not zero) 3790 // Return non-zero (eax is not zero)
3853 NearLabel return_not_equal; 3791 Label return_not_equal;
3854 STATIC_ASSERT(kHeapObjectTag != 0); 3792 STATIC_ASSERT(kHeapObjectTag != 0);
3855 __ bind(&return_not_equal); 3793 __ bind(&return_not_equal);
3856 __ ret(0); 3794 __ ret(0);
3857 3795
3858 __ bind(&first_non_object); 3796 __ bind(&first_non_object);
3859 // Check for oddballs: true, false, null, undefined. 3797 // Check for oddballs: true, false, null, undefined.
3860 __ CmpInstanceType(ecx, ODDBALL_TYPE); 3798 __ CmpInstanceType(ecx, ODDBALL_TYPE);
3861 __ j(equal, &return_not_equal); 3799 __ j(equal, &return_not_equal);
3862 3800
3863 __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ecx); 3801 __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ecx);
(...skipping 12 matching lines...) Expand all
3876 Label non_number_comparison; 3814 Label non_number_comparison;
3877 Label unordered; 3815 Label unordered;
3878 if (CpuFeatures::IsSupported(SSE2)) { 3816 if (CpuFeatures::IsSupported(SSE2)) {
3879 CpuFeatures::Scope use_sse2(SSE2); 3817 CpuFeatures::Scope use_sse2(SSE2);
3880 CpuFeatures::Scope use_cmov(CMOV); 3818 CpuFeatures::Scope use_cmov(CMOV);
3881 3819
3882 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison); 3820 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
3883 __ ucomisd(xmm0, xmm1); 3821 __ ucomisd(xmm0, xmm1);
3884 3822
3885 // Don't base result on EFLAGS when a NaN is involved. 3823 // Don't base result on EFLAGS when a NaN is involved.
3886 __ j(parity_even, &unordered, not_taken); 3824 __ j(parity_even, &unordered);
3887 // Return a result of -1, 0, or 1, based on EFLAGS. 3825 // Return a result of -1, 0, or 1, based on EFLAGS.
3888 __ mov(eax, 0); // equal 3826 __ mov(eax, 0); // equal
3889 __ mov(ecx, Immediate(Smi::FromInt(1))); 3827 __ mov(ecx, Immediate(Smi::FromInt(1)));
3890 __ cmov(above, eax, Operand(ecx)); 3828 __ cmov(above, eax, Operand(ecx));
3891 __ mov(ecx, Immediate(Smi::FromInt(-1))); 3829 __ mov(ecx, Immediate(Smi::FromInt(-1)));
3892 __ cmov(below, eax, Operand(ecx)); 3830 __ cmov(below, eax, Operand(ecx));
3893 __ ret(0); 3831 __ ret(0);
3894 } else { 3832 } else {
3895 FloatingPointHelper::CheckFloatOperands( 3833 FloatingPointHelper::CheckFloatOperands(
3896 masm, &non_number_comparison, ebx); 3834 masm, &non_number_comparison, ebx);
3897 FloatingPointHelper::LoadFloatOperand(masm, eax); 3835 FloatingPointHelper::LoadFloatOperand(masm, eax);
3898 FloatingPointHelper::LoadFloatOperand(masm, edx); 3836 FloatingPointHelper::LoadFloatOperand(masm, edx);
3899 __ FCmp(); 3837 __ FCmp();
3900 3838
3901 // Don't base result on EFLAGS when a NaN is involved. 3839 // Don't base result on EFLAGS when a NaN is involved.
3902 __ j(parity_even, &unordered, not_taken); 3840 __ j(parity_even, &unordered);
3903 3841
3904 NearLabel below_label, above_label; 3842 Label below_label, above_label;
3905 // Return a result of -1, 0, or 1, based on EFLAGS. 3843 // Return a result of -1, 0, or 1, based on EFLAGS.
3906 __ j(below, &below_label, not_taken); 3844 __ j(below, &below_label);
3907 __ j(above, &above_label, not_taken); 3845 __ j(above, &above_label);
3908 3846
3909 __ Set(eax, Immediate(0)); 3847 __ Set(eax, Immediate(0));
3910 __ ret(0); 3848 __ ret(0);
3911 3849
3912 __ bind(&below_label); 3850 __ bind(&below_label);
3913 __ mov(eax, Immediate(Smi::FromInt(-1))); 3851 __ mov(eax, Immediate(Smi::FromInt(-1)));
3914 __ ret(0); 3852 __ ret(0);
3915 3853
3916 __ bind(&above_label); 3854 __ bind(&above_label);
3917 __ mov(eax, Immediate(Smi::FromInt(1))); 3855 __ mov(eax, Immediate(Smi::FromInt(1)));
(...skipping 26 matching lines...) Expand all
3944 // non-zero value, which indicates not equal, so just return. 3882 // non-zero value, which indicates not equal, so just return.
3945 __ ret(0); 3883 __ ret(0);
3946 } 3884 }
3947 3885
3948 __ bind(&check_for_strings); 3886 __ bind(&check_for_strings);
3949 3887
3950 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, 3888 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx,
3951 &check_unequal_objects); 3889 &check_unequal_objects);
3952 3890
3953 // Inline comparison of ascii strings. 3891 // Inline comparison of ascii strings.
3954 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, 3892 if (cc_ == equal) {
3893 StringCompareStub::GenerateFlatAsciiStringEquals(masm,
3955 edx, 3894 edx,
3956 eax, 3895 eax,
3957 ecx, 3896 ecx,
3958 ebx, 3897 ebx);
3959 edi); 3898 } else {
3899 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
3900 edx,
3901 eax,
3902 ecx,
3903 ebx,
3904 edi);
3905 }
3960 #ifdef DEBUG 3906 #ifdef DEBUG
3961 __ Abort("Unexpected fall-through from string comparison"); 3907 __ Abort("Unexpected fall-through from string comparison");
3962 #endif 3908 #endif
3963 3909
3964 __ bind(&check_unequal_objects); 3910 __ bind(&check_unequal_objects);
3965 if (cc_ == equal && !strict_) { 3911 if (cc_ == equal && !strict_) {
3966 // Non-strict equality. Objects are unequal if 3912 // Non-strict equality. Objects are unequal if
3967 // they are both JSObjects and not undetectable, 3913 // they are both JSObjects and not undetectable,
3968 // and their pointers are different. 3914 // and their pointers are different.
3969 NearLabel not_both_objects; 3915 Label not_both_objects;
3970 NearLabel return_unequal; 3916 Label return_unequal;
3971 // At most one is a smi, so we can test for smi by adding the two. 3917 // At most one is a smi, so we can test for smi by adding the two.
3972 // A smi plus a heap object has the low bit set, a heap object plus 3918 // A smi plus a heap object has the low bit set, a heap object plus
3973 // a heap object has the low bit clear. 3919 // a heap object has the low bit clear.
3974 STATIC_ASSERT(kSmiTag == 0); 3920 STATIC_ASSERT(kSmiTag == 0);
3975 STATIC_ASSERT(kSmiTagMask == 1); 3921 STATIC_ASSERT(kSmiTagMask == 1);
3976 __ lea(ecx, Operand(eax, edx, times_1, 0)); 3922 __ lea(ecx, Operand(eax, edx, times_1, 0));
3977 __ test(ecx, Immediate(kSmiTagMask)); 3923 __ test(ecx, Immediate(kSmiTagMask));
3978 __ j(not_zero, &not_both_objects); 3924 __ j(not_zero, &not_both_objects, Label::kNear);
3979 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx); 3925 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
3980 __ j(below, &not_both_objects); 3926 __ j(below, &not_both_objects, Label::kNear);
3981 __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ebx); 3927 __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ebx);
3982 __ j(below, &not_both_objects); 3928 __ j(below, &not_both_objects, Label::kNear);
3983 // We do not bail out after this point. Both are JSObjects, and 3929 // We do not bail out after this point. Both are JSObjects, and
3984 // they are equal if and only if both are undetectable. 3930 // they are equal if and only if both are undetectable.
3985 // The and of the undetectable flags is 1 if and only if they are equal. 3931 // The and of the undetectable flags is 1 if and only if they are equal.
3986 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset), 3932 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
3987 1 << Map::kIsUndetectable); 3933 1 << Map::kIsUndetectable);
3988 __ j(zero, &return_unequal); 3934 __ j(zero, &return_unequal, Label::kNear);
3989 __ test_b(FieldOperand(ebx, Map::kBitFieldOffset), 3935 __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
3990 1 << Map::kIsUndetectable); 3936 1 << Map::kIsUndetectable);
3991 __ j(zero, &return_unequal); 3937 __ j(zero, &return_unequal, Label::kNear);
3992 // The objects are both undetectable, so they both compare as the value 3938 // The objects are both undetectable, so they both compare as the value
3993 // undefined, and are equal. 3939 // undefined, and are equal.
3994 __ Set(eax, Immediate(EQUAL)); 3940 __ Set(eax, Immediate(EQUAL));
3995 __ bind(&return_unequal); 3941 __ bind(&return_unequal);
3996 // Return non-equal by returning the non-zero object pointer in eax, 3942 // Return non-equal by returning the non-zero object pointer in eax,
3997 // or return equal if we fell through to here. 3943 // or return equal if we fell through to here.
3998 __ ret(0); // rax, rdx were pushed 3944 __ ret(0); // rax, rdx were pushed
3999 __ bind(&not_both_objects); 3945 __ bind(&not_both_objects);
4000 } 3946 }
4001 3947
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
4047 // If the receiver might be a value (string, number or boolean) check for this 3993 // If the receiver might be a value (string, number or boolean) check for this
4048 // and box it if it is. 3994 // and box it if it is.
4049 if (ReceiverMightBeValue()) { 3995 if (ReceiverMightBeValue()) {
4050 // Get the receiver from the stack. 3996 // Get the receiver from the stack.
4051 // +1 ~ return address 3997 // +1 ~ return address
4052 Label receiver_is_value, receiver_is_js_object; 3998 Label receiver_is_value, receiver_is_js_object;
4053 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize)); 3999 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
4054 4000
4055 // Check if receiver is a smi (which is a number value). 4001 // Check if receiver is a smi (which is a number value).
4056 __ test(eax, Immediate(kSmiTagMask)); 4002 __ test(eax, Immediate(kSmiTagMask));
4057 __ j(zero, &receiver_is_value, not_taken); 4003 __ j(zero, &receiver_is_value);
4058 4004
4059 // Check if the receiver is a valid JS object. 4005 // Check if the receiver is a valid JS object.
4060 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, edi); 4006 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, edi);
4061 __ j(above_equal, &receiver_is_js_object); 4007 __ j(above_equal, &receiver_is_js_object);
4062 4008
4063 // Call the runtime to box the value. 4009 // Call the runtime to box the value.
4064 __ bind(&receiver_is_value); 4010 __ bind(&receiver_is_value);
4065 __ EnterInternalFrame(); 4011 __ EnterInternalFrame();
4066 __ push(eax); 4012 __ push(eax);
4067 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 4013 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
4068 __ LeaveInternalFrame(); 4014 __ LeaveInternalFrame();
4069 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax); 4015 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
4070 4016
4071 __ bind(&receiver_is_js_object); 4017 __ bind(&receiver_is_js_object);
4072 } 4018 }
4073 4019
4074 // Get the function to call from the stack. 4020 // Get the function to call from the stack.
4075 // +2 ~ receiver, return address 4021 // +2 ~ receiver, return address
4076 __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize)); 4022 __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize));
4077 4023
4078 // Check that the function really is a JavaScript function. 4024 // Check that the function really is a JavaScript function.
4079 __ test(edi, Immediate(kSmiTagMask)); 4025 __ test(edi, Immediate(kSmiTagMask));
4080 __ j(zero, &slow, not_taken); 4026 __ j(zero, &slow);
4081 // Goto slow case if we do not have a function. 4027 // Goto slow case if we do not have a function.
4082 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); 4028 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
4083 __ j(not_equal, &slow, not_taken); 4029 __ j(not_equal, &slow);
4084 4030
4085 // Fast-case: Just invoke the function. 4031 // Fast-case: Just invoke the function.
4086 ParameterCount actual(argc_); 4032 ParameterCount actual(argc_);
4087 __ InvokeFunction(edi, actual, JUMP_FUNCTION); 4033 __ InvokeFunction(edi, actual, JUMP_FUNCTION);
4088 4034
4089 // Slow-case: Non-function called. 4035 // Slow-case: Non-function called.
4090 __ bind(&slow); 4036 __ bind(&slow);
4091 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 4037 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
4092 // of the original receiver from the call site). 4038 // of the original receiver from the call site).
4093 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); 4039 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after
4154 __ call(Operand(ebx)); 4100 __ call(Operand(ebx));
4155 // Result is in eax or edx:eax - do not destroy these registers! 4101 // Result is in eax or edx:eax - do not destroy these registers!
4156 4102
4157 if (always_allocate_scope) { 4103 if (always_allocate_scope) {
4158 __ dec(Operand::StaticVariable(scope_depth)); 4104 __ dec(Operand::StaticVariable(scope_depth));
4159 } 4105 }
4160 4106
4161 // Make sure we're not trying to return 'the hole' from the runtime 4107 // Make sure we're not trying to return 'the hole' from the runtime
4162 // call as this may lead to crashes in the IC code later. 4108 // call as this may lead to crashes in the IC code later.
4163 if (FLAG_debug_code) { 4109 if (FLAG_debug_code) {
4164 NearLabel okay; 4110 Label okay;
4165 __ cmp(eax, masm->isolate()->factory()->the_hole_value()); 4111 __ cmp(eax, masm->isolate()->factory()->the_hole_value());
4166 __ j(not_equal, &okay); 4112 __ j(not_equal, &okay, Label::kNear);
4167 __ int3(); 4113 __ int3();
4168 __ bind(&okay); 4114 __ bind(&okay);
4169 } 4115 }
4170 4116
4171 // Check for failure result. 4117 // Check for failure result.
4172 Label failure_returned; 4118 Label failure_returned;
4173 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); 4119 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
4174 __ lea(ecx, Operand(eax, 1)); 4120 __ lea(ecx, Operand(eax, 1));
4175 // Lower 2 bits of ecx are 0 iff eax has failure tag. 4121 // Lower 2 bits of ecx are 0 iff eax has failure tag.
4176 __ test(ecx, Immediate(kFailureTagMask)); 4122 __ test(ecx, Immediate(kFailureTagMask));
4177 __ j(zero, &failure_returned, not_taken); 4123 __ j(zero, &failure_returned);
4178 4124
4179 ExternalReference pending_exception_address( 4125 ExternalReference pending_exception_address(
4180 Isolate::k_pending_exception_address, masm->isolate()); 4126 Isolate::k_pending_exception_address, masm->isolate());
4181 4127
4182 // Check that there is no pending exception, otherwise we 4128 // Check that there is no pending exception, otherwise we
4183 // should have returned some failure value. 4129 // should have returned some failure value.
4184 if (FLAG_debug_code) { 4130 if (FLAG_debug_code) {
4185 __ push(edx); 4131 __ push(edx);
4186 __ mov(edx, Operand::StaticVariable( 4132 __ mov(edx, Operand::StaticVariable(
4187 ExternalReference::the_hole_value_location(masm->isolate()))); 4133 ExternalReference::the_hole_value_location(masm->isolate())));
4188 NearLabel okay; 4134 Label okay;
4189 __ cmp(edx, Operand::StaticVariable(pending_exception_address)); 4135 __ cmp(edx, Operand::StaticVariable(pending_exception_address));
4190 // Cannot use check here as it attempts to generate call into runtime. 4136 // Cannot use check here as it attempts to generate call into runtime.
4191 __ j(equal, &okay); 4137 __ j(equal, &okay, Label::kNear);
4192 __ int3(); 4138 __ int3();
4193 __ bind(&okay); 4139 __ bind(&okay);
4194 __ pop(edx); 4140 __ pop(edx);
4195 } 4141 }
4196 4142
4197 // Exit the JavaScript to C++ exit frame. 4143 // Exit the JavaScript to C++ exit frame.
4198 __ LeaveExitFrame(save_doubles_ == kSaveFPRegs); 4144 __ LeaveExitFrame(save_doubles_ == kSaveFPRegs);
4199 __ ret(0); 4145 __ ret(0);
4200 4146
4201 // Handling of failure. 4147 // Handling of failure.
4202 __ bind(&failure_returned); 4148 __ bind(&failure_returned);
4203 4149
4204 Label retry; 4150 Label retry;
4205 // If the returned exception is RETRY_AFTER_GC continue at retry label 4151 // If the returned exception is RETRY_AFTER_GC continue at retry label
4206 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); 4152 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
4207 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); 4153 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
4208 __ j(zero, &retry, taken); 4154 __ j(zero, &retry);
4209 4155
4210 // Special handling of out of memory exceptions. 4156 // Special handling of out of memory exceptions.
4211 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException())); 4157 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
4212 __ j(equal, throw_out_of_memory_exception); 4158 __ j(equal, throw_out_of_memory_exception);
4213 4159
4214 // Retrieve the pending exception and clear the variable. 4160 // Retrieve the pending exception and clear the variable.
4215 ExternalReference the_hole_location = 4161 ExternalReference the_hole_location =
4216 ExternalReference::the_hole_value_location(masm->isolate()); 4162 ExternalReference::the_hole_value_location(masm->isolate());
4217 __ mov(eax, Operand::StaticVariable(pending_exception_address)); 4163 __ mov(eax, Operand::StaticVariable(pending_exception_address));
4218 __ mov(edx, Operand::StaticVariable(the_hole_location)); 4164 __ mov(edx, Operand::StaticVariable(the_hole_location));
(...skipping 104 matching lines...) Expand 10 before | Expand all | Expand 10 after
4323 ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, masm->isolate()); 4269 ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, masm->isolate());
4324 __ push(Operand::StaticVariable(c_entry_fp)); 4270 __ push(Operand::StaticVariable(c_entry_fp));
4325 4271
4326 #ifdef ENABLE_LOGGING_AND_PROFILING 4272 #ifdef ENABLE_LOGGING_AND_PROFILING
4327 // If this is the outermost JS call, set js_entry_sp value. 4273 // If this is the outermost JS call, set js_entry_sp value.
4328 ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, 4274 ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address,
4329 masm->isolate()); 4275 masm->isolate());
4330 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0)); 4276 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
4331 __ j(not_equal, &not_outermost_js); 4277 __ j(not_equal, &not_outermost_js);
4332 __ mov(Operand::StaticVariable(js_entry_sp), ebp); 4278 __ mov(Operand::StaticVariable(js_entry_sp), ebp);
4279 __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
4280 Label cont;
4281 __ jmp(&cont);
4333 __ bind(&not_outermost_js); 4282 __ bind(&not_outermost_js);
4283 __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
4284 __ bind(&cont);
4334 #endif 4285 #endif
4335 4286
4336 // Call a faked try-block that does the invoke. 4287 // Call a faked try-block that does the invoke.
4337 __ call(&invoke); 4288 __ call(&invoke);
4338 4289
4339 // Caught exception: Store result (exception) in the pending 4290 // Caught exception: Store result (exception) in the pending
4340 // exception field in the JSEnv and return a failure sentinel. 4291 // exception field in the JSEnv and return a failure sentinel.
4341 ExternalReference pending_exception(Isolate::k_pending_exception_address, 4292 ExternalReference pending_exception(Isolate::k_pending_exception_address,
4342 masm->isolate()); 4293 masm->isolate());
4343 __ mov(Operand::StaticVariable(pending_exception), eax); 4294 __ mov(Operand::StaticVariable(pending_exception), eax);
(...skipping 25 matching lines...) Expand all
4369 } else { 4320 } else {
4370 ExternalReference entry(Builtins::kJSEntryTrampoline, 4321 ExternalReference entry(Builtins::kJSEntryTrampoline,
4371 masm->isolate()); 4322 masm->isolate());
4372 __ mov(edx, Immediate(entry)); 4323 __ mov(edx, Immediate(entry));
4373 } 4324 }
4374 __ mov(edx, Operand(edx, 0)); // deref address 4325 __ mov(edx, Operand(edx, 0)); // deref address
4375 __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); 4326 __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
4376 __ call(Operand(edx)); 4327 __ call(Operand(edx));
4377 4328
4378 // Unlink this frame from the handler chain. 4329 // Unlink this frame from the handler chain.
4379 __ pop(Operand::StaticVariable(ExternalReference( 4330 __ PopTryHandler();
4380 Isolate::k_handler_address,
4381 masm->isolate())));
4382 // Pop next_sp.
4383 __ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
4384 4331
4332 __ bind(&exit);
4385 #ifdef ENABLE_LOGGING_AND_PROFILING 4333 #ifdef ENABLE_LOGGING_AND_PROFILING
4386 // If current EBP value is the same as js_entry_sp value, it means that 4334 // Check if the current stack frame is marked as the outermost JS frame.
4387 // the current function is the outermost. 4335 __ pop(ebx);
4388 __ cmp(ebp, Operand::StaticVariable(js_entry_sp)); 4336 __ cmp(Operand(ebx),
4337 Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
4389 __ j(not_equal, &not_outermost_js_2); 4338 __ j(not_equal, &not_outermost_js_2);
4390 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0)); 4339 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
4391 __ bind(&not_outermost_js_2); 4340 __ bind(&not_outermost_js_2);
4392 #endif 4341 #endif
4393 4342
4394 // Restore the top frame descriptor from the stack. 4343 // Restore the top frame descriptor from the stack.
4395 __ bind(&exit);
4396 __ pop(Operand::StaticVariable(ExternalReference( 4344 __ pop(Operand::StaticVariable(ExternalReference(
4397 Isolate::k_c_entry_fp_address, 4345 Isolate::k_c_entry_fp_address,
4398 masm->isolate()))); 4346 masm->isolate())));
4399 4347
4400 // Restore callee-saved registers (C calling conventions). 4348 // Restore callee-saved registers (C calling conventions).
4401 __ pop(ebx); 4349 __ pop(ebx);
4402 __ pop(esi); 4350 __ pop(esi);
4403 __ pop(edi); 4351 __ pop(edi);
4404 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers 4352 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers
4405 4353
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
4452 4400
4453 // Get the object and function - they are always both needed. 4401 // Get the object and function - they are always both needed.
4454 Label slow, not_js_object; 4402 Label slow, not_js_object;
4455 if (!HasArgsInRegisters()) { 4403 if (!HasArgsInRegisters()) {
4456 __ mov(object, Operand(esp, 2 * kPointerSize)); 4404 __ mov(object, Operand(esp, 2 * kPointerSize));
4457 __ mov(function, Operand(esp, 1 * kPointerSize)); 4405 __ mov(function, Operand(esp, 1 * kPointerSize));
4458 } 4406 }
4459 4407
4460 // Check that the left hand is a JS object. 4408 // Check that the left hand is a JS object.
4461 __ test(object, Immediate(kSmiTagMask)); 4409 __ test(object, Immediate(kSmiTagMask));
4462 __ j(zero, &not_js_object, not_taken); 4410 __ j(zero, &not_js_object);
4463 __ IsObjectJSObjectType(object, map, scratch, &not_js_object); 4411 __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
4464 4412
4465 // If there is a call site cache don't look in the global cache, but do the 4413 // If there is a call site cache don't look in the global cache, but do the
4466 // real lookup and update the call site cache. 4414 // real lookup and update the call site cache.
4467 if (!HasCallSiteInlineCheck()) { 4415 if (!HasCallSiteInlineCheck()) {
4468 // Look up the function and the map in the instanceof cache. 4416 // Look up the function and the map in the instanceof cache.
4469 NearLabel miss; 4417 Label miss;
4470 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); 4418 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
4471 __ cmp(function, 4419 __ cmp(function,
4472 Operand::StaticArray(scratch, times_pointer_size, roots_address)); 4420 Operand::StaticArray(scratch, times_pointer_size, roots_address));
4473 __ j(not_equal, &miss); 4421 __ j(not_equal, &miss, Label::kNear);
4474 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); 4422 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
4475 __ cmp(map, Operand::StaticArray( 4423 __ cmp(map, Operand::StaticArray(
4476 scratch, times_pointer_size, roots_address)); 4424 scratch, times_pointer_size, roots_address));
4477 __ j(not_equal, &miss); 4425 __ j(not_equal, &miss, Label::kNear);
4478 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); 4426 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
4479 __ mov(eax, Operand::StaticArray( 4427 __ mov(eax, Operand::StaticArray(
4480 scratch, times_pointer_size, roots_address)); 4428 scratch, times_pointer_size, roots_address));
4481 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 4429 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4482 __ bind(&miss); 4430 __ bind(&miss);
4483 } 4431 }
4484 4432
4485 // Get the prototype of the function. 4433 // Get the prototype of the function.
4486 __ TryGetFunctionPrototype(function, prototype, scratch, &slow); 4434 __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
4487 4435
4488 // Check that the function prototype is a JS object. 4436 // Check that the function prototype is a JS object.
4489 __ test(prototype, Immediate(kSmiTagMask)); 4437 __ test(prototype, Immediate(kSmiTagMask));
4490 __ j(zero, &slow, not_taken); 4438 __ j(zero, &slow);
4491 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); 4439 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
4492 4440
4493 // Update the global instanceof or call site inlined cache with the current 4441 // Update the global instanceof or call site inlined cache with the current
4494 // map and function. The cached answer will be set when it is known below. 4442 // map and function. The cached answer will be set when it is known below.
4495 if (!HasCallSiteInlineCheck()) { 4443 if (!HasCallSiteInlineCheck()) {
4496 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); 4444 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
4497 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map); 4445 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map);
4498 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); 4446 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
4499 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), 4447 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address),
4500 function); 4448 function);
4501 } else { 4449 } else {
4502 // The constants for the code patching are based on no push instructions 4450 // The constants for the code patching are based on no push instructions
4503 // at the call site. 4451 // at the call site.
4504 ASSERT(HasArgsInRegisters()); 4452 ASSERT(HasArgsInRegisters());
4505 // Get return address and delta to inlined map check. 4453 // Get return address and delta to inlined map check.
4506 __ mov(scratch, Operand(esp, 0 * kPointerSize)); 4454 __ mov(scratch, Operand(esp, 0 * kPointerSize));
4507 __ sub(scratch, Operand(esp, 1 * kPointerSize)); 4455 __ sub(scratch, Operand(esp, 1 * kPointerSize));
4508 if (FLAG_debug_code) { 4456 if (FLAG_debug_code) {
4509 __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1); 4457 __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
4510 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)"); 4458 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
4511 __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2); 4459 __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
4512 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)"); 4460 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
4513 } 4461 }
4514 __ mov(Operand(scratch, kDeltaToCmpImmediate), map); 4462 __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
4515 } 4463 }
4516 4464
4517 // Loop through the prototype chain of the object looking for the function 4465 // Loop through the prototype chain of the object looking for the function
4518 // prototype. 4466 // prototype.
4519 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); 4467 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
4520 NearLabel loop, is_instance, is_not_instance; 4468 Label loop, is_instance, is_not_instance;
4521 __ bind(&loop); 4469 __ bind(&loop);
4522 __ cmp(scratch, Operand(prototype)); 4470 __ cmp(scratch, Operand(prototype));
4523 __ j(equal, &is_instance); 4471 __ j(equal, &is_instance, Label::kNear);
4524 Factory* factory = masm->isolate()->factory(); 4472 Factory* factory = masm->isolate()->factory();
4525 __ cmp(Operand(scratch), Immediate(factory->null_value())); 4473 __ cmp(Operand(scratch), Immediate(factory->null_value()));
4526 __ j(equal, &is_not_instance); 4474 __ j(equal, &is_not_instance, Label::kNear);
4527 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); 4475 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
4528 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); 4476 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
4529 __ jmp(&loop); 4477 __ jmp(&loop);
4530 4478
4531 __ bind(&is_instance); 4479 __ bind(&is_instance);
4532 if (!HasCallSiteInlineCheck()) { 4480 if (!HasCallSiteInlineCheck()) {
4533 __ Set(eax, Immediate(0)); 4481 __ Set(eax, Immediate(0));
4534 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); 4482 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
4535 __ mov(Operand::StaticArray(scratch, 4483 __ mov(Operand::StaticArray(scratch,
4536 times_pointer_size, roots_address), eax); 4484 times_pointer_size, roots_address), eax);
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after
4570 __ Set(eax, Immediate(Smi::FromInt(1))); 4518 __ Set(eax, Immediate(Smi::FromInt(1)));
4571 } 4519 }
4572 } 4520 }
4573 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 4521 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4574 4522
4575 Label object_not_null, object_not_null_or_smi; 4523 Label object_not_null, object_not_null_or_smi;
4576 __ bind(&not_js_object); 4524 __ bind(&not_js_object);
4577 // Before null, smi and string value checks, check that the rhs is a function 4525 // Before null, smi and string value checks, check that the rhs is a function
4578 // as for a non-function rhs an exception needs to be thrown. 4526 // as for a non-function rhs an exception needs to be thrown.
4579 __ test(function, Immediate(kSmiTagMask)); 4527 __ test(function, Immediate(kSmiTagMask));
4580 __ j(zero, &slow, not_taken); 4528 __ j(zero, &slow);
4581 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); 4529 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
4582 __ j(not_equal, &slow, not_taken); 4530 __ j(not_equal, &slow);
4583 4531
4584 // Null is not instance of anything. 4532 // Null is not instance of anything.
4585 __ cmp(object, factory->null_value()); 4533 __ cmp(object, factory->null_value());
4586 __ j(not_equal, &object_not_null); 4534 __ j(not_equal, &object_not_null);
4587 __ Set(eax, Immediate(Smi::FromInt(1))); 4535 __ Set(eax, Immediate(Smi::FromInt(1)));
4588 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 4536 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4589 4537
4590 __ bind(&object_not_null); 4538 __ bind(&object_not_null);
4591 // Smi values is not instance of anything. 4539 // Smi values is not instance of anything.
4592 __ test(object, Immediate(kSmiTagMask)); 4540 __ test(object, Immediate(kSmiTagMask));
4593 __ j(not_zero, &object_not_null_or_smi, not_taken); 4541 __ j(not_zero, &object_not_null_or_smi);
4594 __ Set(eax, Immediate(Smi::FromInt(1))); 4542 __ Set(eax, Immediate(Smi::FromInt(1)));
4595 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 4543 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4596 4544
4597 __ bind(&object_not_null_or_smi); 4545 __ bind(&object_not_null_or_smi);
4598 // String values is not instance of anything. 4546 // String values is not instance of anything.
4599 Condition is_string = masm->IsObjectStringType(object, scratch, scratch); 4547 Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
4600 __ j(NegateCondition(is_string), &slow); 4548 __ j(NegateCondition(is_string), &slow);
4601 __ Set(eax, Immediate(Smi::FromInt(1))); 4549 __ Set(eax, Immediate(Smi::FromInt(1)));
4602 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 4550 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4603 4551
4604 // Slow-case: Go through the JavaScript implementation. 4552 // Slow-case: Go through the JavaScript implementation.
4605 __ bind(&slow); 4553 __ bind(&slow);
4606 if (!ReturnTrueFalseObject()) { 4554 if (!ReturnTrueFalseObject()) {
4607 // Tail call the builtin which returns 0 or 1. 4555 // Tail call the builtin which returns 0 or 1.
4608 if (HasArgsInRegisters()) { 4556 if (HasArgsInRegisters()) {
4609 // Push arguments below return address. 4557 // Push arguments below return address.
4610 __ pop(scratch); 4558 __ pop(scratch);
4611 __ push(object); 4559 __ push(object);
4612 __ push(function); 4560 __ push(function);
4613 __ push(scratch); 4561 __ push(scratch);
4614 } 4562 }
4615 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); 4563 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
4616 } else { 4564 } else {
4617 // Call the builtin and convert 0/1 to true/false. 4565 // Call the builtin and convert 0/1 to true/false.
4618 __ EnterInternalFrame(); 4566 __ EnterInternalFrame();
4619 __ push(object); 4567 __ push(object);
4620 __ push(function); 4568 __ push(function);
4621 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); 4569 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
4622 __ LeaveInternalFrame(); 4570 __ LeaveInternalFrame();
4623 NearLabel true_value, done; 4571 Label true_value, done;
4624 __ test(eax, Operand(eax)); 4572 __ test(eax, Operand(eax));
4625 __ j(zero, &true_value); 4573 __ j(zero, &true_value, Label::kNear);
4626 __ mov(eax, factory->false_value()); 4574 __ mov(eax, factory->false_value());
4627 __ jmp(&done); 4575 __ jmp(&done, Label::kNear);
4628 __ bind(&true_value); 4576 __ bind(&true_value);
4629 __ mov(eax, factory->true_value()); 4577 __ mov(eax, factory->true_value());
4630 __ bind(&done); 4578 __ bind(&done);
4631 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); 4579 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
4632 } 4580 }
4633 } 4581 }
4634 4582
4635 4583
// Register used for the left-hand (object) operand of InstanceofStub when
// the stub's arguments are passed in registers (see HasArgsInRegisters()
// uses elsewhere in this file).
Register InstanceofStub::left() { return eax; }
4637 4585
(...skipping 158 matching lines...) Expand 10 before | Expand all | Expand 10 after
4796 void StringCharCodeAtGenerator::GenerateSlow( 4744 void StringCharCodeAtGenerator::GenerateSlow(
4797 MacroAssembler* masm, const RuntimeCallHelper& call_helper) { 4745 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
4798 __ Abort("Unexpected fallthrough to CharCodeAt slow case"); 4746 __ Abort("Unexpected fallthrough to CharCodeAt slow case");
4799 4747
4800 // Index is not a smi. 4748 // Index is not a smi.
4801 __ bind(&index_not_smi_); 4749 __ bind(&index_not_smi_);
4802 // If index is a heap number, try converting it to an integer. 4750 // If index is a heap number, try converting it to an integer.
4803 __ CheckMap(index_, 4751 __ CheckMap(index_,
4804 masm->isolate()->factory()->heap_number_map(), 4752 masm->isolate()->factory()->heap_number_map(),
4805 index_not_number_, 4753 index_not_number_,
4806 true); 4754 DONT_DO_SMI_CHECK);
4807 call_helper.BeforeCall(masm); 4755 call_helper.BeforeCall(masm);
4808 __ push(object_); 4756 __ push(object_);
4809 __ push(index_); 4757 __ push(index_);
4810 __ push(index_); // Consumed by runtime conversion function. 4758 __ push(index_); // Consumed by runtime conversion function.
4811 if (index_flags_ == STRING_INDEX_IS_NUMBER) { 4759 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
4812 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); 4760 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
4813 } else { 4761 } else {
4814 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); 4762 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
4815 // NumberToSmi discards numbers that are not exact integers. 4763 // NumberToSmi discards numbers that are not exact integers.
4816 __ CallRuntime(Runtime::kNumberToSmi, 1); 4764 __ CallRuntime(Runtime::kNumberToSmi, 1);
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
4855 // StringCharFromCodeGenerator 4803 // StringCharFromCodeGenerator
4856 4804
// Fast path of Heap::LookupSingleCharacterStringFromCode: maps the
// smi-tagged ascii char code in |code_| to the corresponding one-character
// string via the single-character string cache, leaving the string in
// |result_|.  Jumps to |slow_case_| when |code_| is not a smi ascii char
// code or when the cache entry is undefined; falls through to |exit_| on
// success.
void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiShiftSize == 0);
  ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
  // One test checks both "is a smi" and "fits in ascii char range": any bit
  // in the smi tag or above kMaxAsciiCharCode sends us to the slow case.
  __ test(code_,
          Immediate(kSmiTagMask |
                    ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
  __ j(not_zero, &slow_case_);

  Factory* factory = masm->isolate()->factory();
  __ Set(result_, Immediate(factory->single_character_string_cache()));
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiShiftSize == 0);
  // At this point code register contains smi tagged ascii char code.
  // Index the cache with the smi directly (times_half_pointer_size undoes
  // the smi tag shift).
  __ mov(result_, FieldOperand(result_,
                               code_, times_half_pointer_size,
                               FixedArray::kHeaderSize));
  // An undefined cache entry means the string is not cached; fall back.
  __ cmp(result_, factory->undefined_value());
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}
4880 4828
4881 4829
4882 void StringCharFromCodeGenerator::GenerateSlow( 4830 void StringCharFromCodeGenerator::GenerateSlow(
4883 MacroAssembler* masm, const RuntimeCallHelper& call_helper) { 4831 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
4884 __ Abort("Unexpected fallthrough to CharFromCode slow case"); 4832 __ Abort("Unexpected fallthrough to CharFromCode slow case");
4885 4833
4886 __ bind(&slow_case_); 4834 __ bind(&slow_case_);
4887 call_helper.BeforeCall(masm); 4835 call_helper.BeforeCall(masm);
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after
4946 GenerateConvertArgument(masm, 1 * kPointerSize, edx, ebx, ecx, edi, 4894 GenerateConvertArgument(masm, 1 * kPointerSize, edx, ebx, ecx, edi,
4947 &call_builtin); 4895 &call_builtin);
4948 builtin_id = Builtins::STRING_ADD_LEFT; 4896 builtin_id = Builtins::STRING_ADD_LEFT;
4949 } 4897 }
4950 } 4898 }
4951 4899
4952 // Both arguments are strings. 4900 // Both arguments are strings.
4953 // eax: first string 4901 // eax: first string
4954 // edx: second string 4902 // edx: second string
4955 // Check if either of the strings are empty. In that case return the other. 4903 // Check if either of the strings are empty. In that case return the other.
4956 NearLabel second_not_zero_length, both_not_zero_length; 4904 Label second_not_zero_length, both_not_zero_length;
4957 __ mov(ecx, FieldOperand(edx, String::kLengthOffset)); 4905 __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
4958 STATIC_ASSERT(kSmiTag == 0); 4906 STATIC_ASSERT(kSmiTag == 0);
4959 __ test(ecx, Operand(ecx)); 4907 __ test(ecx, Operand(ecx));
4960 __ j(not_zero, &second_not_zero_length); 4908 __ j(not_zero, &second_not_zero_length, Label::kNear);
4961 // Second string is empty, result is first string which is already in eax. 4909 // Second string is empty, result is first string which is already in eax.
4962 Counters* counters = masm->isolate()->counters(); 4910 Counters* counters = masm->isolate()->counters();
4963 __ IncrementCounter(counters->string_add_native(), 1); 4911 __ IncrementCounter(counters->string_add_native(), 1);
4964 __ ret(2 * kPointerSize); 4912 __ ret(2 * kPointerSize);
4965 __ bind(&second_not_zero_length); 4913 __ bind(&second_not_zero_length);
4966 __ mov(ebx, FieldOperand(eax, String::kLengthOffset)); 4914 __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
4967 STATIC_ASSERT(kSmiTag == 0); 4915 STATIC_ASSERT(kSmiTag == 0);
4968 __ test(ebx, Operand(ebx)); 4916 __ test(ebx, Operand(ebx));
4969 __ j(not_zero, &both_not_zero_length); 4917 __ j(not_zero, &both_not_zero_length, Label::kNear);
4970 // First string is empty, result is second string which is in edx. 4918 // First string is empty, result is second string which is in edx.
4971 __ mov(eax, edx); 4919 __ mov(eax, edx);
4972 __ IncrementCounter(counters->string_add_native(), 1); 4920 __ IncrementCounter(counters->string_add_native(), 1);
4973 __ ret(2 * kPointerSize); 4921 __ ret(2 * kPointerSize);
4974 4922
4975 // Both strings are non-empty. 4923 // Both strings are non-empty.
4976 // eax: first string 4924 // eax: first string
4977 // ebx: length of first string as a smi 4925 // ebx: length of first string as a smi
4978 // ecx: length of second string as a smi 4926 // ecx: length of second string as a smi
4979 // edx: second string 4927 // edx: second string
(...skipping 253 matching lines...) Expand 10 before | Expand all | Expand 10 after
5233 __ bind(&done); 5181 __ bind(&done);
5234 } 5182 }
5235 5183
5236 5184
5237 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, 5185 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
5238 Register dest, 5186 Register dest,
5239 Register src, 5187 Register src,
5240 Register count, 5188 Register count,
5241 Register scratch, 5189 Register scratch,
5242 bool ascii) { 5190 bool ascii) {
5243 NearLabel loop; 5191 Label loop;
5244 __ bind(&loop); 5192 __ bind(&loop);
5245 // This loop just copies one character at a time, as it is only used for very 5193 // This loop just copies one character at a time, as it is only used for very
5246 // short strings. 5194 // short strings.
5247 if (ascii) { 5195 if (ascii) {
5248 __ mov_b(scratch, Operand(src, 0)); 5196 __ mov_b(scratch, Operand(src, 0));
5249 __ mov_b(Operand(dest, 0), scratch); 5197 __ mov_b(Operand(dest, 0), scratch);
5250 __ add(Operand(src), Immediate(1)); 5198 __ add(Operand(src), Immediate(1));
5251 __ add(Operand(dest), Immediate(1)); 5199 __ add(Operand(dest), Immediate(1));
5252 } else { 5200 } else {
5253 __ mov_w(scratch, Operand(src, 0)); 5201 __ mov_w(scratch, Operand(src, 0));
(...skipping 26 matching lines...) Expand all
5280 Label done; 5228 Label done;
5281 __ test(count, Operand(count)); 5229 __ test(count, Operand(count));
5282 __ j(zero, &done); 5230 __ j(zero, &done);
5283 5231
5284 // Make count the number of bytes to copy. 5232 // Make count the number of bytes to copy.
5285 if (!ascii) { 5233 if (!ascii) {
5286 __ shl(count, 1); 5234 __ shl(count, 1);
5287 } 5235 }
5288 5236
5289 // Don't enter the rep movs if there are less than 4 bytes to copy. 5237 // Don't enter the rep movs if there are less than 4 bytes to copy.
5290 NearLabel last_bytes; 5238 Label last_bytes;
5291 __ test(count, Immediate(~3)); 5239 __ test(count, Immediate(~3));
5292 __ j(zero, &last_bytes); 5240 __ j(zero, &last_bytes, Label::kNear);
5293 5241
5294 // Copy from edi to esi using rep movs instruction. 5242 // Copy from edi to esi using rep movs instruction.
5295 __ mov(scratch, count); 5243 __ mov(scratch, count);
5296 __ sar(count, 2); // Number of doublewords to copy. 5244 __ sar(count, 2); // Number of doublewords to copy.
5297 __ cld(); 5245 __ cld();
5298 __ rep_movs(); 5246 __ rep_movs();
5299 5247
5300 // Find number of bytes left. 5248 // Find number of bytes left.
5301 __ mov(count, scratch); 5249 __ mov(count, scratch);
5302 __ and_(count, 3); 5250 __ and_(count, 3);
5303 5251
5304 // Check if there are more bytes to copy. 5252 // Check if there are more bytes to copy.
5305 __ bind(&last_bytes); 5253 __ bind(&last_bytes);
5306 __ test(count, Operand(count)); 5254 __ test(count, Operand(count));
5307 __ j(zero, &done); 5255 __ j(zero, &done);
5308 5256
5309 // Copy remaining characters. 5257 // Copy remaining characters.
5310 NearLabel loop; 5258 Label loop;
5311 __ bind(&loop); 5259 __ bind(&loop);
5312 __ mov_b(scratch, Operand(src, 0)); 5260 __ mov_b(scratch, Operand(src, 0));
5313 __ mov_b(Operand(dest, 0), scratch); 5261 __ mov_b(Operand(dest, 0), scratch);
5314 __ add(Operand(src), Immediate(1)); 5262 __ add(Operand(src), Immediate(1));
5315 __ add(Operand(dest), Immediate(1)); 5263 __ add(Operand(dest), Immediate(1));
5316 __ sub(Operand(count), Immediate(1)); 5264 __ sub(Operand(count), Immediate(1));
5317 __ j(not_zero, &loop); 5265 __ j(not_zero, &loop);
5318 5266
5319 __ bind(&done); 5267 __ bind(&done);
5320 } 5268 }
5321 5269
5322 5270
5323 void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm, 5271 void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
5324 Register c1, 5272 Register c1,
5325 Register c2, 5273 Register c2,
5326 Register scratch1, 5274 Register scratch1,
5327 Register scratch2, 5275 Register scratch2,
5328 Register scratch3, 5276 Register scratch3,
5329 Label* not_probed, 5277 Label* not_probed,
5330 Label* not_found) { 5278 Label* not_found) {
5331 // Register scratch3 is the general scratch register in this function. 5279 // Register scratch3 is the general scratch register in this function.
5332 Register scratch = scratch3; 5280 Register scratch = scratch3;
5333 5281
5334 // Make sure that both characters are not digits as such strings has a 5282 // Make sure that both characters are not digits as such strings has a
5335 // different hash algorithm. Don't try to look for these in the symbol table. 5283 // different hash algorithm. Don't try to look for these in the symbol table.
5336 NearLabel not_array_index; 5284 Label not_array_index;
5337 __ mov(scratch, c1); 5285 __ mov(scratch, c1);
5338 __ sub(Operand(scratch), Immediate(static_cast<int>('0'))); 5286 __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
5339 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0'))); 5287 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
5340 __ j(above, &not_array_index); 5288 __ j(above, &not_array_index, Label::kNear);
5341 __ mov(scratch, c2); 5289 __ mov(scratch, c2);
5342 __ sub(Operand(scratch), Immediate(static_cast<int>('0'))); 5290 __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
5343 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0'))); 5291 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
5344 __ j(below_equal, not_probed); 5292 __ j(below_equal, not_probed);
5345 5293
5346 __ bind(&not_array_index); 5294 __ bind(&not_array_index);
5347 // Calculate the two character string hash. 5295 // Calculate the two character string hash.
5348 Register hash = scratch1; 5296 Register hash = scratch1;
5349 GenerateHashInit(masm, hash, c1, scratch); 5297 GenerateHashInit(masm, hash, c1, scratch);
5350 GenerateHashAddCharacter(masm, hash, c2, scratch); 5298 GenerateHashAddCharacter(masm, hash, c2, scratch);
(...skipping 137 matching lines...) Expand 10 before | Expand all | Expand 10 after
5488 // hash ^= hash >> 11; 5436 // hash ^= hash >> 11;
5489 __ mov(scratch, hash); 5437 __ mov(scratch, hash);
5490 __ sar(scratch, 11); 5438 __ sar(scratch, 11);
5491 __ xor_(hash, Operand(scratch)); 5439 __ xor_(hash, Operand(scratch));
5492 // hash += hash << 15; 5440 // hash += hash << 15;
5493 __ mov(scratch, hash); 5441 __ mov(scratch, hash);
5494 __ shl(scratch, 15); 5442 __ shl(scratch, 15);
5495 __ add(hash, Operand(scratch)); 5443 __ add(hash, Operand(scratch));
5496 5444
5497 // if (hash == 0) hash = 27; 5445 // if (hash == 0) hash = 27;
5498 NearLabel hash_not_zero; 5446 Label hash_not_zero;
5499 __ test(hash, Operand(hash)); 5447 __ test(hash, Operand(hash));
5500 __ j(not_zero, &hash_not_zero); 5448 __ j(not_zero, &hash_not_zero, Label::kNear);
5501 __ mov(hash, Immediate(27)); 5449 __ mov(hash, Immediate(27));
5502 __ bind(&hash_not_zero); 5450 __ bind(&hash_not_zero);
5503 } 5451 }
5504 5452
5505 5453
5506 void SubStringStub::Generate(MacroAssembler* masm) { 5454 void SubStringStub::Generate(MacroAssembler* masm) {
5507 Label runtime; 5455 Label runtime;
5508 5456
5509 // Stack frame on entry. 5457 // Stack frame on entry.
5510 // esp[0]: return address 5458 // esp[0]: return address
(...skipping 134 matching lines...) Expand 10 before | Expand all | Expand 10 after
5645 __ bind(&return_eax); 5593 __ bind(&return_eax);
5646 __ IncrementCounter(counters->sub_string_native(), 1); 5594 __ IncrementCounter(counters->sub_string_native(), 1);
5647 __ ret(3 * kPointerSize); 5595 __ ret(3 * kPointerSize);
5648 5596
5649 // Just jump to runtime to create the sub string. 5597 // Just jump to runtime to create the sub string.
5650 __ bind(&runtime); 5598 __ bind(&runtime);
5651 __ TailCallRuntime(Runtime::kSubString, 3, 1); 5599 __ TailCallRuntime(Runtime::kSubString, 3, 1);
5652 } 5600 }
5653 5601
5654 5602
// Emits an equality-only comparison of two flat ascii strings in |left| and
// |right|.  Returns Smi::FromInt(EQUAL) or Smi::FromInt(NOT_EQUAL) in eax
// and pops no arguments (ret(0)).  Clobbers |scratch1| (used as the length)
// and |scratch2|, and — via GenerateAsciiCharsCompareLoop — the |left| and
// |right| registers themselves.
void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
                                                      Register left,
                                                      Register right,
                                                      Register scratch1,
                                                      Register scratch2) {
  Register length = scratch1;

  // Compare lengths: different lengths means not equal, no need to look at
  // characters at all.
  Label strings_not_equal, check_zero_length;
  __ mov(length, FieldOperand(left, String::kLengthOffset));
  __ cmp(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ bind(&strings_not_equal);
  __ Set(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
  __ ret(0);

  // Check if the length is zero.  A zero smi length is a zero machine word,
  // so a plain test suffices (see STATIC_ASSERT below).
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);
  __ test(length, Operand(length));
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Compare characters; jumps to strings_not_equal on the first mismatch.
  __ bind(&compare_chars);
  GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
                                &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);
}
5637
5638
// Emits a three-way comparison of two flat ascii strings in |left| and
// |right|.  Returns Smi::FromInt(LESS/EQUAL/GREATER) in eax and pops no
// arguments (ret(0)).  Clobbers scratch1 (min length), scratch2, scratch3
// (length delta), and — via GenerateAsciiCharsCompareLoop — |left| and
// |right| themselves.
void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);

  // Find minimum length.  scratch3 keeps left_length - right_length (as
  // smis) so ties in the common prefix can be broken by length below.
  Label left_shorter;
  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
  __ mov(scratch3, scratch1);
  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));

  Register length_delta = scratch3;

  __ j(less_equal, &left_shorter, Label::kNear);
  // Right string is shorter. Change scratch1 to be length of right string.
  __ sub(scratch1, Operand(length_delta));
  __ bind(&left_shorter);

  Register min_length = scratch1;

  // If either length is zero, just compare lengths.
  Label compare_lengths;
  __ test(min_length, Operand(min_length));
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare characters of the common prefix; jumps to result_not_equal with
  // the mismatching characters' comparison in the flags.
  Label result_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
                                &result_not_equal, Label::kNear);

  // Compare lengths - strings up to min-length are equal.
  __ bind(&compare_lengths);
  __ test(length_delta, Operand(length_delta));
  __ j(not_zero, &result_not_equal, Label::kNear);

  // Result is EQUAL.
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // result_not_equal is reached either from the char loop (flags hold the
  // byte comparison) or from the length check (flags hold length_delta's
  // sign); in both cases "greater" selects GREATER.
  Label result_greater;
  __ bind(&result_not_equal);
  __ j(greater, &result_greater, Label::kNear);

  // Result is LESS.
  __ Set(eax, Immediate(Smi::FromInt(LESS)));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Set(eax, Immediate(Smi::FromInt(GREATER)));
  __ ret(0);
}
5738 5697
5739 5698
// Emits a byte-wise compare of |length| (a smi on entry) characters of the
// two flat ascii strings in |left| and |right|.  Jumps to |chars_not_equal|
// on the first mismatch, with the flags still reflecting that byte
// comparison; falls through when all characters match.  Clobbers |left|,
// |right|, |length| (untagged and negated into the loop index) and
// |scratch|.
void StringCompareStub::GenerateAsciiCharsCompareLoop(
    MacroAssembler* masm,
    Register left,
    Register right,
    Register length,
    Register scratch,
    Label* chars_not_equal,
    Label::Distance chars_not_equal_near) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiUntag(length);
  __ lea(left,
         FieldOperand(left, length, times_1, SeqAsciiString::kHeaderSize));
  __ lea(right,
         FieldOperand(right, length, times_1, SeqAsciiString::kHeaderSize));
  __ neg(length);
  Register index = length;  // index = -length;

  // Compare loop: one byte per iteration; the add updates ZF so the j at
  // the bottom terminates the loop when index reaches zero.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, chars_not_equal_near);
  __ add(Operand(index), Immediate(1));
  __ j(not_zero, &loop);
}
5727
5728
// StringCompareStub entry point.  Compares the two strings passed on the
// stack and returns a smi in eax: EQUAL when both operands are the same
// object, otherwise LESS/EQUAL/GREATER from the flat-ascii fast path, or
// the result of the Runtime::kStringCompare call for anything that is not
// a pair of sequential ascii strings.
void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: right string
  //  esp[8]: left string

  __ mov(edx, Operand(esp, 2 * kPointerSize));  // left
  __ mov(eax, Operand(esp, 1 * kPointerSize));  // right

  // Identical objects compare equal without inspecting contents.
  Label not_same;
  __ cmp(edx, Operand(eax));
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both objects are sequential ascii strings.
  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);

  // Compare flat ascii strings.
  // Drop arguments from the stack, keeping the return address on top,
  // because GenerateCompareFlatAsciiStrings returns with ret(0).
  __ pop(ecx);
  __ add(Operand(esp), Immediate(2 * kPointerSize));
  __ push(ecx);
  GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}
5777 5766
5778 5767
// CompareIC stub specialized for two smi operands (edx, eax).  Returns the
// comparison result in eax: for equality just the difference (zero iff
// equal); otherwise a value whose sign orders edx relative to eax, with an
// overflow fixup.  Falls back to GenerateMiss when either operand is not a
// smi.
void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SMIS);
  Label miss;
  // OR-ing the operands lets one tag test cover both: any set tag bit means
  // at least one operand is not a smi.
  __ mov(ecx, Operand(edx));
  __ or_(ecx, Operand(eax));
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ sub(eax, Operand(edx));
  } else {
    Label done;
    __ sub(edx, Operand(eax));
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
    __ not_(edx);
    __ bind(&done);
    __ mov(eax, edx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
5804 5793
5805 5794
// CompareIC stub specialized for two heap-number operands (edx, eax).
// Inlines the double comparison with SSE2/CMOV when available, returning a
// smi -1/0/1 in eax; NaN operands, missing CPU features, or smi inputs fall
// through to the generic CompareStub, and non-heap-number heap objects go
// to GenerateMiss.
void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::HEAP_NUMBERS);

  Label generic_stub;
  Label unordered;
  Label miss;
  // AND-ing the operands: the smi tag bit of the result is zero when at
  // least one operand is a smi, which this specialized stub does not handle.
  __ mov(ecx, Operand(edx));
  __ and_(ecx, Operand(eax));
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(zero, &generic_stub, Label::kNear);

  __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
  __ j(not_equal, &miss, Label::kNear);
  __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
  __ j(not_equal, &miss, Label::kNear);

  // Inlining the double comparison and falling back to the general compare
  // stub if NaN is involved or SSE2 or CMOV is unsupported.
  if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) {
    CpuFeatures::Scope scope1(SSE2);
    CpuFeatures::Scope scope2(CMOV);

    // Load left and right operand.
    __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
    __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));

    // Compare operands.
    __ ucomisd(xmm0, xmm1);

    // Don't base result on EFLAGS when a NaN is involved: ucomisd signals
    // an unordered compare through the parity flag.
    __ j(parity_even, &unordered, Label::kNear);

    // Return a result of -1, 0, or 1, based on EFLAGS.
    // Performing mov, because xor would destroy the flag register.
    __ mov(eax, 0);  // equal
    __ mov(ecx, Immediate(Smi::FromInt(1)));
    __ cmov(above, eax, Operand(ecx));
    __ mov(ecx, Immediate(Smi::FromInt(-1)));
    __ cmov(below, eax, Operand(ecx));
    __ ret(0);

    __ bind(&unordered);
  }

  CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
  __ bind(&generic_stub);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&miss);
  GenerateMiss(masm);
}
5857 5846
5858 5847
// CompareIC stub specialized for two symbol operands (edx, eax).  Symbols
// are interned, so they can be compared by identity: returns
// Smi::FromInt(EQUAL) (i.e. zero) in eax when both registers hold the same
// symbol, and leaves the (non-zero) right operand in eax otherwise.  Any
// smi or non-symbol operand goes to GenerateMiss.  Only used for equality
// comparisons (see the GetCondition assert).
void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SYMBOLS);
  ASSERT(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;

  // Check that both operands are heap objects.  AND-ing clears the tag bit
  // of the result if either operand is a smi.
  Label miss;
  __ mov(tmp1, Operand(left));
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, Operand(right));
  __ test(tmp1, Immediate(kSmiTagMask));
  __ j(zero, &miss, Label::kNear);

  // Check that both operands are symbols: AND the instance types so one
  // kIsSymbolMask test covers both (relies on kSymbolTag != 0).
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kSymbolTag != 0);
  __ and_(tmp1, Operand(tmp2));
  __ test(tmp1, Immediate(kIsSymbolMask));
  __ j(zero, &miss, Label::kNear);

  // Symbols are compared by identity.
  Label done;
  __ cmp(left, Operand(right));
  // Make sure eax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  ASSERT(right.is(eax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
5892
5893
5859 void ICCompareStub::GenerateStrings(MacroAssembler* masm) { 5894 void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
5860 ASSERT(state_ == CompareIC::STRINGS); 5895 ASSERT(state_ == CompareIC::STRINGS);
5896 ASSERT(GetCondition() == equal);
5861 Label miss; 5897 Label miss;
5862 5898
5863 // Registers containing left and right operands respectively. 5899 // Registers containing left and right operands respectively.
5864 Register left = edx; 5900 Register left = edx;
5865 Register right = eax; 5901 Register right = eax;
5866 Register tmp1 = ecx; 5902 Register tmp1 = ecx;
5867 Register tmp2 = ebx; 5903 Register tmp2 = ebx;
5868 Register tmp3 = edi; 5904 Register tmp3 = edi;
5869 5905
5870 // Check that both operands are heap objects. 5906 // Check that both operands are heap objects.
5871 __ mov(tmp1, Operand(left)); 5907 __ mov(tmp1, Operand(left));
5872 STATIC_ASSERT(kSmiTag == 0); 5908 STATIC_ASSERT(kSmiTag == 0);
5873 __ and_(tmp1, Operand(right)); 5909 __ and_(tmp1, Operand(right));
5874 __ test(tmp1, Immediate(kSmiTagMask)); 5910 __ test(tmp1, Immediate(kSmiTagMask));
5875 __ j(zero, &miss); 5911 __ j(zero, &miss);
5876 5912
5877 // Check that both operands are strings. This leaves the instance 5913 // Check that both operands are strings. This leaves the instance
5878 // types loaded in tmp1 and tmp2. 5914 // types loaded in tmp1 and tmp2.
5879 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); 5915 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
5880 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); 5916 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
5881 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); 5917 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
5882 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); 5918 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
5883 __ mov(tmp3, tmp1); 5919 __ mov(tmp3, tmp1);
5884 STATIC_ASSERT(kNotStringTag != 0); 5920 STATIC_ASSERT(kNotStringTag != 0);
5885 __ or_(tmp3, Operand(tmp2)); 5921 __ or_(tmp3, Operand(tmp2));
5886 __ test(tmp3, Immediate(kIsNotStringMask)); 5922 __ test(tmp3, Immediate(kIsNotStringMask));
5887 __ j(not_zero, &miss); 5923 __ j(not_zero, &miss);
5888 5924
5889 // Fast check for identical strings. 5925 // Fast check for identical strings.
5890 NearLabel not_same; 5926 Label not_same;
5891 __ cmp(left, Operand(right)); 5927 __ cmp(left, Operand(right));
5892 __ j(not_equal, &not_same); 5928 __ j(not_equal, &not_same, Label::kNear);
5893 STATIC_ASSERT(EQUAL == 0); 5929 STATIC_ASSERT(EQUAL == 0);
5894 STATIC_ASSERT(kSmiTag == 0); 5930 STATIC_ASSERT(kSmiTag == 0);
5895 __ Set(eax, Immediate(Smi::FromInt(EQUAL))); 5931 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
5896 __ ret(0); 5932 __ ret(0);
5897 5933
5898 // Handle not identical strings. 5934 // Handle not identical strings.
5899 __ bind(&not_same); 5935 __ bind(&not_same);
5900 5936
5901 // Check that both strings are symbols. If they are, we're done 5937 // Check that both strings are symbols. If they are, we're done
5902 // because we already know they are not identical. 5938 // because we already know they are not identical.
5903 NearLabel do_compare; 5939 Label do_compare;
5904 ASSERT(GetCondition() == equal);
5905 STATIC_ASSERT(kSymbolTag != 0); 5940 STATIC_ASSERT(kSymbolTag != 0);
5906 __ and_(tmp1, Operand(tmp2)); 5941 __ and_(tmp1, Operand(tmp2));
5907 __ test(tmp1, Immediate(kIsSymbolMask)); 5942 __ test(tmp1, Immediate(kIsSymbolMask));
5908 __ j(zero, &do_compare); 5943 __ j(zero, &do_compare, Label::kNear);
5909 // Make sure eax is non-zero. At this point input operands are 5944 // Make sure eax is non-zero. At this point input operands are
5910 // guaranteed to be non-zero. 5945 // guaranteed to be non-zero.
5911 ASSERT(right.is(eax)); 5946 ASSERT(right.is(eax));
5912 __ ret(0); 5947 __ ret(0);
5913 5948
5914 // Check that both strings are sequential ASCII. 5949 // Check that both strings are sequential ASCII.
5915 Label runtime; 5950 Label runtime;
5916 __ bind(&do_compare); 5951 __ bind(&do_compare);
5917 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime); 5952 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
5918 5953
5919 // Compare flat ASCII strings. Returns when done. 5954 // Compare flat ASCII strings. Returns when done.
5920 StringCompareStub::GenerateCompareFlatAsciiStrings( 5955 StringCompareStub::GenerateFlatAsciiStringEquals(
5921 masm, left, right, tmp1, tmp2, tmp3); 5956 masm, left, right, tmp1, tmp2);
5922 5957
5923 // Handle more complex cases in runtime. 5958 // Handle more complex cases in runtime.
5924 __ bind(&runtime); 5959 __ bind(&runtime);
5925 __ pop(tmp1); // Return address. 5960 __ pop(tmp1); // Return address.
5926 __ push(left); 5961 __ push(left);
5927 __ push(right); 5962 __ push(right);
5928 __ push(tmp1); 5963 __ push(tmp1);
5929 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 5964 __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
5930 5965
5931 __ bind(&miss); 5966 __ bind(&miss);
5932 GenerateMiss(masm); 5967 GenerateMiss(masm);
5933 } 5968 }
5934 5969
5935 5970
5936 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { 5971 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
5937 ASSERT(state_ == CompareIC::OBJECTS); 5972 ASSERT(state_ == CompareIC::OBJECTS);
5938 NearLabel miss; 5973 Label miss;
5939 __ mov(ecx, Operand(edx)); 5974 __ mov(ecx, Operand(edx));
5940 __ and_(ecx, Operand(eax)); 5975 __ and_(ecx, Operand(eax));
5941 __ test(ecx, Immediate(kSmiTagMask)); 5976 __ test(ecx, Immediate(kSmiTagMask));
5942 __ j(zero, &miss, not_taken); 5977 __ j(zero, &miss, Label::kNear);
5943 5978
5944 __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx); 5979 __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
5945 __ j(not_equal, &miss, not_taken); 5980 __ j(not_equal, &miss, Label::kNear);
5946 __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx); 5981 __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
5947 __ j(not_equal, &miss, not_taken); 5982 __ j(not_equal, &miss, Label::kNear);
5948 5983
5949 ASSERT(GetCondition() == equal); 5984 ASSERT(GetCondition() == equal);
5950 __ sub(eax, Operand(edx)); 5985 __ sub(eax, Operand(edx));
5951 __ ret(0); 5986 __ ret(0);
5952 5987
5953 __ bind(&miss); 5988 __ bind(&miss);
5954 GenerateMiss(masm); 5989 GenerateMiss(masm);
5955 } 5990 }
5956 5991
5957 5992
(...skipping 21 matching lines...) Expand all
5979 __ pop(ecx); 6014 __ pop(ecx);
5980 __ pop(eax); 6015 __ pop(eax);
5981 __ pop(edx); 6016 __ pop(edx);
5982 __ push(ecx); 6017 __ push(ecx);
5983 6018
5984 // Do a tail call to the rewritten stub. 6019 // Do a tail call to the rewritten stub.
5985 __ jmp(Operand(edi)); 6020 __ jmp(Operand(edi));
5986 } 6021 }
5987 6022
5988 6023
6024 // Helper function used to check that the dictionary doesn't contain
6025 // the property. This function may return false negatives, so miss_label
6026 // must always call a backup property check that is complete.
6027 // This function is safe to call if the receiver has fast properties.
6028 // Name must be a symbol and receiver must be a heap object.
6029 MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
6030 MacroAssembler* masm,
6031 Label* miss,
6032 Label* done,
6033 Register properties,
6034 String* name,
6035 Register r0) {
6036 ASSERT(name->IsSymbol());
6037
6038 // If names of slots in range from 1 to kProbes - 1 for the hash value are
6039 // not equal to the name and kProbes-th slot is not used (its name is the
6040 // undefined value), it guarantees the hash table doesn't contain the
6041 // property. It's true even if some slots represent deleted properties
6042 // (their names are the null value).
6043 for (int i = 0; i < kInlinedProbes; i++) {
6044 // Compute the masked index: (hash + i + i * i) & mask.
6045 Register index = r0;
6046 // Capacity is smi 2^n.
6047 __ mov(index, FieldOperand(properties, kCapacityOffset));
6048 __ dec(index);
6049 __ and_(Operand(index),
6050 Immediate(Smi::FromInt(name->Hash() +
6051 StringDictionary::GetProbeOffset(i))));
6052
6053 // Scale the index by multiplying by the entry size.
6054 ASSERT(StringDictionary::kEntrySize == 3);
6055 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
6056 Register entity_name = r0;
6057 // Having undefined at this place means the name is not contained.
6058 ASSERT_EQ(kSmiTagSize, 1);
6059 __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
6060 kElementsStartOffset - kHeapObjectTag));
6061 __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
6062 __ j(equal, done);
6063
6064 // Stop if found the property.
6065 __ cmp(entity_name, Handle<String>(name));
6066 __ j(equal, miss);
6067
6068 // Check if the entry name is not a symbol.
6069 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
6070 __ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset),
6071 kIsSymbolMask);
6072 __ j(zero, miss);
6073 }
6074
6075 StringDictionaryLookupStub stub(properties,
6076 r0,
6077 r0,
6078 StringDictionaryLookupStub::NEGATIVE_LOOKUP);
6079 __ push(Immediate(Handle<Object>(name)));
6080 __ push(Immediate(name->Hash()));
6081 MaybeObject* result = masm->TryCallStub(&stub);
6082 if (result->IsFailure()) return result;
6083 __ test(r0, Operand(r0));
6084 __ j(not_zero, miss);
6085 __ jmp(done);
6086 return result;
6087 }
6088
6089
6090 // Probe the string dictionary in the |elements| register. Jump to the
6091 // |done| label if a property with the given name is found leaving the
6092 // index into the dictionary in |r0|. Jump to the |miss| label
6093 // otherwise.
6094 void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
6095 Label* miss,
6096 Label* done,
6097 Register elements,
6098 Register name,
6099 Register r0,
6100 Register r1) {
6101 // Assert that name contains a string.
6102 if (FLAG_debug_code) __ AbortIfNotString(name);
6103
6104 __ mov(r1, FieldOperand(elements, kCapacityOffset));
6105 __ shr(r1, kSmiTagSize); // convert smi to int
6106 __ dec(r1);
6107
6108 // Generate an unrolled loop that performs a few probes before
6109 // giving up. Measurements done on Gmail indicate that 2 probes
6110 // cover ~93% of loads from dictionaries.
6111 for (int i = 0; i < kInlinedProbes; i++) {
6112 // Compute the masked index: (hash + i + i * i) & mask.
6113 __ mov(r0, FieldOperand(name, String::kHashFieldOffset));
6114 __ shr(r0, String::kHashShift);
6115 if (i > 0) {
6116 __ add(Operand(r0), Immediate(StringDictionary::GetProbeOffset(i)));
6117 }
6118 __ and_(r0, Operand(r1));
6119
6120 // Scale the index by multiplying by the entry size.
6121 ASSERT(StringDictionary::kEntrySize == 3);
6122 __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3
6123
6124 // Check if the key is identical to the name.
6125 __ cmp(name, Operand(elements,
6126 r0,
6127 times_4,
6128 kElementsStartOffset - kHeapObjectTag));
6129 __ j(equal, done);
6130 }
6131
6132 StringDictionaryLookupStub stub(elements,
6133 r1,
6134 r0,
6135 POSITIVE_LOOKUP);
6136 __ push(name);
6137 __ mov(r0, FieldOperand(name, String::kHashFieldOffset));
6138 __ shr(r0, String::kHashShift);
6139 __ push(r0);
6140 __ CallStub(&stub);
6141
6142 __ test(r1, Operand(r1));
6143 __ j(zero, miss);
6144 __ jmp(done);
6145 }
6146
6147
6148 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
6149 // Stack frame on entry:
6150 // esp[0 * kPointerSize]: return address.
6151 // esp[1 * kPointerSize]: key's hash.
6152 // esp[2 * kPointerSize]: key.
6153 // Registers:
6154 // dictionary_: StringDictionary to probe.
6155 // result_: used as scratch.
6156 // index_: will hold an index of entry if lookup is successful.
6157 // might alias with result_.
6158 // Returns:
6159 // result_ is zero if lookup failed, non zero otherwise.
6160
6161 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
6162
6163 Register scratch = result_;
6164
6165 __ mov(scratch, FieldOperand(dictionary_, kCapacityOffset));
6166 __ dec(scratch);
6167 __ SmiUntag(scratch);
6168 __ push(scratch);
6169
6170 // If names of slots in range from 1 to kProbes - 1 for the hash value are
6171 // not equal to the name and kProbes-th slot is not used (its name is the
6172 // undefined value), it guarantees the hash table doesn't contain the
6173 // property. It's true even if some slots represent deleted properties
6174 // (their names are the null value).
6175 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
6176 // Compute the masked index: (hash + i + i * i) & mask.
6177 __ mov(scratch, Operand(esp, 2 * kPointerSize));
6178 if (i > 0) {
6179 __ add(Operand(scratch),
6180 Immediate(StringDictionary::GetProbeOffset(i)));
6181 }
6182 __ and_(scratch, Operand(esp, 0));
6183
6184 // Scale the index by multiplying by the entry size.
6185 ASSERT(StringDictionary::kEntrySize == 3);
6186 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3.
6187
6188 // Having undefined at this place means the name is not contained.
6189 ASSERT_EQ(kSmiTagSize, 1);
6190 __ mov(scratch, Operand(dictionary_,
6191 index_,
6192 times_pointer_size,
6193 kElementsStartOffset - kHeapObjectTag));
6194 __ cmp(scratch, masm->isolate()->factory()->undefined_value());
6195 __ j(equal, &not_in_dictionary);
6196
6197 // Stop if found the property.
6198 __ cmp(scratch, Operand(esp, 3 * kPointerSize));
6199 __ j(equal, &in_dictionary);
6200
6201 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
6202 // If we hit a non symbol key during negative lookup
6203 // we have to bailout as this key might be equal to the
6204 // key we are looking for.
6205
6206 // Check if the entry name is not a symbol.
6207 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
6208 __ test_b(FieldOperand(scratch, Map::kInstanceTypeOffset),
6209 kIsSymbolMask);
6210 __ j(zero, &maybe_in_dictionary);
6211 }
6212 }
6213
6214 __ bind(&maybe_in_dictionary);
6215 // If we are doing negative lookup then probing failure should be
6216 // treated as a lookup success. For positive lookup probing failure
6217 // should be treated as lookup failure.
6218 if (mode_ == POSITIVE_LOOKUP) {
6219 __ mov(result_, Immediate(0));
6220 __ Drop(1);
6221 __ ret(2 * kPointerSize);
6222 }
6223
6224 __ bind(&in_dictionary);
6225 __ mov(result_, Immediate(1));
6226 __ Drop(1);
6227 __ ret(2 * kPointerSize);
6228
6229 __ bind(&not_in_dictionary);
6230 __ mov(result_, Immediate(0));
6231 __ Drop(1);
6232 __ ret(2 * kPointerSize);
6233 }
5989 // Takes the input in 3 registers: address_ value_ and object_. A pointer to 6234 // Takes the input in 3 registers: address_ value_ and object_. A pointer to
5990 // the value has just been written into the object, now this stub makes sure 6235 // the value has just been written into the object, now this stub makes sure
5991 // we keep the GC informed. The word in the object where the value has been 6236 // we keep the GC informed. The word in the object where the value has been
5992 // written is in the address register. 6237 // written is in the address register.
5993 void RecordWriteStub::Generate(MacroAssembler* masm) { 6238 void RecordWriteStub::Generate(MacroAssembler* masm) {
5994 NearLabel skip_non_incremental_part; 6239 Label skip_non_incremental_part;
5995 __ jmp(&skip_non_incremental_part); 6240 __ jmp(&skip_non_incremental_part, Label::kNear);
5996 if (!HEAP->incremental_marking()->IsMarking()) { 6241 if (!HEAP->incremental_marking()->IsMarking()) {
5997 ASSERT(masm->get_opcode(-2) == kSkipNonIncrementalPartInstruction); 6242 ASSERT(masm->get_opcode(-2) == kSkipNonIncrementalPartInstruction);
5998 masm->set_opcode(-2, kTwoByteNopInstruction); 6243 masm->set_opcode(-2, kTwoByteNopInstruction);
5999 } 6244 }
6000 6245
6001 if (emit_remembered_set_ == EMIT_REMEMBERED_SET) { 6246 if (emit_remembered_set_ == EMIT_REMEMBERED_SET) {
6002 NearLabel skip;
6003 __ RememberedSetHelper(address_, value_, save_fp_regs_mode_); 6247 __ RememberedSetHelper(address_, value_, save_fp_regs_mode_);
6004 __ bind(&skip);
6005 } 6248 }
6006 __ ret(0); 6249 __ ret(0);
6007 6250
6008 __ bind(&skip_non_incremental_part); 6251 __ bind(&skip_non_incremental_part);
6009 __ mov(value_, Operand(address_, 0)); 6252 __ mov(value_, Operand(address_, 0));
6010 GenerateIncremental(masm); 6253 GenerateIncremental(masm);
6011 } 6254 }
6012 6255
6013 6256
6014 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm) { 6257 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm) {
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after
6079 regs_.scratch1(), 6322 regs_.scratch1(),
6080 &value_in_new_space_object_is_black_no_remembered_set); 6323 &value_in_new_space_object_is_black_no_remembered_set);
6081 regs_.Restore(masm); 6324 regs_.Restore(masm);
6082 __ ret(0); 6325 __ ret(0);
6083 } 6326 }
6084 6327
6085 6328
6086 void RecordWriteStub:: 6329 void RecordWriteStub::
6087 GenerateIncrementalValueIsInNewSpaceObjectIsInOldSpaceRememberedSet( 6330 GenerateIncrementalValueIsInNewSpaceObjectIsInOldSpaceRememberedSet(
6088 MacroAssembler* masm) { 6331 MacroAssembler* masm) {
6089 NearLabel object_is_black, must_inform_both; 6332 Label object_is_black, must_inform_both, must_inform_both_far;
6090 Label must_inform_both_far;
6091 6333
6092 // Lets look at the colour of the object: If it is not black we don't have to 6334 // Lets look at the colour of the object: If it is not black we don't have to
6093 // inform the incremental marker. 6335 // inform the incremental marker.
6094 __ InOldSpaceIsBlack(regs_.object(), 6336 __ InOldSpaceIsBlack(regs_.object(),
6095 regs_.scratch0(), 6337 regs_.scratch0(),
6096 regs_.scratch1(), 6338 regs_.scratch1(),
6097 &object_is_black); 6339 &object_is_black,
6340 Label::kNear);
6098 regs_.Restore(masm); 6341 regs_.Restore(masm);
6099 __ RememberedSetHelper(address_, value_, save_fp_regs_mode_); 6342 __ RememberedSetHelper(address_, value_, save_fp_regs_mode_);
6100 __ ret(0); 6343 __ ret(0);
6101 6344
6102 __ bind(&object_is_black); 6345 __ bind(&object_is_black);
6103 6346
6104 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); 6347 __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
6105 6348
6106 __ push(regs_.object()); 6349 __ push(regs_.object());
6107 __ EnsureNotWhite(regs_.scratch0(), // The value. 6350 __ EnsureNotWhite(regs_.scratch0(), // The value.
6108 regs_.scratch1(), // Scratch. 6351 regs_.scratch1(), // Scratch.
6109 regs_.object(), // Scratch. 6352 regs_.object(), // Scratch.
6110 &must_inform_both, 6353 &must_inform_both,
6354 Label::kNear,
6111 true); // In new space. 6355 true); // In new space.
6112 __ pop(regs_.object()); 6356 __ pop(regs_.object());
6113 regs_.Restore(masm); 6357 regs_.Restore(masm);
6114 __ RememberedSetHelper(address_, value_, save_fp_regs_mode_); 6358 __ RememberedSetHelper(address_, value_, save_fp_regs_mode_);
6115 __ ret(0); 6359 __ ret(0);
6116 6360
6117 __ bind(&must_inform_both); 6361 __ bind(&must_inform_both);
6118 // Both the incremental marker and the the remembered set have to be informed. 6362 // Both the incremental marker and the the remembered set have to be informed.
6119 __ pop(regs_.object()); 6363 __ pop(regs_.object());
6120 __ bind(&must_inform_both_far); 6364 __ bind(&must_inform_both_far);
(...skipping 15 matching lines...) Expand all
6136 regs_.Restore(masm); 6380 regs_.Restore(masm);
6137 __ RememberedSetHelper(address_, value_, save_fp_regs_mode_); 6381 __ RememberedSetHelper(address_, value_, save_fp_regs_mode_);
6138 __ ret(0); 6382 __ ret(0);
6139 } 6383 }
6140 6384
6141 6385
6142 void RecordWriteStub:: 6386 void RecordWriteStub::
6143 GenerateIncrementalValueIsInNewSpaceObjectIsInOldSpaceNoRememberedSet( 6387 GenerateIncrementalValueIsInNewSpaceObjectIsInOldSpaceNoRememberedSet(
6144 MacroAssembler* masm, 6388 MacroAssembler* masm,
6145 Label* value_in_new_space_object_is_black_no_remembered_set) { 6389 Label* value_in_new_space_object_is_black_no_remembered_set) {
6146 NearLabel object_is_black, inform_incremental_marker; 6390 Label object_is_black, inform_incremental_marker;
6147 6391
6148 __ InOldSpaceIsBlack(regs_.object(), 6392 __ InOldSpaceIsBlack(regs_.object(),
6149 regs_.scratch0(), 6393 regs_.scratch0(),
6150 regs_.scratch1(), 6394 regs_.scratch1(),
6151 &object_is_black); 6395 &object_is_black,
6396 Label::kNear);
6152 regs_.Restore(masm); 6397 regs_.Restore(masm);
6153 __ ret(0); 6398 __ ret(0);
6154 6399
6155 __ bind(&object_is_black); 6400 __ bind(&object_is_black);
6156 __ bind(value_in_new_space_object_is_black_no_remembered_set); 6401 __ bind(value_in_new_space_object_is_black_no_remembered_set);
6157 6402
6158 // Reload the value from the word in the object. 6403 // Reload the value from the word in the object.
6159 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); 6404 __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
6160 6405
6161 // We need one more scratch register in this case. Use the object register. 6406 // We need one more scratch register in this case. Use the object register.
6162 __ push(regs_.object()); 6407 __ push(regs_.object());
6163 6408
6164 // Make sure the value is not white. If we can't do that, jump to the label. 6409 // Make sure the value is not white. If we can't do that, jump to the label.
6165 __ EnsureNotWhite(regs_.scratch0(), // The value. 6410 __ EnsureNotWhite(regs_.scratch0(), // The value.
6166 regs_.scratch1(), // Scratch. 6411 regs_.scratch1(), // Scratch.
6167 regs_.object(), // Scratch. 6412 regs_.object(), // Scratch.
6168 &inform_incremental_marker, 6413 &inform_incremental_marker,
6414 Label::kNear,
6169 true); // In new space. 6415 true); // In new space.
6170 __ pop(regs_.object()); 6416 __ pop(regs_.object());
6171 regs_.Restore(masm); 6417 regs_.Restore(masm);
6172 __ ret(0); 6418 __ ret(0);
6173 6419
6174 __ bind(&inform_incremental_marker); 6420 __ bind(&inform_incremental_marker);
6175 __ pop(regs_.object()); 6421 __ pop(regs_.object());
6176 6422
6177 __ jmp(&slow_); 6423 __ jmp(&slow_);
6178 } 6424 }
6179 6425
6180 6426
6181 void RecordWriteStub::GenerateIncrementalValueIsInOldSpace( 6427 void RecordWriteStub::GenerateIncrementalValueIsInOldSpace(
6182 MacroAssembler* masm) { 6428 MacroAssembler* masm) {
6183 NearLabel value_is_white; 6429 Label value_is_white, value_in_old_space_and_white_object_in_new_space;
6184 Label value_in_old_space_and_white_object_in_new_space;
6185 // If the value is in old space then the remembered set doesn't care. We may 6430 // If the value is in old space then the remembered set doesn't care. We may
6186 // be able to avoid logging anything if the incremental marker doesn't care 6431 // be able to avoid logging anything if the incremental marker doesn't care
6187 // either. 6432 // either.
6188 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); 6433 __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
6189 __ push(regs_.object()); 6434 __ push(regs_.object());
6190 __ EnsureNotWhite(regs_.scratch0(), // The value. 6435 __ EnsureNotWhite(regs_.scratch0(), // The value.
6191 regs_.scratch1(), // Scratch. 6436 regs_.scratch1(), // Scratch.
6192 regs_.object(), // Scratch. 6437 regs_.object(), // Scratch.
6193 &value_is_white, 6438 &value_is_white,
6439 Label::kNear,
6194 false); // In old space. 6440 false); // In old space.
6195 __ pop(regs_.object()); 6441 __ pop(regs_.object());
6196 regs_.Restore(masm); 6442 regs_.Restore(masm);
6197 __ ret(0); 6443 __ ret(0);
6198 6444
6199 // The value is in old space and white. We have to find out which space the 6445 // The value is in old space and white. We have to find out which space the
6200 // object is in in order to find its colour. 6446 // object is in in order to find its colour.
6201 __ bind(&value_is_white); 6447 __ bind(&value_is_white);
6202 __ pop(regs_.object()); 6448 __ pop(regs_.object());
6203 __ InNewSpace(regs_.object(), 6449 __ InNewSpace(regs_.object(),
(...skipping 18 matching lines...) Expand all
6222 regs_.Restore(masm); 6468 regs_.Restore(masm);
6223 __ ret(0); 6469 __ ret(0);
6224 } 6470 }
6225 6471
6226 6472
6227 #undef __ 6473 #undef __
6228 6474
6229 } } // namespace v8::internal 6475 } } // namespace v8::internal
6230 6476
6231 #endif // V8_TARGET_ARCH_IA32 6477 #endif // V8_TARGET_ARCH_IA32
OLDNEW
« no previous file with comments | « src/ia32/code-stubs-ia32.h ('k') | src/ia32/frames-ia32.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698