Chromium Code Reviews

Side by Side Diff: src/ia32/macro-assembler-ia32.cc

Issue 6529032: Merge 6168:6800 from bleeding_edge to experimental/gc branch. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 9 years, 10 months ago
OLD | NEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
(...skipping 67 matching lines...)
79 CallStub(&store_buffer_overflow); 79 CallStub(&store_buffer_overflow);
80 bind(&no_overflow); 80 bind(&no_overflow);
81 } 81 }
82 82
83 83
84 void MacroAssembler::RecordWrite(Register object, 84 void MacroAssembler::RecordWrite(Register object,
85 int offset, 85 int offset,
86 Register value, 86 Register value,
87 Register scratch, 87 Register scratch,
88 SaveFPRegsMode save_fp) { 88 SaveFPRegsMode save_fp) {
89 // The compiled code assumes that record write doesn't change the
90 // context register, so we check that none of the clobbered
91 // registers are esi.
92 ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));
93
94 // First, check if a write barrier is even needed. The tests below 89 // First, check if a write barrier is even needed. The tests below
95 // catch stores of Smis and stores into young gen. 90 // catch stores of Smis and stores into young gen.
96 NearLabel done; 91 NearLabel done;
97 92
98 // Skip barrier if writing a smi. 93 // Skip barrier if writing a smi.
99 ASSERT_EQ(0, kSmiTag); 94 ASSERT_EQ(0, kSmiTag);
100 test(value, Immediate(kSmiTagMask)); 95 test(value, Immediate(kSmiTagMask));
101 j(zero, &done); 96 j(zero, &done);
102 97
103 InNewSpace(object, value, equal, &done); 98 InNewSpace(object, value, equal, &done);
(...skipping 27 matching lines...)
131 mov(value, Immediate(BitCast<int32_t>(kZapValue))); 126 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
132 mov(scratch, Immediate(BitCast<int32_t>(kZapValue))); 127 mov(scratch, Immediate(BitCast<int32_t>(kZapValue)));
133 } 128 }
134 } 129 }
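Note on the fast path above: both RecordWrite variants rely on V8's pointer tagging to skip the barrier cheaply. With kSmiTag == 0 and a one-bit kSmiTagMask (1 on ia32), the single test/j(zero, &done) pair filters out immediate (smi) stores before the more expensive InNewSpace check. A minimal C++ sketch of that predicate, using the same constants the assertions above reference:

    #include <cstdint>

    // Stand-alone version of the smi filter that test(value,
    // Immediate(kSmiTagMask)) + j(zero, &done) implements.
    const intptr_t kSmiTag = 0;
    const intptr_t kSmiTagMask = 1;

    inline bool IsSmiValue(intptr_t value) {
      // Smis have the tag bit clear, so storing one can never create a
      // heap pointer that the write barrier would need to record.
      return (value & kSmiTagMask) == kSmiTag;
    }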
135 130
136 131
137 void MacroAssembler::RecordWrite(Register object, 132 void MacroAssembler::RecordWrite(Register object,
138 Register address, 133 Register address,
139 Register value, 134 Register value,
140 SaveFPRegsMode save_fp) { 135 SaveFPRegsMode save_fp) {
141 // The compiled code assumes that record write doesn't change the
142 // context register, so we check that none of the clobbered
143 // registers are esi.
144 ASSERT(!object.is(esi) && !value.is(esi) && !address.is(esi));
145
146 // First, check if a write barrier is even needed. The tests below 136 // First, check if a write barrier is even needed. The tests below
147 // catch stores of Smis and stores into young gen. 137 // catch stores of Smis and stores into young gen.
148 Label done; 138 Label done;
149 139
150 // Skip barrier if writing a smi. 140 // Skip barrier if writing a smi.
151 ASSERT_EQ(0, kSmiTag); 141 ASSERT_EQ(0, kSmiTag);
152 test(value, Immediate(kSmiTagMask)); 142 test(value, Immediate(kSmiTagMask));
153 j(zero, &done); 143 j(zero, &done);
154 144
155 InNewSpace(object, value, equal, &done); 145 InNewSpace(object, value, equal, &done);
(...skipping 185 matching lines...)
341 mov(Operand::StaticVariable(context_address), esi); 331 mov(Operand::StaticVariable(context_address), esi);
342 } 332 }
343 333
344 334
345 void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) { 335 void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
346 // Optionally save all XMM registers. 336 // Optionally save all XMM registers.
347 if (save_doubles) { 337 if (save_doubles) {
348 CpuFeatures::Scope scope(SSE2); 338 CpuFeatures::Scope scope(SSE2);
349 int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize; 339 int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
350 sub(Operand(esp), Immediate(space)); 340 sub(Operand(esp), Immediate(space));
351 int offset = -2 * kPointerSize; 341 const int offset = -2 * kPointerSize;
352 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { 342 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
353 XMMRegister reg = XMMRegister::from_code(i); 343 XMMRegister reg = XMMRegister::from_code(i);
354 movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg); 344 movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
355 } 345 }
356 } else { 346 } else {
357 sub(Operand(esp), Immediate(argc * kPointerSize)); 347 sub(Operand(esp), Immediate(argc * kPointerSize));
358 } 348 }
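For reference, the save-area arithmetic here: eight XMM registers at kDoubleSize (8) bytes each, plus the argument slots, laid out downward starting two pointer-sized words below ebp. A small sketch of the offset computation the movdbl loop uses, assuming ia32 sizes:

    // Mirrors offset - ((i + 1) * kDoubleSize) from the loop above.
    const int kPointerSize = 4;
    const int kDoubleSize = 8;

    int XMMSlotOffsetFromEbp(int i) {
      const int offset = -2 * kPointerSize;   // below the two words the prologue stores
      return offset - (i + 1) * kDoubleSize;  // xmm0 at ebp-16, ..., xmm7 at ebp-72
    }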
359 349
360 // Get the required frame alignment for the OS. 350 // Get the required frame alignment for the OS.
361 static const int kFrameAlignment = OS::ActivationFrameAlignment(); 351 static const int kFrameAlignment = OS::ActivationFrameAlignment();
(...skipping 22 matching lines...)
384 void MacroAssembler::EnterApiExitFrame(int argc) { 374 void MacroAssembler::EnterApiExitFrame(int argc) {
385 EnterExitFramePrologue(); 375 EnterExitFramePrologue();
386 EnterExitFrameEpilogue(argc, false); 376 EnterExitFrameEpilogue(argc, false);
387 } 377 }
388 378
389 379
390 void MacroAssembler::LeaveExitFrame(bool save_doubles) { 380 void MacroAssembler::LeaveExitFrame(bool save_doubles) {
391 // Optionally restore all XMM registers. 381 // Optionally restore all XMM registers.
392 if (save_doubles) { 382 if (save_doubles) {
393 CpuFeatures::Scope scope(SSE2); 383 CpuFeatures::Scope scope(SSE2);
394 if (save_doubles) { 384 const int offset = -2 * kPointerSize;
395 int offset = -2 * kPointerSize; 385 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
396 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { 386 XMMRegister reg = XMMRegister::from_code(i);
397 XMMRegister reg = XMMRegister::from_code(i); 387 movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
398 movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
399 }
400 } else if (FLAG_debug_code) {
401 // Zap all fp registers on a runtime call if we were not asked to preserve
402 // them.
403 push(eax);
404 mov(eax, Factory::nan_value());
405 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
406 XMMRegister reg = XMMRegister::from_code(i);
407 movdbl(reg, FieldOperand(eax, HeapNumber::kValueOffset));
408 }
409 pop(eax);
410 } 388 }
411 } 389 }
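The deletion on the left fixes a long-standing dead branch: the zap code was guarded by an else-if inside a block already conditioned on save_doubles, so it could never run. Reduced to its shape:

    if (save_doubles) {          // outer test
      if (save_doubles) {        // always true at this point...
        // restore all XMM registers
      } else if (FLAG_debug_code) {
        // ...so this NaN-zapping branch was unreachable
      }
    }

The replacement keeps only the restore loop.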
412 390
413 // Get the return address from the stack and restore the frame pointer. 391 // Get the return address from the stack and restore the frame pointer.
414 mov(ecx, Operand(ebp, 1 * kPointerSize)); 392 mov(ecx, Operand(ebp, 1 * kPointerSize));
415 mov(ebp, Operand(ebp, 0 * kPointerSize)); 393 mov(ebp, Operand(ebp, 0 * kPointerSize));
416 394
417 // Pop the arguments and the receiver from the caller stack. 395 // Pop the arguments and the receiver from the caller stack.
418 lea(esp, Operand(esi, 1 * kPointerSize)); 396 lea(esp, Operand(esi, 1 * kPointerSize));
419 397
(...skipping 52 matching lines...)
472 } 450 }
473 451
474 452
475 void MacroAssembler::PopTryHandler() { 453 void MacroAssembler::PopTryHandler() {
476 ASSERT_EQ(0, StackHandlerConstants::kNextOffset); 454 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
477 pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address))); 455 pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
478 add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize)); 456 add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
479 } 457 }
480 458
481 459
460 void MacroAssembler::Throw(Register value) {
461 // Adjust this code if not the case.
462 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
463
464 // eax must hold the exception.
465 if (!value.is(eax)) {
466 mov(eax, value);
467 }
468
469 // Drop the sp to the top of the handler.
470 ExternalReference handler_address(Top::k_handler_address);
471 mov(esp, Operand::StaticVariable(handler_address));
472
473 // Restore next handler and frame pointer, discard handler state.
474 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
475 pop(Operand::StaticVariable(handler_address));
476 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
477 pop(ebp);
478 pop(edx); // Remove state.
479
480 // Before returning we restore the context from the frame pointer if
481 // not NULL. The frame pointer is NULL in the exception handler of
482 // a JS entry frame.
483 Set(esi, Immediate(0)); // Tentatively set context pointer to NULL.
484 NearLabel skip;
485 cmp(ebp, 0);
486 j(equal, &skip, not_taken);
487 mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
488 bind(&skip);
489
490 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
491 ret(0);
492 }
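Throw() consumes one stack handler record in place; the STATIC_ASSERTs pin its four-word layout. Sketched as a struct (the state slot at 2 * kPointerSize is implied by the pop into edx rather than asserted):

    #include <cstdint>

    // Handler record as the pops above walk it, lowest address first.
    struct StackHandlerRecord {
      void*    next;   // kNextOffset == 0, restored to Top::k_handler_address
      void*    fp;     // kFPOffset == 1 * kPointerSize, popped into ebp
      intptr_t state;  // popped into edx and discarded
      void*    pc;     // kPCOffset == 3 * kPointerSize, consumed by ret(0)
    };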
493
494
495 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
496 Register value) {
497 // Adjust this code if not the case.
498 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
499
500 // eax must hold the exception.
501 if (!value.is(eax)) {
502 mov(eax, value);
503 }
504
505 // Drop sp to the top stack handler.
506 ExternalReference handler_address(Top::k_handler_address);
507 mov(esp, Operand::StaticVariable(handler_address));
508
509 // Unwind the handlers until the ENTRY handler is found.
510 NearLabel loop, done;
511 bind(&loop);
512 // Load the type of the current stack handler.
513 const int kStateOffset = StackHandlerConstants::kStateOffset;
514 cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
515 j(equal, &done);
516 // Fetch the next handler in the list.
517 const int kNextOffset = StackHandlerConstants::kNextOffset;
518 mov(esp, Operand(esp, kNextOffset));
519 jmp(&loop);
520 bind(&done);
521
522 // Set the top handler address to next handler past the current ENTRY handler.
523 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
524 pop(Operand::StaticVariable(handler_address));
525
526 if (type == OUT_OF_MEMORY) {
527 // Set external caught exception to false.
528 ExternalReference external_caught(Top::k_external_caught_exception_address);
529 mov(eax, false);
530 mov(Operand::StaticVariable(external_caught), eax);
531
532 // Set pending exception and eax to out of memory exception.
533 ExternalReference pending_exception(Top::k_pending_exception_address);
534 mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
535 mov(Operand::StaticVariable(pending_exception), eax);
536 }
537
538 // Clear the context pointer.
539 Set(esi, Immediate(0));
540
541 // Restore fp from handler and discard handler state.
542 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
543 pop(ebp);
544 pop(edx); // State.
545
546 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
547 ret(0);
548 }
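The loop in ThrowUncatchable walks the handler chain with esp itself as the cursor: each record's next field (offset 0) points at the previous handler on the stack, and the walk stops at the first ENTRY handler. The equivalent pointer walk, reusing the record sketched after Throw() (kEntryState is a stand-in for StackHandler::ENTRY):

    StackHandlerRecord* handler = top_handler;   // Top::k_handler_address
    while (handler->state != kEntryState) {
      handler = static_cast<StackHandlerRecord*>(handler->next);
    }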
549
550
482 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, 551 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
483 Register scratch, 552 Register scratch,
484 Label* miss) { 553 Label* miss) {
485 Label same_contexts; 554 Label same_contexts;
486 555
487 ASSERT(!holder_reg.is(scratch)); 556 ASSERT(!holder_reg.is(scratch));
488 557
489 // Load current lexical context from the stack frame. 558 // Load current lexical context from the stack frame.
490 mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset)); 559 mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));
491 560
(...skipping 126 matching lines...)
618 687
619 // Load address of new object into result. 688 // Load address of new object into result.
620 LoadAllocationTopHelper(result, scratch, flags); 689 LoadAllocationTopHelper(result, scratch, flags);
621 690
622 Register top_reg = result_end.is_valid() ? result_end : result; 691 Register top_reg = result_end.is_valid() ? result_end : result;
623 692
624 // Calculate new top and bail out if new space is exhausted. 693 // Calculate new top and bail out if new space is exhausted.
625 ExternalReference new_space_allocation_limit = 694 ExternalReference new_space_allocation_limit =
626 ExternalReference::new_space_allocation_limit_address(); 695 ExternalReference::new_space_allocation_limit_address();
627 696
628 if (top_reg.is(result)) { 697 if (!top_reg.is(result)) {
629 add(Operand(top_reg), Immediate(object_size)); 698 mov(top_reg, result);
630 } else {
631 lea(top_reg, Operand(result, object_size));
632 } 699 }
700 add(Operand(top_reg), Immediate(object_size));
701 j(carry, gc_required, not_taken);
633 cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit)); 702 cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
634 j(above, gc_required, not_taken); 703 j(above, gc_required, not_taken);
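The new j(carry, ...) is the point of this hunk: previously the new top was formed either by an unchecked add or by lea (which never sets the flags), so address wraparound past 2^32 went unnoticed. The rewritten sequence always uses add and bails to gc_required on carry before the limit compare. Modeled in 32-bit arithmetic:

    #include <cstdint>

    // Sketch of the bump-allocation test the new sequence performs.
    bool TryBumpAllocate(uint32_t top, uint32_t limit, uint32_t size,
                         uint32_t* new_top) {
      uint32_t t = top + size;
      if (t < top) return false;    // carry set: j(carry, gc_required)
      if (t > limit) return false;  // j(above, gc_required)
      *new_top = t;
      return true;
    }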
635 704
636 // Update allocation top. 705 // Update allocation top.
637 UpdateAllocationTopHelper(top_reg, scratch); 706 UpdateAllocationTopHelper(top_reg, scratch);
638 707
639 // Tag result if requested. 708 // Tag result if requested.
640 if (top_reg.is(result)) { 709 if (top_reg.is(result)) {
641 if ((flags & TAG_OBJECT) != 0) { 710 if ((flags & TAG_OBJECT) != 0) {
642 sub(Operand(result), Immediate(object_size - kHeapObjectTag)); 711 sub(Operand(result), Immediate(object_size - kHeapObjectTag));
(...skipping 28 matching lines...)
671 return; 740 return;
672 } 741 }
673 ASSERT(!result.is(result_end)); 742 ASSERT(!result.is(result_end));
674 743
675 // Load address of new object into result. 744 // Load address of new object into result.
676 LoadAllocationTopHelper(result, scratch, flags); 745 LoadAllocationTopHelper(result, scratch, flags);
677 746
678 // Calculate new top and bail out if new space is exhausted. 747 // Calculate new top and bail out if new space is exhausted.
679 ExternalReference new_space_allocation_limit = 748 ExternalReference new_space_allocation_limit =
680 ExternalReference::new_space_allocation_limit_address(); 749 ExternalReference::new_space_allocation_limit_address();
681 lea(result_end, Operand(result, element_count, element_size, header_size)); 750
751 // We assume that element_count*element_size + header_size does not
752 // overflow.
753 lea(result_end, Operand(element_count, element_size, header_size));
754 add(result_end, Operand(result));
755 j(carry, gc_required);
682 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit)); 756 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
683 j(above, gc_required); 757 j(above, gc_required);
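Same fix as the fixed-size variant above, with one twist: lea still computes the scaled size element_count * element_size + header_size (assumed, per the new comment, not to overflow on its own), but the base pointer is now folded in with add so the carry becomes observable. In 32-bit C++ terms:

    #include <cstdint>

    // Model of the new lea + add + j(carry) sequence.
    uint32_t ComputeAllocationEnd(uint32_t result, uint32_t element_count,
                                  uint32_t element_size, uint32_t header_size,
                                  bool* carry) {
      uint32_t size = element_count * element_size + header_size;  // assumed no overflow
      uint32_t end = result + size;   // add(result_end, Operand(result))
      *carry = end < result;          // tested by j(carry, gc_required)
      return end;
    }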
684 758
685 // Tag result if requested. 759 // Tag result if requested.
686 if ((flags & TAG_OBJECT) != 0) { 760 if ((flags & TAG_OBJECT) != 0) {
687 lea(result, Operand(result, kHeapObjectTag)); 761 lea(result, Operand(result, kHeapObjectTag));
688 } 762 }
689 763
690 // Update allocation top. 764 // Update allocation top.
691 UpdateAllocationTopHelper(result_end, scratch); 765 UpdateAllocationTopHelper(result_end, scratch);
(...skipping 24 matching lines...)
716 // Load address of new object into result. 790 // Load address of new object into result.
717 LoadAllocationTopHelper(result, scratch, flags); 791 LoadAllocationTopHelper(result, scratch, flags);
718 792
719 // Calculate new top and bail out if new space is exhausted. 793 // Calculate new top and bail out if new space is exhausted.
720 ExternalReference new_space_allocation_limit = 794 ExternalReference new_space_allocation_limit =
721 ExternalReference::new_space_allocation_limit_address(); 795 ExternalReference::new_space_allocation_limit_address();
722 if (!object_size.is(result_end)) { 796 if (!object_size.is(result_end)) {
723 mov(result_end, object_size); 797 mov(result_end, object_size);
724 } 798 }
725 add(result_end, Operand(result)); 799 add(result_end, Operand(result));
800 j(carry, gc_required, not_taken);
726 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit)); 801 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
727 j(above, gc_required, not_taken); 802 j(above, gc_required, not_taken);
728 803
729 // Tag result if requested. 804 // Tag result if requested.
730 if ((flags & TAG_OBJECT) != 0) { 805 if ((flags & TAG_OBJECT) != 0) {
731 lea(result, Operand(result, kHeapObjectTag)); 806 lea(result, Operand(result, kHeapObjectTag));
732 } 807 }
733 808
734 // Update allocation top. 809 // Update allocation top.
735 UpdateAllocationTopHelper(result_end, scratch); 810 UpdateAllocationTopHelper(result_end, scratch);
(...skipping 155 matching lines...)
891 scratch1, 966 scratch1,
892 scratch2, 967 scratch2,
893 gc_required, 968 gc_required,
894 TAG_OBJECT); 969 TAG_OBJECT);
895 970
896 // Set the map. The other fields are left uninitialized. 971 // Set the map. The other fields are left uninitialized.
897 mov(FieldOperand(result, HeapObject::kMapOffset), 972 mov(FieldOperand(result, HeapObject::kMapOffset),
898 Immediate(Factory::cons_ascii_string_map())); 973 Immediate(Factory::cons_ascii_string_map()));
899 } 974 }
900 975
901 // All registers must be distinct. Only current_string needs valid contents
902 // on entry. All registers may be invalid on exit. result_operand is
903 // unchanged, padding_chars is updated correctly.
904 void MacroAssembler::AppendStringToTopOfNewSpace(
905 Register current_string, // Tagged pointer to string to copy.
906 Register current_string_length,
907 Register result_pos,
908 Register scratch,
909 Register new_padding_chars,
910 Operand operand_result,
911 Operand operand_padding_chars,
912 Label* bailout) {
913 mov(current_string_length,
914 FieldOperand(current_string, String::kLengthOffset));
915 shr(current_string_length, 1);
916 sub(current_string_length, operand_padding_chars);
917 mov(new_padding_chars, current_string_length);
918 add(Operand(current_string_length), Immediate(kObjectAlignmentMask));
919 and_(Operand(current_string_length), Immediate(~kObjectAlignmentMask));
920 sub(new_padding_chars, Operand(current_string_length));
921 neg(new_padding_chars);
922 // We need an allocation even if current_string_length is 0, to fetch
923 // result_pos. Consider using a faster fetch of result_pos in that case.
924 AllocateInNewSpace(current_string_length, result_pos, scratch, no_reg,
925 bailout, NO_ALLOCATION_FLAGS);
926 sub(result_pos, operand_padding_chars);
927 mov(operand_padding_chars, new_padding_chars);
928 976
929 Register scratch_2 = new_padding_chars; // Used to compute total length. 977 // Copy memory, byte-by-byte, from source to destination. Not optimized for
930 // Copy string to the end of result. 978 // long or aligned copies. The contents of scratch and length are destroyed.
931 mov(current_string_length, 979 // Source and destination are incremented by length.
932 FieldOperand(current_string, String::kLengthOffset)); 980 // Many variants of movsb, loop unrolling, word moves, and indexed operands
933 mov(scratch, operand_result); 981 // have been tried here already, and this is fastest.
934 mov(scratch_2, current_string_length); 982 // A simpler loop is faster on small copies, but 30% slower on large ones.
935 add(scratch_2, FieldOperand(scratch, String::kLengthOffset)); 983 // The cld() instruction must have been emitted, to set the direction flag(),
936 mov(FieldOperand(scratch, String::kLengthOffset), scratch_2); 984 // before calling this function.
937 shr(current_string_length, 1); 985 void MacroAssembler::CopyBytes(Register source,
938 lea(current_string, 986 Register destination,
939 FieldOperand(current_string, SeqAsciiString::kHeaderSize)); 987 Register length,
940 // Loop condition: while (--current_string_length >= 0). 988 Register scratch) {
941 Label copy_loop; 989 Label loop, done, short_string, short_loop;
942 Label copy_loop_entry; 990 // Experimentation shows that the short string loop is faster if length < 10.
943 jmp(&copy_loop_entry); 991 cmp(Operand(length), Immediate(10));
944 bind(&copy_loop); 992 j(less_equal, &short_string);
945 mov_b(scratch, Operand(current_string, current_string_length, times_1, 0)); 993
946 mov_b(Operand(result_pos, current_string_length, times_1, 0), scratch); 994 ASSERT(source.is(esi));
947 bind(&copy_loop_entry); 995 ASSERT(destination.is(edi));
948 sub(Operand(current_string_length), Immediate(1)); 996 ASSERT(length.is(ecx));
949 j(greater_equal, &copy_loop); 997
998 // Because source is 4-byte aligned in our uses of this function,
999 // we keep source aligned for the rep_movs call by copying the odd bytes
1000 // at the end of the ranges.
1001 mov(scratch, Operand(source, length, times_1, -4));
1002 mov(Operand(destination, length, times_1, -4), scratch);
1003 mov(scratch, ecx);
1004 shr(ecx, 2);
1005 rep_movs();
1006 and_(Operand(scratch), Immediate(0x3));
1007 add(destination, Operand(scratch));
1008 jmp(&done);
1009
1010 bind(&short_string);
1011 test(length, Operand(length));
1012 j(zero, &done);
1013
1014 bind(&short_loop);
1015 mov_b(scratch, Operand(source, 0));
1016 mov_b(Operand(destination, 0), scratch);
1017 inc(source);
1018 inc(destination);
1019 dec(length);
1020 j(not_zero, &short_loop);
1021
1022 bind(&done);
950 } 1023 }
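A plain-C++ rendering of CopyBytes' long path may help: the final four bytes of the range are copied first (possibly overlapping the dword copy, which is harmless), rep movsd then moves length / 4 dwords, and destination alone is advanced past the length & 3 tail bytes (source is left at the last dword boundary), matching the assembler exactly:

    #include <cstdint>
    #include <cstring>

    // Sketch of the length > 10 path only; scratch bookkeeping omitted.
    void CopyBytesLong(const uint8_t*& src, uint8_t*& dst, uint32_t len) {
      std::memcpy(dst + len - 4, src + len - 4, 4);  // ragged tail first
      uint32_t dwords = len >> 2;                    // shr(ecx, 2)
      std::memcpy(dst, src, dwords * 4);             // rep_movs()
      src += dwords * 4;                             // rep movsd advances esi...
      dst += dwords * 4;                             // ...and edi by 4 * dwords
      dst += len & 3;                                // add(destination, scratch)
    }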
951 1024
952 1025
953 void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen, 1026 void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
954 Register result, 1027 Register result,
955 Register op, 1028 Register op,
956 JumpTarget* then_target) { 1029 JumpTarget* then_target) {
957 JumpTarget ok; 1030 JumpTarget ok;
958 test(result, Operand(result)); 1031 test(result, Operand(result));
959 ok.Branch(not_zero, taken); 1032 ok.Branch(not_zero, taken);
(...skipping 343 matching lines...)
1303 sub(Operand::StaticVariable(level_address), Immediate(1)); 1376 sub(Operand::StaticVariable(level_address), Immediate(1));
1304 Assert(above_equal, "Invalid HandleScope level"); 1377 Assert(above_equal, "Invalid HandleScope level");
1305 cmp(edi, Operand::StaticVariable(limit_address)); 1378 cmp(edi, Operand::StaticVariable(limit_address));
1306 j(not_equal, &delete_allocated_handles, not_taken); 1379 j(not_equal, &delete_allocated_handles, not_taken);
1307 bind(&leave_exit_frame); 1380 bind(&leave_exit_frame);
1308 1381
1309 // Check if the function scheduled an exception. 1382 // Check if the function scheduled an exception.
1310 ExternalReference scheduled_exception_address = 1383 ExternalReference scheduled_exception_address =
1311 ExternalReference::scheduled_exception_address(); 1384 ExternalReference::scheduled_exception_address();
1312 cmp(Operand::StaticVariable(scheduled_exception_address), 1385 cmp(Operand::StaticVariable(scheduled_exception_address),
1313 Immediate(Factory::the_hole_value())); 1386 Immediate(Factory::the_hole_value()));
1314 j(not_equal, &promote_scheduled_exception, not_taken); 1387 j(not_equal, &promote_scheduled_exception, not_taken);
1315 LeaveApiExitFrame(); 1388 LeaveApiExitFrame();
1316 ret(stack_space * kPointerSize); 1389 ret(stack_space * kPointerSize);
1317 bind(&promote_scheduled_exception); 1390 bind(&promote_scheduled_exception);
1318 MaybeObject* result = 1391 MaybeObject* result =
1319 TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); 1392 TryTailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
1320 if (result->IsFailure()) { 1393 if (result->IsFailure()) {
1321 return result; 1394 return result;
1322 } 1395 }
1323 bind(&empty_handle); 1396 bind(&empty_handle);
(...skipping 214 matching lines...)
1538 void MacroAssembler::LoadContext(Register dst, int context_chain_length) { 1611 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
1539 if (context_chain_length > 0) { 1612 if (context_chain_length > 0) {
1540 // Move up the chain of contexts to the context containing the slot. 1613 // Move up the chain of contexts to the context containing the slot.
1541 mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX))); 1614 mov(dst, Operand(esi, Context::SlotOffset(Context::CLOSURE_INDEX)));
1542 // Load the function context (which is the incoming, outer context). 1615 // Load the function context (which is the incoming, outer context).
1543 mov(dst, FieldOperand(dst, JSFunction::kContextOffset)); 1616 mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
1544 for (int i = 1; i < context_chain_length; i++) { 1617 for (int i = 1; i < context_chain_length; i++) {
1545 mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX))); 1618 mov(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
1546 mov(dst, FieldOperand(dst, JSFunction::kContextOffset)); 1619 mov(dst, FieldOperand(dst, JSFunction::kContextOffset));
1547 } 1620 }
1548 // The context may be an intermediate context, not a function context. 1621 } else {
1549 mov(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX))); 1622 // Slot is in the current function context. Move it into the
1550 } else { // Slot is in the current function context. 1623 // destination register in case we store into it (the write barrier
1551 // The context may be an intermediate context, not a function context. 1624 // cannot be allowed to destroy the context in esi).
1552 mov(dst, Operand(esi, Context::SlotOffset(Context::FCONTEXT_INDEX))); 1625 mov(dst, esi);
1626 }
1627
1628 // We should not have found a 'with' context by walking the context chain
1629 // (i.e., the static scope chain and runtime context chain do not agree).
1630 // A variable occurring in such a scope should have slot type LOOKUP and
1631 // not CONTEXT.
1632 if (FLAG_debug_code) {
1633 cmp(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
1634 Check(equal, "Yo dawg, I heard you liked function contexts "
1635 "so I put function contexts in all your contexts");
1553 } 1636 }
1554 } 1637 }
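The new debug check at the end of LoadContext exploits the invariant that a function context stores itself in its FCONTEXT slot, so the cmp only passes when the walk did not land on an intermediate 'with' context. A hypothetical rendering of the loop and the invariant (closure() and fcontext() are stand-ins for the slot loads emitted here, not real accessors from this file):

    Context* ctx = current;                     // esi, or dst after the mov
    for (int i = 0; i < context_chain_length; i++) {
      ctx = ctx->closure()->context();          // CLOSURE_INDEX, then kContextOffset
    }
    ASSERT(ctx->fcontext() == ctx);             // the FCONTEXT self-reference check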
1555 1638
1556 1639
1557 void MacroAssembler::LoadGlobalFunction(int index, Register function) { 1640 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
1558 // Load the global or builtins object from the current context. 1641 // Load the global or builtins object from the current context.
1559 mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); 1642 mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
1560 // Load the global context from the global or builtins object. 1643 // Load the global context from the global or builtins object.
1561 mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset)); 1644 mov(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
1562 // Load the function from the global context. 1645 // Load the function from the global context.
(...skipping 23 matching lines...)
1586 ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters); 1669 ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
1587 return kNumSafepointRegisters - reg_code - 1; 1670 return kNumSafepointRegisters - reg_code - 1;
1588 } 1671 }
1589 1672
1590 1673
1591 void MacroAssembler::Ret() { 1674 void MacroAssembler::Ret() {
1592 ret(0); 1675 ret(0);
1593 } 1676 }
1594 1677
1595 1678
1679 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
1680 if (is_uint16(bytes_dropped)) {
1681 ret(bytes_dropped);
1682 } else {
1683 pop(scratch);
1684 add(Operand(esp), Immediate(bytes_dropped));
1685 push(scratch);
1686 ret(0);
1687 }
1688 }
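Background for the is_uint16 test: the x86 near return with an immediate (ret imm16) encodes the byte count in 16 bits, so drops of 64 KB or more take the fallback, which achieves the same effect by hand:

    // ret n:      pop return address, esp += n, jump to it.
    // fallback:   pop(scratch)    -- return address off the stack
    //             add(esp, n)     -- drop the arguments
    //             push(scratch)   -- return address back on top
    //             ret(0)          -- plain return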
1689
1690
1691
1692
1596 void MacroAssembler::Drop(int stack_elements) { 1693 void MacroAssembler::Drop(int stack_elements) {
1597 if (stack_elements > 0) { 1694 if (stack_elements > 0) {
1598 add(Operand(esp), Immediate(stack_elements * kPointerSize)); 1695 add(Operand(esp), Immediate(stack_elements * kPointerSize));
1599 } 1696 }
1600 } 1697 }
1601 1698
1602 1699
1603 void MacroAssembler::Move(Register dst, Register src) { 1700 void MacroAssembler::Move(Register dst, Register src) {
1604 if (!dst.is(src)) { 1701 if (!dst.is(src)) {
1605 mov(dst, src); 1702 mov(dst, src);
(...skipping 122 matching lines...)
1728 intptr_t p1 = reinterpret_cast<intptr_t>(msg); 1825 intptr_t p1 = reinterpret_cast<intptr_t>(msg);
1729 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag; 1826 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
1730 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi()); 1827 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
1731 #ifdef DEBUG 1828 #ifdef DEBUG
1732 if (msg != NULL) { 1829 if (msg != NULL) {
1733 RecordComment("Abort message: "); 1830 RecordComment("Abort message: ");
1734 RecordComment(msg); 1831 RecordComment(msg);
1735 } 1832 }
1736 #endif 1833 #endif
1737 // Disable stub call restrictions to always allow calls to abort. 1834 // Disable stub call restrictions to always allow calls to abort.
1738 set_allow_stub_calls(true); 1835 AllowStubCallsScope allow_scope(this, true);
1739 1836
1740 push(eax); 1837 push(eax);
1741 push(Immediate(p0)); 1838 push(Immediate(p0));
1742 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)))); 1839 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
1743 CallRuntime(Runtime::kAbort, 2); 1840 CallRuntime(Runtime::kAbort, 2);
1744 // will not return here 1841 // will not return here
1745 int3(); 1842 int3();
1746 } 1843 }
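The p0/p1 dance above smuggles a raw char* through a runtime call that only accepts tagged values: p0 is the pointer with its low bit forced to kSmiTag, so it passes the smi check, and the dropped bit rides along separately as a genuine smi. A sketch of both ends (the runtime-side reconstruction is an assumption inferred from the encoding here):

    intptr_t p1 = reinterpret_cast<intptr_t>(msg);
    intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;  // low bit cleared: looks like a smi
    intptr_t delta = p1 - p0;                     // 0 or 1, boxed via Smi::FromInt
    // Runtime side (sketch): msg = reinterpret_cast<const char*>(p0 + delta);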
1747 1844
1748 1845
(...skipping 162 matching lines...)
1911 2008
1912 // Check that the code was patched as expected. 2009 // Check that the code was patched as expected.
1913 ASSERT(masm_.pc_ == address_ + size_); 2010 ASSERT(masm_.pc_ == address_ + size_);
1914 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); 2011 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
1915 } 2012 }
1916 2013
1917 2014
1918 } } // namespace v8::internal 2015 } } // namespace v8::internal
1919 2016
1920 #endif // V8_TARGET_ARCH_IA32 2017 #endif // V8_TARGET_ARCH_IA32
