Chromium Code Reviews

Diff: src/mips/code-stubs-mips.cc

Issue 20843012: Extract hardcoded error strings into a single place and replace them with an enum. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: styles fixed Created 7 years, 4 months ago
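What the change does, in one picture: every literal message that used to be passed straight to Assert/Check/Abort becomes a named constant of a single enum, so the text of each message lives in exactly one table. Below is a minimal sketch of the X-macro pattern such a table typically uses. The enum constants are ones visible in this diff; the macro names and the GetBailoutReason() helper are illustrative assumptions, not text taken from the patch.

    // Sketch only. Each (constant, message) pair is listed exactly once;
    // the enum and the string table are both generated from this list.
    #define ERROR_MESSAGES_LIST(V)                                            \
      V(kExpected0AsASmiSentinel, "Expected 0 as a Smi sentinel")             \
      V(kHeapNumberMapRegisterClobbered, "HeapNumberMap register clobbered.") \
      V(kExternalStringExpectedButNotFound,                                   \
        "external string expected, but not found")

    #define ERROR_MESSAGES_CONSTANTS(C, T) C,
    enum BailoutReason {
      ERROR_MESSAGES_LIST(ERROR_MESSAGES_CONSTANTS)
      kLastErrorMessage
    };
    #undef ERROR_MESSAGES_CONSTANTS

    // Maps a reason back to its message for Abort/Check output.
    #define ERROR_MESSAGES_TEXTS(C, T) T,
    inline const char* GetBailoutReason(BailoutReason reason) {
      static const char* const messages[] = {
        ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
      };
      return messages[reason];
    }
    #undef ERROR_MESSAGES_TEXTS

Call sites then pass the constant (e.g. kExpected0AsASmiSentinel), and the string is materialized only where the bailout is reported.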
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 502 matching lines...)
   __ li(a2, Operand(Smi::FromInt(length)));
   __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));

   // If this block context is nested in the native context we get a smi
   // sentinel instead of a function. The block context should get the
   // canonical empty function of the native context as its closure which
   // we still have to look up.
   Label after_sentinel;
   __ JumpIfNotSmi(a3, &after_sentinel);
   if (FLAG_debug_code) {
-    const char* message = "Expected 0 as a Smi sentinel";
-    __ Assert(eq, message, a3, Operand(zero_reg));
+    __ Assert(eq, kExpected0AsASmiSentinel, a3, Operand(zero_reg));
   }
   __ lw(a3, GlobalObjectOperand());
   __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
   __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX));
   __ bind(&after_sentinel);

   // Set up the fixed slots, copy the global object from the previous context.
   __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
   __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX));
   __ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX));
(...skipping 137 matching lines...)
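The chunk above shows the typical call-site shape: an Assert guarded by FLAG_debug_code. For readers new to these helpers, here is a standalone behavioral model of the three checks, reusing the BailoutReason sketch above. It models the semantics only; the real MacroAssembler methods emit MIPS compare/branch/break sequences into generated code rather than executing C++ at runtime.

    // Behavioral model, not the real v8::internal::MacroAssembler.
    #include <cstdio>
    #include <cstdlib>

    struct MasmModel {
      bool debug_code;  // models FLAG_debug_code

      // Abort: unconditional bailout with a named reason.
      void Abort(BailoutReason reason) {
        std::fprintf(stderr, "abort: %s\n", GetBailoutReason(reason));
        std::abort();
      }
      // Check: always present, even in release code.
      void Check(bool condition, BailoutReason reason) {
        if (!condition) Abort(reason);
      }
      // Assert: a Check that exists only when debug code is emitted.
      void Assert(bool condition, BailoutReason reason) {
        if (debug_code) Check(condition, reason);
      }
    };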
                                 Register object,
                                 FPURegister dst,
                                 Register dst1,
                                 Register dst2,
                                 Register heap_number_map,
                                 Register scratch1,
                                 Register scratch2,
                                 Label* not_number) {
   __ AssertRootValue(heap_number_map,
                      Heap::kHeapNumberMapRootIndex,
-                     "HeapNumberMap register clobbered.");
+                     kHeapNumberMapRegisterClobbered);

   Label is_smi, done;

   // Smi-check
   __ UntagAndJumpIfSmi(scratch1, object, &is_smi);
   // Heap number check
   __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);

   // Handle loading a double from a heap number.
   if (destination == kFPURegisters) {
(...skipping 29 matching lines...)
                                 Register object,
                                 Register dst,
                                 Register heap_number_map,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 FPURegister double_scratch,
                                 Label* not_number) {
   __ AssertRootValue(heap_number_map,
                      Heap::kHeapNumberMapRootIndex,
-                     "HeapNumberMap register clobbered.");
+                     kHeapNumberMapRegisterClobbered);
   Label done;
   Label not_in_int32_range;

   __ UntagAndJumpIfSmi(dst, object, &done);
   __ lw(scratch1, FieldMemOperand(object, HeapNumber::kMapOffset));
   __ Branch(not_number, ne, scratch1, Operand(heap_number_map));
   __ ConvertToInt32(object,
                     dst,
                     scratch1,
                     scratch2,
(...skipping 56 matching lines...)

   __ JumpIfNotSmi(object, &obj_is_not_smi);
   __ SmiUntag(scratch1, object);
   ConvertIntToDouble(masm, scratch1, destination, double_dst, dst_mantissa,
                      dst_exponent, scratch2, single_scratch);
   __ Branch(&done);

   __ bind(&obj_is_not_smi);
   __ AssertRootValue(heap_number_map,
                      Heap::kHeapNumberMapRootIndex,
-                     "HeapNumberMap register clobbered.");
+                     kHeapNumberMapRegisterClobbered);
   __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);

   // Load the number.
   // Load the double value.
   __ ldc1(double_dst, FieldMemOperand(object, HeapNumber::kValueOffset));

   Register except_flag = scratch2;
   __ EmitFPUTruncate(kRoundToZero,
                      scratch1,
                      double_dst,
(...skipping 26 matching lines...)
   ASSERT(!scratch1.is(scratch2) &&
          !scratch1.is(scratch3) &&
          !scratch2.is(scratch3));

   Label done, maybe_undefined;

   __ UntagAndJumpIfSmi(dst, object, &done);

   __ AssertRootValue(heap_number_map,
                      Heap::kHeapNumberMapRootIndex,
-                     "HeapNumberMap register clobbered.");
+                     kHeapNumberMapRegisterClobbered);

   __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, &maybe_undefined);

   // Object is a heap number.
   // Convert the floating point value to a 32-bit integer.
   // Load the double value.
   __ ldc1(double_scratch0, FieldMemOperand(object, HeapNumber::kValueOffset));

   Register except_flag = scratch2;
   __ EmitFPUTruncate(kRoundToZero,
(...skipping 3405 matching lines...)
   STATIC_ASSERT(kSmiTag == 0);
   __ JumpIfSmi(a0, &runtime);
   __ GetObjectType(a0, a1, a1);
   __ Branch(&runtime, ne, a1, Operand(JS_REGEXP_TYPE));

   // Check that the RegExp has been compiled (data contains a fixed array).
   __ lw(regexp_data, FieldMemOperand(a0, JSRegExp::kDataOffset));
   if (FLAG_debug_code) {
     __ And(t0, regexp_data, Operand(kSmiTagMask));
     __ Check(nz,
-             "Unexpected type for RegExp data, FixedArray expected",
+             kUnexpectedTypeForRegExpDataFixedArrayExpected,
              t0,
              Operand(zero_reg));
     __ GetObjectType(regexp_data, a0, a0);
     __ Check(eq,
-             "Unexpected type for RegExp data, FixedArray expected",
+             kUnexpectedTypeForRegExpDataFixedArrayExpected,
              a0,
              Operand(FIXED_ARRAY_TYPE));
   }

   // regexp_data: RegExp data (FixedArray)
   // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
   __ lw(a0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset));
   __ Branch(&runtime, ne, a0, Operand(Smi::FromInt(JSRegExp::IRREGEXP)));

   // regexp_data: RegExp data (FixedArray)
(...skipping 334 matching lines...)
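A note on the Check(nz, ...) pattern in the RegExp chunk above: ANDing regexp_data with kSmiTagMask isolates the low tag bit, and nz (non-zero) passes exactly when the value is not a smi, i.e. when it is a heap object pointer such as a FixedArray. A minimal sketch of 32-bit V8's tagging scheme, under the kSmiTag == 0 assumption the STATIC_ASSERT states:

    #include <cstdint>

    // 32-bit smi tagging, sketched: a smi is a 31-bit integer shifted left
    // one bit (low bit 0); heap object pointers carry a low bit of 1.
    constexpr intptr_t kSmiTagSize = 1;
    constexpr intptr_t kSmiTag = 0;
    constexpr intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;  // 0x1

    inline bool IsSmi(intptr_t value) {
      return (value & kSmiTagMask) == kSmiTag;
    }
    inline intptr_t SmiFromInt(int32_t n) {
      return static_cast<intptr_t>(n) << kSmiTagSize;
    }
    inline int32_t SmiToInt(intptr_t smi) {
      return static_cast<int32_t>(smi >> kSmiTagSize);
    }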

   // (7) External string. Make it, offset-wise, look like a sequential string.
   __ bind(&external_string);
   __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
   __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
   if (FLAG_debug_code) {
     // Assert that we do not have a cons or slice (indirect strings) here.
     // Sequential strings have already been ruled out.
     __ And(at, a0, Operand(kIsIndirectStringMask));
     __ Assert(eq,
-              "external string expected, but not found",
+              kExternalStringExpectedButNotFound,
               at,
               Operand(zero_reg));
   }
   __ lw(subject,
         FieldMemOperand(subject, ExternalString::kResourceDataOffset));
   // Move the pointer so that offset-wise, it looks like a sequential string.
   STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
   __ Subu(subject,
           subject,
           SeqTwoByteString::kHeaderSize - kHeapObjectTag);
(...skipping 360 matching lines...)
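The Subu at the end of the external-string chunk rebases the raw character pointer so that the sequential-string addressing pattern (offset minus the heap-object tag) lands on the external data. A worked sketch of that arithmetic; kHeapObjectTag == 1 matches V8's 32-bit tagging, while the header size here is an illustrative stand-in, not the real layout constant:

    #include <cassert>
    #include <cstdint>

    constexpr intptr_t kHeapObjectTag = 1;         // low tag bit on heap pointers
    constexpr intptr_t kSeqStringHeaderSize = 12;  // illustrative value only

    // FieldMemOperand(obj, offset) addresses obj + offset - kHeapObjectTag.
    inline intptr_t FieldAddress(intptr_t tagged_obj, intptr_t offset) {
      return tagged_obj + offset - kHeapObjectTag;
    }

    int main() {
      intptr_t resource_data = 0x1000;  // where the external characters live
      // Rebase exactly as the Subu does: subtract (header size - tag).
      intptr_t faked_subject =
          resource_data - (kSeqStringHeaderSize - kHeapObjectTag);
      // The sequential-string access pattern now reaches the external data.
      assert(FieldAddress(faked_subject, kSeqStringHeaderSize) == resource_data);
      return 0;
    }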
                              &call_runtime_);

   __ sll(result_, result_, kSmiTagSize);
   __ bind(&exit_);
 }


 void StringCharCodeAtGenerator::GenerateSlow(
     MacroAssembler* masm,
     const RuntimeCallHelper& call_helper) {
-  __ Abort("Unexpected fallthrough to CharCodeAt slow case");
+  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

   // Index is not a smi.
   __ bind(&index_not_smi_);
   // If index is a heap number, try converting it to an integer.
   __ CheckMap(index_,
               result_,
               Heap::kHeapNumberMapRootIndex,
               index_not_number_,
               DONT_DO_SMI_CHECK);
   call_helper.BeforeCall(masm);
(...skipping 28 matching lines...)
   call_helper.BeforeCall(masm);
   __ sll(index_, index_, kSmiTagSize);
   __ Push(object_, index_);
   __ CallRuntime(Runtime::kStringCharCodeAt, 2);

   __ Move(result_, v0);

   call_helper.AfterCall(masm);
   __ jmp(&exit_);

-  __ Abort("Unexpected fallthrough from CharCodeAt slow case");
+  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
 }


 // -------------------------------------------------------------------------
 // StringCharFromCodeGenerator

 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
   // Fast case of Heap::LookupSingleCharacterStringFromCode.

   ASSERT(!t0.is(result_));
(...skipping 16 matching lines...)
   __ lw(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
   __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
   __ Branch(&slow_case_, eq, result_, Operand(t0));
   __ bind(&exit_);
 }


 void StringCharFromCodeGenerator::GenerateSlow(
     MacroAssembler* masm,
     const RuntimeCallHelper& call_helper) {
-  __ Abort("Unexpected fallthrough to CharFromCode slow case");
+  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

   __ bind(&slow_case_);
   call_helper.BeforeCall(masm);
   __ push(code_);
   __ CallRuntime(Runtime::kCharFromCode, 1);
   __ Move(result_, v0);

   call_helper.AfterCall(masm);
   __ Branch(&exit_);

-  __ Abort("Unexpected fallthrough from CharFromCode slow case");
+  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
 }


 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                           Register dest,
                                           Register src,
                                           Register count,
                                           Register scratch,
                                           bool ascii) {
   Label loop;
(...skipping 34 matching lines...)
                                             Register scratch5,
                                             int flags) {
   bool ascii = (flags & COPY_ASCII) != 0;
   bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0;

   if (dest_always_aligned && FLAG_debug_code) {
     // Check that destination is actually word aligned if the flag says
     // that it is.
     __ And(scratch4, dest, Operand(kPointerAlignmentMask));
     __ Check(eq,
-             "Destination of copy not aligned.",
+             kDestinationOfCopyNotAligned,
              scratch4,
              Operand(zero_reg));
   }

   const int kReadAlignment = 4;
   const int kReadAlignmentMask = kReadAlignment - 1;
   // Ensure that reading an entire aligned word containing the last character
   // of a string will not read outside the allocated area (because we pad up
   // to kObjectAlignment).
   STATIC_ASSERT(kObjectAlignment >= kReadAlignment);
(...skipping 179 matching lines...)
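The alignment Check above relies on the usual power-of-two trick: kPointerAlignmentMask is the alignment minus one, so the AND keeps only the low bits, which must all be zero for an aligned address. In plain C++ (a generic sketch, not V8 code):

    #include <cstdint>

    // Power-of-two alignment test: mask off the low bits and require zero.
    inline bool IsAligned(uintptr_t addr, uintptr_t alignment) {
      return (addr & (alignment - 1)) == 0;
    }
    // IsAligned(0x1004, 4) == true; IsAligned(0x1006, 4) == false.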

   // If entry is undefined no string with this hash can be found.
   Label is_string;
   __ GetObjectType(candidate, scratch, scratch);
   __ Branch(&is_string, ne, scratch, Operand(ODDBALL_TYPE));

   __ Branch(not_found, eq, undefined, Operand(candidate));
   // Must be the hole (deleted entry).
   if (FLAG_debug_code) {
     __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
-    __ Assert(eq, "oddball in string table is not undefined or the hole",
+    __ Assert(eq, kOddballInStringTableIsNotUndefinedOrTheHole,
               scratch, Operand(candidate));
   }
   __ jmp(&next_probe[i]);

   __ bind(&is_string);

   // Check that the candidate is a non-external ASCII string. The instance
   // type is still in the scratch register from the CompareObjectType
   // operation.
   __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &next_probe[i]);
(...skipping 1187 matching lines...)
   // No need to pop or drop anything, LeaveExitFrame will restore the old
   // stack, thus dropping the allocated space for the return value.
   // The saved ra is after the reserved stack space for the 4 args.
   __ lw(t9, MemOperand(sp, kCArgsSlotsSize));

   if (FLAG_debug_code && FLAG_enable_slow_asserts) {
     // In case of an error the return address may point to a memory area
     // filled with kZapValue by the GC.
     // Dereference the address and check for this.
     __ lw(t0, MemOperand(t9));
-    __ Assert(ne, "Received invalid return address.", t0,
+    __ Assert(ne, kReceivedInvalidReturnAddress, t0,
               Operand(reinterpret_cast<uint32_t>(kZapValue)));
   }
   __ Jump(t9);
 }


 void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                     Register target) {
   __ Move(t9, target);
   __ AssertStackIsAligned();
(...skipping 730 matching lines...)
   for (int i = 0; i <= last_index; ++i) {
     Label next;
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
     __ Branch(&next, ne, a3, Operand(kind));
     T stub(kind);
     __ TailCallStub(&stub);
     __ bind(&next);
   }

   // If we reached this point there is a problem.
-  __ Abort("Unexpected ElementsKind in array constructor");
+  __ Abort(kUnexpectedElementsKindInArrayConstructor);
 }


 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
   // a2 - type info cell
   // a3 - kind
   // a0 - number of arguments
   // a1 - constructor?
   // sp[0] - last argument
   ASSERT(FAST_SMI_ELEMENTS == 0);
(...skipping 34 matching lines...)
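CreateArrayDispatch above emits one compare-and-tail-call per fast ElementsKind, in sequence order; control can only fall through to the Abort if the incoming kind matched nothing. A rough C++ analogue of the generated control flow; the kind list is abbreviated, and everything beyond the names taken from the diff is a hypothetical model:

    #include <cstdio>
    #include <cstdlib>

    enum ElementsKind {
      FAST_SMI_ELEMENTS,
      FAST_HOLEY_SMI_ELEMENTS,
      FAST_ELEMENTS,
      FAST_HOLEY_ELEMENTS,  // abbreviated: the real sequence has more kinds
      LAST_KIND = FAST_HOLEY_ELEMENTS
    };

    void TailCallStubFor(ElementsKind kind) {  // stands in for TailCallStub
      std::printf("tail call to stub specialized for kind %d\n", kind);
    }

    void Dispatch(ElementsKind actual_kind) {
      for (int i = 0; i <= LAST_KIND; ++i) {
        if (actual_kind == static_cast<ElementsKind>(i)) {
          TailCallStubFor(actual_kind);
          return;  // the generated code tail-calls and never comes back
        }
      }
      // If we reached this point there is a problem.
      std::fprintf(stderr, "Unexpected ElementsKind in array constructor\n");
      std::abort();
    }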
   for (int i = 0; i <= last_index; ++i) {
     Label next;
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
     __ Branch(&next, ne, a3, Operand(kind));
     ArraySingleArgumentConstructorStub stub(kind);
     __ TailCallStub(&stub);
     __ bind(&next);
   }

   // If we reached this point there is a problem.
-  __ Abort("Unexpected ElementsKind in array constructor");
+  __ Abort(kUnexpectedElementsKindInArrayConstructor);
 }


 template<class T>
 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
   int to_index = GetSequenceIndexFromFastElementsKind(
       TERMINAL_FAST_ELEMENTS_KIND);
   for (int i = 0; i <= to_index; ++i) {
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
     T stub(kind);
(...skipping 40 matching lines...)
   //  -- sp[4] : last argument
   // -----------------------------------
   if (FLAG_debug_code) {
     // The array construct code is only set for the global and natives
     // builtin Array functions which always have maps.

     // Initial map for the builtin Array function should be a map.
     __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi.
     __ And(at, a3, Operand(kSmiTagMask));
-    __ Assert(ne, "Unexpected initial map for Array function",
+    __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
               at, Operand(zero_reg));
     __ GetObjectType(a3, a3, t0);
-    __ Assert(eq, "Unexpected initial map for Array function",
+    __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
               t0, Operand(MAP_TYPE));

     // We should either have undefined in a2 or a valid cell.
     Label okay_here;
     Handle<Map> cell_map = masm->isolate()->factory()->cell_map();
     __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
     __ Branch(&okay_here, eq, a2, Operand(at));
     __ lw(a3, FieldMemOperand(a2, 0));
-    __ Assert(eq, "Expected property cell in register a2",
+    __ Assert(eq, kExpectedPropertyCellInRegisterA2,
               a3, Operand(cell_map));
     __ bind(&okay_here);
   }

   Label no_info, switch_ready;
   // Get the elements kind and case on that.
   __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
   __ Branch(&no_info, eq, a2, Operand(at));
   __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));

(...skipping 79 matching lines...)
   // -----------------------------------

   if (FLAG_debug_code) {
     // The array construct code is only set for the global and natives
     // builtin Array functions which always have maps.

     // Initial map for the builtin Array function should be a map.
     __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi.
     __ And(at, a3, Operand(kSmiTagMask));
-    __ Assert(ne, "Unexpected initial map for Array function",
+    __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
               at, Operand(zero_reg));
     __ GetObjectType(a3, a3, t0);
-    __ Assert(eq, "Unexpected initial map for Array function",
+    __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
               t0, Operand(MAP_TYPE));
   }

   // Figure out the right elements kind.
   __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));

   // Load the map's "bit field 2" into a3. We only need the first byte,
   // but the following bit field extraction takes care of that anyway.
   __ lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset));
   // Retrieve elements_kind from bit field 2.
   __ Ext(a3, a3, Map::kElementsKindShift, Map::kElementsKindBitCount);

   if (FLAG_debug_code) {
     Label done;
     __ Branch(&done, eq, a3, Operand(FAST_ELEMENTS));
     __ Assert(
-        eq, "Invalid ElementsKind for InternalArray or InternalPackedArray",
+        eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray,
         a3, Operand(FAST_HOLEY_ELEMENTS));
     __ bind(&done);
   }

   Label fast_elements_case;
   __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS));
   GenerateCase(masm, FAST_HOLEY_ELEMENTS);

   __ bind(&fast_elements_case);
   GenerateCase(masm, FAST_ELEMENTS);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_MIPS
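A closing note on the bit-field extraction in the last chunk: the MIPS Ext instruction used by __ Ext(a3, a3, Map::kElementsKindShift, Map::kElementsKindBitCount) extracts a contiguous bit field, right-aligned. Its effect, as a plain C++ sketch:

    #include <cstdint>

    // Ext(dst, src, shift, count): take 'count' bits of 'src' starting at
    // bit 'shift' and right-align them in the result.
    inline uint32_t ExtractBits(uint32_t word, int shift, int count) {
      return (word >> shift) & ((1u << count) - 1);
    }
    // e.g. elements_kind = ExtractBits(bit_field2, kElementsKindShift,
    //                                  kElementsKindBitCount);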
