OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 229 matching lines...)
240 } | 240 } |
241 | 241 |
242 | 242 |
243 void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( | 243 void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( |
244 Isolate* isolate, | 244 Isolate* isolate, |
245 CodeStubInterfaceDescriptor* descriptor) { | 245 CodeStubInterfaceDescriptor* descriptor) { |
246 InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1); | 246 InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1); |
247 } | 247 } |
248 | 248 |
249 | 249 |
| 250 void UnaryOpStub::InitializeInterfaceDescriptor( |
| 251 Isolate* isolate, |
| 252 CodeStubInterfaceDescriptor* descriptor) { |
| 253 static Register registers[] = { a0 }; |
| 254 descriptor->register_param_count_ = 1; |
| 255 descriptor->register_params_ = registers; |
| 256 descriptor->deoptimization_handler_ = |
| 257 FUNCTION_ADDR(UnaryOpIC_Miss); |
| 258 } |
| 259 |
| 260 |
250 void StoreGlobalStub::InitializeInterfaceDescriptor( | 261 void StoreGlobalStub::InitializeInterfaceDescriptor( |
251 Isolate* isolate, | 262 Isolate* isolate, |
252 CodeStubInterfaceDescriptor* descriptor) { | 263 CodeStubInterfaceDescriptor* descriptor) { |
253 static Register registers[] = { a1, a2, a0 }; | 264 static Register registers[] = { a1, a2, a0 }; |
254 descriptor->register_param_count_ = 3; | 265 descriptor->register_param_count_ = 3; |
255 descriptor->register_params_ = registers; | 266 descriptor->register_params_ = registers; |
256 descriptor->deoptimization_handler_ = | 267 descriptor->deoptimization_handler_ = |
257 FUNCTION_ADDR(StoreIC_MissFromStubFailure); | 268 FUNCTION_ADDR(StoreIC_MissFromStubFailure); |
258 } | 269 } |
259 | 270 |
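Every descriptor hunk in this file, including the new UnaryOpStub above, follows the same initialization shape: a static register list, a parameter count, and the address of the runtime miss handler. A minimal C++ sketch of that pattern, using the field names visible in the hunk (SomeStub and SomeStub_Miss are placeholders, not names from this CL):

void SomeStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  // Registers that carry the stub's arguments, in calling-convention order.
  static Register registers[] = { a1, a2, a0 };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  // Runtime entry taken when the stub cannot handle its input (a "miss").
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(SomeStub_Miss);
}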
(...skipping 242 matching lines...)
502 __ li(a2, Operand(Smi::FromInt(length))); | 513 __ li(a2, Operand(Smi::FromInt(length))); |
503 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); | 514 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); |
504 | 515 |
505 // If this block context is nested in the native context we get a smi | 516 // If this block context is nested in the native context we get a smi |
506 // sentinel instead of a function. The block context should get the | 517 // sentinel instead of a function. The block context should get the |
507 // canonical empty function of the native context as its closure which | 518 // canonical empty function of the native context as its closure which |
508 // we still have to look up. | 519 // we still have to look up. |
509 Label after_sentinel; | 520 Label after_sentinel; |
510 __ JumpIfNotSmi(a3, &after_sentinel); | 521 __ JumpIfNotSmi(a3, &after_sentinel); |
511 if (FLAG_debug_code) { | 522 if (FLAG_debug_code) { |
512 __ Assert(eq, kExpected0AsASmiSentinel, a3, Operand(zero_reg)); | 523 const char* message = "Expected 0 as a Smi sentinel"; |
| 524 __ Assert(eq, message, a3, Operand(zero_reg)); |
513 } | 525 } |
514 __ lw(a3, GlobalObjectOperand()); | 526 __ lw(a3, GlobalObjectOperand()); |
515 __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset)); | 527 __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset)); |
516 __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX)); | 528 __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX)); |
517 __ bind(&after_sentinel); | 529 __ bind(&after_sentinel); |
518 | 530 |
519 // Set up the fixed slots, copy the global object from the previous context. | 531 // Set up the fixed slots, copy the global object from the previous context. |
520 __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | 532 __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
521 __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX)); | 533 __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX)); |
522 __ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX)); | 534 __ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX)); |
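The Assert change earlier in this hunk is the pattern this CL applies at every Assert/Check/Abort site in the file: the OLD side names the message with a BailoutReason constant, the NEW side passes a plain C string. A before/after sketch of the two shapes, taken from this hunk (signatures abbreviated, not copied from the V8 headers):

// OLD: message named by an enum constant.
__ Assert(eq, kExpected0AsASmiSentinel, a3, Operand(zero_reg));
// NEW: message carried as a const char*.
const char* message = "Expected 0 as a Smi sentinel";
__ Assert(eq, message, a3, Operand(zero_reg));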
(...skipping 137 matching lines...)
660 Register object, | 672 Register object, |
661 FPURegister dst, | 673 FPURegister dst, |
662 Register dst1, | 674 Register dst1, |
663 Register dst2, | 675 Register dst2, |
664 Register heap_number_map, | 676 Register heap_number_map, |
665 Register scratch1, | 677 Register scratch1, |
666 Register scratch2, | 678 Register scratch2, |
667 Label* not_number) { | 679 Label* not_number) { |
668 __ AssertRootValue(heap_number_map, | 680 __ AssertRootValue(heap_number_map, |
669 Heap::kHeapNumberMapRootIndex, | 681 Heap::kHeapNumberMapRootIndex, |
670 kHeapNumberMapRegisterClobbered); | 682 "HeapNumberMap register clobbered."); |
671 | 683 |
672 Label is_smi, done; | 684 Label is_smi, done; |
673 | 685 |
674 // Smi-check | 686 // Smi-check |
675 __ UntagAndJumpIfSmi(scratch1, object, &is_smi); | 687 __ UntagAndJumpIfSmi(scratch1, object, &is_smi); |
676 // Heap number check | 688 // Heap number check |
677 __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number); | 689 __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number); |
678 | 690 |
679 // Handle loading a double from a heap number. | 691 // Handle loading a double from a heap number. |
680 if (destination == kFPURegisters) { | 692 if (destination == kFPURegisters) { |
(...skipping 29 matching lines...)
710 Register object, | 722 Register object, |
711 Register dst, | 723 Register dst, |
712 Register heap_number_map, | 724 Register heap_number_map, |
713 Register scratch1, | 725 Register scratch1, |
714 Register scratch2, | 726 Register scratch2, |
715 Register scratch3, | 727 Register scratch3, |
716 FPURegister double_scratch, | 728 FPURegister double_scratch, |
717 Label* not_number) { | 729 Label* not_number) { |
718 __ AssertRootValue(heap_number_map, | 730 __ AssertRootValue(heap_number_map, |
719 Heap::kHeapNumberMapRootIndex, | 731 Heap::kHeapNumberMapRootIndex, |
720 kHeapNumberMapRegisterClobbered); | 732 "HeapNumberMap register clobbered."); |
721 Label done; | 733 Label done; |
722 Label not_in_int32_range; | 734 Label not_in_int32_range; |
723 | 735 |
724 __ UntagAndJumpIfSmi(dst, object, &done); | 736 __ UntagAndJumpIfSmi(dst, object, &done); |
725 __ lw(scratch1, FieldMemOperand(object, HeapNumber::kMapOffset)); | 737 __ lw(scratch1, FieldMemOperand(object, HeapNumber::kMapOffset)); |
726 __ Branch(not_number, ne, scratch1, Operand(heap_number_map)); | 738 __ Branch(not_number, ne, scratch1, Operand(heap_number_map)); |
727 __ ConvertToInt32(object, | 739 __ ConvertToInt32(object, |
728 dst, | 740 dst, |
729 scratch1, | 741 scratch1, |
730 scratch2, | 742 scratch2, |
(...skipping 56 matching lines...)
787 | 799 |
788 __ JumpIfNotSmi(object, &obj_is_not_smi); | 800 __ JumpIfNotSmi(object, &obj_is_not_smi); |
789 __ SmiUntag(scratch1, object); | 801 __ SmiUntag(scratch1, object); |
790 ConvertIntToDouble(masm, scratch1, destination, double_dst, dst_mantissa, | 802 ConvertIntToDouble(masm, scratch1, destination, double_dst, dst_mantissa, |
791 dst_exponent, scratch2, single_scratch); | 803 dst_exponent, scratch2, single_scratch); |
792 __ Branch(&done); | 804 __ Branch(&done); |
793 | 805 |
794 __ bind(&obj_is_not_smi); | 806 __ bind(&obj_is_not_smi); |
795 __ AssertRootValue(heap_number_map, | 807 __ AssertRootValue(heap_number_map, |
796 Heap::kHeapNumberMapRootIndex, | 808 Heap::kHeapNumberMapRootIndex, |
797 kHeapNumberMapRegisterClobbered); | 809 "HeapNumberMap register clobbered."); |
798 __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32); | 810 __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32); |
799 | 811 |
800 // Load the number. | 812 // Load the number. |
801 // Load the double value. | 813 // Load the double value. |
802 __ ldc1(double_dst, FieldMemOperand(object, HeapNumber::kValueOffset)); | 814 __ ldc1(double_dst, FieldMemOperand(object, HeapNumber::kValueOffset)); |
803 | 815 |
804 Register except_flag = scratch2; | 816 Register except_flag = scratch2; |
805 __ EmitFPUTruncate(kRoundToZero, | 817 __ EmitFPUTruncate(kRoundToZero, |
806 scratch1, | 818 scratch1, |
807 double_dst, | 819 double_dst, |
(...skipping 26 matching lines...)
834 ASSERT(!scratch1.is(scratch2) && | 846 ASSERT(!scratch1.is(scratch2) && |
835 !scratch1.is(scratch3) && | 847 !scratch1.is(scratch3) && |
836 !scratch2.is(scratch3)); | 848 !scratch2.is(scratch3)); |
837 | 849 |
838 Label done, maybe_undefined; | 850 Label done, maybe_undefined; |
839 | 851 |
840 __ UntagAndJumpIfSmi(dst, object, &done); | 852 __ UntagAndJumpIfSmi(dst, object, &done); |
841 | 853 |
842 __ AssertRootValue(heap_number_map, | 854 __ AssertRootValue(heap_number_map, |
843 Heap::kHeapNumberMapRootIndex, | 855 Heap::kHeapNumberMapRootIndex, |
844 kHeapNumberMapRegisterClobbered); | 856 "HeapNumberMap register clobbered."); |
845 | 857 |
846 __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, &maybe_undefined); | 858 __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, &maybe_undefined); |
847 | 859 |
848 // Object is a heap number. | 860 // Object is a heap number. |
849 // Convert the floating point value to a 32-bit integer. | 861 // Convert the floating point value to a 32-bit integer. |
850 // Load the double value. | 862 // Load the double value. |
851 __ ldc1(double_scratch0, FieldMemOperand(object, HeapNumber::kValueOffset)); | 863 __ ldc1(double_scratch0, FieldMemOperand(object, HeapNumber::kValueOffset)); |
852 | 864 |
853 Register except_flag = scratch2; | 865 Register except_flag = scratch2; |
854 __ EmitFPUTruncate(kRoundToZero, | 866 __ EmitFPUTruncate(kRoundToZero, |
(...skipping 3405 matching lines...)
4260 STATIC_ASSERT(kSmiTag == 0); | 4272 STATIC_ASSERT(kSmiTag == 0); |
4261 __ JumpIfSmi(a0, &runtime); | 4273 __ JumpIfSmi(a0, &runtime); |
4262 __ GetObjectType(a0, a1, a1); | 4274 __ GetObjectType(a0, a1, a1); |
4263 __ Branch(&runtime, ne, a1, Operand(JS_REGEXP_TYPE)); | 4275 __ Branch(&runtime, ne, a1, Operand(JS_REGEXP_TYPE)); |
4264 | 4276 |
4265 // Check that the RegExp has been compiled (data contains a fixed array). | 4277 // Check that the RegExp has been compiled (data contains a fixed array). |
4266 __ lw(regexp_data, FieldMemOperand(a0, JSRegExp::kDataOffset)); | 4278 __ lw(regexp_data, FieldMemOperand(a0, JSRegExp::kDataOffset)); |
4267 if (FLAG_debug_code) { | 4279 if (FLAG_debug_code) { |
4268 __ And(t0, regexp_data, Operand(kSmiTagMask)); | 4280 __ And(t0, regexp_data, Operand(kSmiTagMask)); |
4269 __ Check(nz, | 4281 __ Check(nz, |
4270 kUnexpectedTypeForRegExpDataFixedArrayExpected, | 4282 "Unexpected type for RegExp data, FixedArray expected", |
4271 t0, | 4283 t0, |
4272 Operand(zero_reg)); | 4284 Operand(zero_reg)); |
4273 __ GetObjectType(regexp_data, a0, a0); | 4285 __ GetObjectType(regexp_data, a0, a0); |
4274 __ Check(eq, | 4286 __ Check(eq, |
4275 kUnexpectedTypeForRegExpDataFixedArrayExpected, | 4287 "Unexpected type for RegExp data, FixedArray expected", |
4276 a0, | 4288 a0, |
4277 Operand(FIXED_ARRAY_TYPE)); | 4289 Operand(FIXED_ARRAY_TYPE)); |
4278 } | 4290 } |
4279 | 4291 |
4280 // regexp_data: RegExp data (FixedArray) | 4292 // regexp_data: RegExp data (FixedArray) |
4281 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. | 4293 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. |
4282 __ lw(a0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); | 4294 __ lw(a0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); |
4283 __ Branch(&runtime, ne, a0, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); | 4295 __ Branch(&runtime, ne, a0, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); |
4284 | 4296 |
4285 // regexp_data: RegExp data (FixedArray) | 4297 // regexp_data: RegExp data (FixedArray) |
(...skipping 334 matching lines...)
4620 | 4632 |
4621 // (7) External string. Make it, offset-wise, look like a sequential string. | 4633 // (7) External string. Make it, offset-wise, look like a sequential string. |
4622 __ bind(&external_string); | 4634 __ bind(&external_string); |
4623 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); | 4635 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); |
4624 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); | 4636 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); |
4625 if (FLAG_debug_code) { | 4637 if (FLAG_debug_code) { |
4626 // Assert that we do not have a cons or slice (indirect strings) here. | 4638 // Assert that we do not have a cons or slice (indirect strings) here. |
4627 // Sequential strings have already been ruled out. | 4639 // Sequential strings have already been ruled out. |
4628 __ And(at, a0, Operand(kIsIndirectStringMask)); | 4640 __ And(at, a0, Operand(kIsIndirectStringMask)); |
4629 __ Assert(eq, | 4641 __ Assert(eq, |
4630 kExternalStringExpectedButNotFound, | 4642 "external string expected, but not found", |
4631 at, | 4643 at, |
4632 Operand(zero_reg)); | 4644 Operand(zero_reg)); |
4633 } | 4645 } |
4634 __ lw(subject, | 4646 __ lw(subject, |
4635 FieldMemOperand(subject, ExternalString::kResourceDataOffset)); | 4647 FieldMemOperand(subject, ExternalString::kResourceDataOffset)); |
4636 // Move the pointer so that offset-wise, it looks like a sequential string. | 4648 // Move the pointer so that offset-wise, it looks like a sequential string. |
4637 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); | 4649 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); |
4638 __ Subu(subject, | 4650 __ Subu(subject, |
4639 subject, | 4651 subject, |
4640 SeqTwoByteString::kHeaderSize - kHeapObjectTag); | 4652 SeqTwoByteString::kHeaderSize - kHeapObjectTag); |
(...skipping 360 matching lines...)
5001 &call_runtime_); | 5013 &call_runtime_); |
5002 | 5014 |
5003 __ sll(result_, result_, kSmiTagSize); | 5015 __ sll(result_, result_, kSmiTagSize); |
5004 __ bind(&exit_); | 5016 __ bind(&exit_); |
5005 } | 5017 } |
5006 | 5018 |
5007 | 5019 |
5008 void StringCharCodeAtGenerator::GenerateSlow( | 5020 void StringCharCodeAtGenerator::GenerateSlow( |
5009 MacroAssembler* masm, | 5021 MacroAssembler* masm, |
5010 const RuntimeCallHelper& call_helper) { | 5022 const RuntimeCallHelper& call_helper) { |
5011 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); | 5023 __ Abort("Unexpected fallthrough to CharCodeAt slow case"); |
5012 | 5024 |
5013 // Index is not a smi. | 5025 // Index is not a smi. |
5014 __ bind(&index_not_smi_); | 5026 __ bind(&index_not_smi_); |
5015 // If index is a heap number, try converting it to an integer. | 5027 // If index is a heap number, try converting it to an integer. |
5016 __ CheckMap(index_, | 5028 __ CheckMap(index_, |
5017 result_, | 5029 result_, |
5018 Heap::kHeapNumberMapRootIndex, | 5030 Heap::kHeapNumberMapRootIndex, |
5019 index_not_number_, | 5031 index_not_number_, |
5020 DONT_DO_SMI_CHECK); | 5032 DONT_DO_SMI_CHECK); |
5021 call_helper.BeforeCall(masm); | 5033 call_helper.BeforeCall(masm); |
(...skipping 28 matching lines...)
5050 call_helper.BeforeCall(masm); | 5062 call_helper.BeforeCall(masm); |
5051 __ sll(index_, index_, kSmiTagSize); | 5063 __ sll(index_, index_, kSmiTagSize); |
5052 __ Push(object_, index_); | 5064 __ Push(object_, index_); |
5053 __ CallRuntime(Runtime::kStringCharCodeAt, 2); | 5065 __ CallRuntime(Runtime::kStringCharCodeAt, 2); |
5054 | 5066 |
5055 __ Move(result_, v0); | 5067 __ Move(result_, v0); |
5056 | 5068 |
5057 call_helper.AfterCall(masm); | 5069 call_helper.AfterCall(masm); |
5058 __ jmp(&exit_); | 5070 __ jmp(&exit_); |
5059 | 5071 |
5060 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); | 5072 __ Abort("Unexpected fallthrough from CharCodeAt slow case"); |
5061 } | 5073 } |
5062 | 5074 |
5063 | 5075 |
5064 // ------------------------------------------------------------------------- | 5076 // ------------------------------------------------------------------------- |
5065 // StringCharFromCodeGenerator | 5077 // StringCharFromCodeGenerator |
5066 | 5078 |
5067 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { | 5079 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { |
5068 // Fast case of Heap::LookupSingleCharacterStringFromCode. | 5080 // Fast case of Heap::LookupSingleCharacterStringFromCode. |
5069 | 5081 |
5070 ASSERT(!t0.is(result_)); | 5082 ASSERT(!t0.is(result_)); |
(...skipping 16 matching lines...)
5087 __ lw(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); | 5099 __ lw(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); |
5088 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); | 5100 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); |
5089 __ Branch(&slow_case_, eq, result_, Operand(t0)); | 5101 __ Branch(&slow_case_, eq, result_, Operand(t0)); |
5090 __ bind(&exit_); | 5102 __ bind(&exit_); |
5091 } | 5103 } |
5092 | 5104 |
5093 | 5105 |
5094 void StringCharFromCodeGenerator::GenerateSlow( | 5106 void StringCharFromCodeGenerator::GenerateSlow( |
5095 MacroAssembler* masm, | 5107 MacroAssembler* masm, |
5096 const RuntimeCallHelper& call_helper) { | 5108 const RuntimeCallHelper& call_helper) { |
5097 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); | 5109 __ Abort("Unexpected fallthrough to CharFromCode slow case"); |
5098 | 5110 |
5099 __ bind(&slow_case_); | 5111 __ bind(&slow_case_); |
5100 call_helper.BeforeCall(masm); | 5112 call_helper.BeforeCall(masm); |
5101 __ push(code_); | 5113 __ push(code_); |
5102 __ CallRuntime(Runtime::kCharFromCode, 1); | 5114 __ CallRuntime(Runtime::kCharFromCode, 1); |
5103 __ Move(result_, v0); | 5115 __ Move(result_, v0); |
5104 | 5116 |
5105 call_helper.AfterCall(masm); | 5117 call_helper.AfterCall(masm); |
5106 __ Branch(&exit_); | 5118 __ Branch(&exit_); |
5107 | 5119 |
5108 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); | 5120 __ Abort("Unexpected fallthrough from CharFromCode slow case"); |
5109 } | 5121 } |
5110 | 5122 |
5111 | 5123 |
5112 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, | 5124 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, |
5113 Register dest, | 5125 Register dest, |
5114 Register src, | 5126 Register src, |
5115 Register count, | 5127 Register count, |
5116 Register scratch, | 5128 Register scratch, |
5117 bool ascii) { | 5129 bool ascii) { |
5118 Label loop; | 5130 Label loop; |
(...skipping 34 matching lines...)
5153 Register scratch5, | 5165 Register scratch5, |
5154 int flags) { | 5166 int flags) { |
5155 bool ascii = (flags & COPY_ASCII) != 0; | 5167 bool ascii = (flags & COPY_ASCII) != 0; |
5156 bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0; | 5168 bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0; |
5157 | 5169 |
5158 if (dest_always_aligned && FLAG_debug_code) { | 5170 if (dest_always_aligned && FLAG_debug_code) { |
5159 // Check that destination is actually word aligned if the flag says | 5171 // Check that destination is actually word aligned if the flag says |
5160 // that it is. | 5172 // that it is. |
5161 __ And(scratch4, dest, Operand(kPointerAlignmentMask)); | 5173 __ And(scratch4, dest, Operand(kPointerAlignmentMask)); |
5162 __ Check(eq, | 5174 __ Check(eq, |
5163 kDestinationOfCopyNotAligned, | 5175 "Destination of copy not aligned.", |
5164 scratch4, | 5176 scratch4, |
5165 Operand(zero_reg)); | 5177 Operand(zero_reg)); |
5166 } | 5178 } |
5167 | 5179 |
5168 const int kReadAlignment = 4; | 5180 const int kReadAlignment = 4; |
5169 const int kReadAlignmentMask = kReadAlignment - 1; | 5181 const int kReadAlignmentMask = kReadAlignment - 1; |
5170 // Ensure that reading an entire aligned word containing the last character | 5182 // Ensure that reading an entire aligned word containing the last character |
5171 // of a string will not read outside the allocated area (because we pad up | 5183 // of a string will not read outside the allocated area (because we pad up |
5172 // to kObjectAlignment). | 5184 // to kObjectAlignment). |
5173 STATIC_ASSERT(kObjectAlignment >= kReadAlignment); | 5185 STATIC_ASSERT(kObjectAlignment >= kReadAlignment); |
(...skipping 179 matching lines...)
5353 | 5365 |
5354 // If entry is undefined no string with this hash can be found. | 5366 // If entry is undefined no string with this hash can be found. |
5355 Label is_string; | 5367 Label is_string; |
5356 __ GetObjectType(candidate, scratch, scratch); | 5368 __ GetObjectType(candidate, scratch, scratch); |
5357 __ Branch(&is_string, ne, scratch, Operand(ODDBALL_TYPE)); | 5369 __ Branch(&is_string, ne, scratch, Operand(ODDBALL_TYPE)); |
5358 | 5370 |
5359 __ Branch(not_found, eq, undefined, Operand(candidate)); | 5371 __ Branch(not_found, eq, undefined, Operand(candidate)); |
5360 // Must be the hole (deleted entry). | 5372 // Must be the hole (deleted entry). |
5361 if (FLAG_debug_code) { | 5373 if (FLAG_debug_code) { |
5362 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); | 5374 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); |
5363 __ Assert(eq, kOddballInStringTableIsNotUndefinedOrTheHole, | 5375 __ Assert(eq, "oddball in string table is not undefined or the hole", |
5364 scratch, Operand(candidate)); | 5376 scratch, Operand(candidate)); |
5365 } | 5377 } |
5366 __ jmp(&next_probe[i]); | 5378 __ jmp(&next_probe[i]); |
5367 | 5379 |
5368 __ bind(&is_string); | 5380 __ bind(&is_string); |
5369 | 5381 |
5370 // Check that the candidate is a non-external ASCII string. The instance | 5382 // Check that the candidate is a non-external ASCII string. The instance |
5371 // type is still in the scratch register from the CompareObjectType | 5383 // type is still in the scratch register from the CompareObjectType |
5372 // operation. | 5384 // operation. |
5373 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &next_probe[i]); | 5385 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &next_probe[i]); |
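The probe sequence above is standard open addressing over the string table: an undefined entry terminates the search, the hole marks a deleted slot that must be skipped, and anything else is a candidate string to compare. A rough C++ paraphrase of the emitted control flow (kProbes, ProbeSlot, and IsSequentialAsciiString are illustrative names, not V8 API):

// Sketch of the runtime behavior, under the assumptions above.
for (int i = 0; i < kProbes; ++i) {
  Object* candidate = table[ProbeSlot(hash, i)];
  if (candidate == undefined) return kNotFound;  // empty slot: hash not present
  if (candidate == the_hole) continue;           // deleted slot: keep probing
  if (IsSequentialAsciiString(candidate)) {
    // Compare contents; on a match the already-interned string is reused.
  }
}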
(...skipping 1187 matching lines...)
6561 // No need to pop or drop anything, LeaveExitFrame will restore the old | 6573 // No need to pop or drop anything, LeaveExitFrame will restore the old |
6562 // stack, thus dropping the allocated space for the return value. | 6574 // stack, thus dropping the allocated space for the return value. |
6563 // The saved ra is after the reserved stack space for the 4 args. | 6575 // The saved ra is after the reserved stack space for the 4 args. |
6564 __ lw(t9, MemOperand(sp, kCArgsSlotsSize)); | 6576 __ lw(t9, MemOperand(sp, kCArgsSlotsSize)); |
6565 | 6577 |
6566 if (FLAG_debug_code && FLAG_enable_slow_asserts) { | 6578 if (FLAG_debug_code && FLAG_enable_slow_asserts) { |
6567 // In case of an error the return address may point to a memory area | 6579 // In case of an error the return address may point to a memory area |
6568 // filled with kZapValue by the GC. | 6580 // filled with kZapValue by the GC. |
6569 // Dereference the address and check for this. | 6581 // Dereference the address and check for this. |
6570 __ lw(t0, MemOperand(t9)); | 6582 __ lw(t0, MemOperand(t9)); |
6571 __ Assert(ne, kReceivedInvalidReturnAddress, t0, | 6583 __ Assert(ne, "Received invalid return address.", t0, |
6572 Operand(reinterpret_cast<uint32_t>(kZapValue))); | 6584 Operand(reinterpret_cast<uint32_t>(kZapValue))); |
6573 } | 6585 } |
6574 __ Jump(t9); | 6586 __ Jump(t9); |
6575 } | 6587 } |
6576 | 6588 |
6577 | 6589 |
6578 void DirectCEntryStub::GenerateCall(MacroAssembler* masm, | 6590 void DirectCEntryStub::GenerateCall(MacroAssembler* masm, |
6579 Register target) { | 6591 Register target) { |
6580 __ Move(t9, target); | 6592 __ Move(t9, target); |
6581 __ AssertStackIsAligned(); | 6593 __ AssertStackIsAligned(); |
(...skipping 730 matching lines...)
7312 for (int i = 0; i <= last_index; ++i) { | 7324 for (int i = 0; i <= last_index; ++i) { |
7313 Label next; | 7325 Label next; |
7314 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 7326 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
7315 __ Branch(&next, ne, a3, Operand(kind)); | 7327 __ Branch(&next, ne, a3, Operand(kind)); |
7316 T stub(kind); | 7328 T stub(kind); |
7317 __ TailCallStub(&stub); | 7329 __ TailCallStub(&stub); |
7318 __ bind(&next); | 7330 __ bind(&next); |
7319 } | 7331 } |
7320 | 7332 |
7321 // If we reached this point there is a problem. | 7333 // If we reached this point there is a problem. |
7322 __ Abort(kUnexpectedElementsKindInArrayConstructor); | 7334 __ Abort("Unexpected ElementsKind in array constructor"); |
7323 } | 7335 } |
7324 | 7336 |
7325 | 7337 |
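Both dispatch helpers in this area share the same runtime shape: walk the fast ElementsKind sequence in order, tail-call the stub specialized for the first matching kind, and abort if nothing matches. A C++-flavored paraphrase of what the emitted code does (a sketch, not the MIPS itself; a3 holds the incoming kind, and stub_for is an illustrative name):

// Sketch of the dispatch the loop above emits.
for (int i = 0; i <= last_index; ++i) {
  ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
  if (elements_kind == kind) {
    TailCallStub(stub_for(kind));  // one specialized stub per kind
  }
}
Abort("Unexpected ElementsKind in array constructor");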
7326 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { | 7338 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { |
7327 // a2 - type info cell | 7339 // a2 - type info cell |
7328 // a3 - kind | 7340 // a3 - kind |
7329 // a0 - number of arguments | 7341 // a0 - number of arguments |
7330 // a1 - constructor? | 7342 // a1 - constructor? |
7331 // sp[0] - last argument | 7343 // sp[0] - last argument |
7332 ASSERT(FAST_SMI_ELEMENTS == 0); | 7344 ASSERT(FAST_SMI_ELEMENTS == 0); |
(...skipping 34 matching lines...)
7367 for (int i = 0; i <= last_index; ++i) { | 7379 for (int i = 0; i <= last_index; ++i) { |
7368 Label next; | 7380 Label next; |
7369 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 7381 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
7370 __ Branch(&next, ne, a3, Operand(kind)); | 7382 __ Branch(&next, ne, a3, Operand(kind)); |
7371 ArraySingleArgumentConstructorStub stub(kind); | 7383 ArraySingleArgumentConstructorStub stub(kind); |
7372 __ TailCallStub(&stub); | 7384 __ TailCallStub(&stub); |
7373 __ bind(&next); | 7385 __ bind(&next); |
7374 } | 7386 } |
7375 | 7387 |
7376 // If we reached this point there is a problem. | 7388 // If we reached this point there is a problem. |
7377 __ Abort(kUnexpectedElementsKindInArrayConstructor); | 7389 __ Abort("Unexpected ElementsKind in array constructor"); |
7378 } | 7390 } |
7379 | 7391 |
7380 | 7392 |
7381 template<class T> | 7393 template<class T> |
7382 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { | 7394 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { |
7383 int to_index = GetSequenceIndexFromFastElementsKind( | 7395 int to_index = GetSequenceIndexFromFastElementsKind( |
7384 TERMINAL_FAST_ELEMENTS_KIND); | 7396 TERMINAL_FAST_ELEMENTS_KIND); |
7385 for (int i = 0; i <= to_index; ++i) { | 7397 for (int i = 0; i <= to_index; ++i) { |
7386 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 7398 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
7387 T stub(kind); | 7399 T stub(kind); |
(...skipping 40 matching lines...)
7428 // -- sp[4] : last argument | 7440 // -- sp[4] : last argument |
7429 // ----------------------------------- | 7441 // ----------------------------------- |
7430 if (FLAG_debug_code) { | 7442 if (FLAG_debug_code) { |
7431 // The array construct code is only set for the global and natives | 7443 // The array construct code is only set for the global and natives |
7432 // builtin Array functions which always have maps. | 7444 // builtin Array functions which always have maps. |
7433 | 7445 |
7434 // Initial map for the builtin Array function should be a map. | 7446 // Initial map for the builtin Array function should be a map. |
7435 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); | 7447 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); |
7436 // Will both indicate a NULL and a Smi. | 7448 // Will both indicate a NULL and a Smi. |
7437 __ And(at, a3, Operand(kSmiTagMask)); | 7449 __ And(at, a3, Operand(kSmiTagMask)); |
7438 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, | 7450 __ Assert(ne, "Unexpected initial map for Array function", |
7439 at, Operand(zero_reg)); | 7451 at, Operand(zero_reg)); |
7440 __ GetObjectType(a3, a3, t0); | 7452 __ GetObjectType(a3, a3, t0); |
7441 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, | 7453 __ Assert(eq, "Unexpected initial map for Array function", |
7442 t0, Operand(MAP_TYPE)); | 7454 t0, Operand(MAP_TYPE)); |
7443 | 7455 |
7444 // We should either have undefined in a2 or a valid cell. | 7456 // We should either have undefined in a2 or a valid cell. |
7445 Label okay_here; | 7457 Label okay_here; |
7446 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); | 7458 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); |
7447 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 7459 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
7448 __ Branch(&okay_here, eq, a2, Operand(at)); | 7460 __ Branch(&okay_here, eq, a2, Operand(at)); |
7449 __ lw(a3, FieldMemOperand(a2, 0)); | 7461 __ lw(a3, FieldMemOperand(a2, 0)); |
7450 __ Assert(eq, kExpectedPropertyCellInRegisterA2, | 7462 __ Assert(eq, "Expected property cell in register a2", |
7451 a3, Operand(cell_map)); | 7463 a3, Operand(cell_map)); |
7452 __ bind(&okay_here); | 7464 __ bind(&okay_here); |
7453 } | 7465 } |
7454 | 7466 |
7455 Label no_info, switch_ready; | 7467 Label no_info, switch_ready; |
7456 // Get the elements kind and case on that. | 7468 // Get the elements kind and case on that. |
7457 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 7469 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
7458 __ Branch(&no_info, eq, a2, Operand(at)); | 7470 __ Branch(&no_info, eq, a2, Operand(at)); |
7459 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset)); | 7471 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset)); |
7460 | 7472 |
(...skipping 79 matching lines...)
7540 // ----------------------------------- | 7552 // ----------------------------------- |
7541 | 7553 |
7542 if (FLAG_debug_code) { | 7554 if (FLAG_debug_code) { |
7543 // The array construct code is only set for the global and natives | 7555 // The array construct code is only set for the global and natives |
7544 // builtin Array functions which always have maps. | 7556 // builtin Array functions which always have maps. |
7545 | 7557 |
7546 // Initial map for the builtin Array function should be a map. | 7558 // Initial map for the builtin Array function should be a map. |
7547 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); | 7559 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); |
7548 // Will both indicate a NULL and a Smi. | 7560 // Will both indicate a NULL and a Smi. |
7549 __ And(at, a3, Operand(kSmiTagMask)); | 7561 __ And(at, a3, Operand(kSmiTagMask)); |
7550 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, | 7562 __ Assert(ne, "Unexpected initial map for Array function", |
7551 at, Operand(zero_reg)); | 7563 at, Operand(zero_reg)); |
7552 __ GetObjectType(a3, a3, t0); | 7564 __ GetObjectType(a3, a3, t0); |
7553 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, | 7565 __ Assert(eq, "Unexpected initial map for Array function", |
7554 t0, Operand(MAP_TYPE)); | 7566 t0, Operand(MAP_TYPE)); |
7555 } | 7567 } |
7556 | 7568 |
7557 // Figure out the right elements kind. | 7569 // Figure out the right elements kind. |
7558 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); | 7570 __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); |
7559 | 7571 |
7560 // Load the map's "bit field 2" into a3. We only need the first byte, | 7572 // Load the map's "bit field 2" into a3. We only need the first byte, |
7561 // but the following bit field extraction takes care of that anyway. | 7573 // but the following bit field extraction takes care of that anyway. |
7562 __ lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset)); | 7574 __ lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset)); |
7563 // Retrieve elements_kind from bit field 2. | 7575 // Retrieve elements_kind from bit field 2. |
7564 __ Ext(a3, a3, Map::kElementsKindShift, Map::kElementsKindBitCount); | 7576 __ Ext(a3, a3, Map::kElementsKindShift, Map::kElementsKindBitCount); |
7565 | 7577 |
7566 if (FLAG_debug_code) { | 7578 if (FLAG_debug_code) { |
7567 Label done; | 7579 Label done; |
7568 __ Branch(&done, eq, a3, Operand(FAST_ELEMENTS)); | 7580 __ Branch(&done, eq, a3, Operand(FAST_ELEMENTS)); |
7569 __ Assert( | 7581 __ Assert( |
7570 eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray, | 7582 eq, "Invalid ElementsKind for InternalArray or InternalPackedArray", |
7571 a3, Operand(FAST_HOLEY_ELEMENTS)); | 7583 a3, Operand(FAST_HOLEY_ELEMENTS)); |
7572 __ bind(&done); | 7584 __ bind(&done); |
7573 } | 7585 } |
7574 | 7586 |
7575 Label fast_elements_case; | 7587 Label fast_elements_case; |
7576 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS)); | 7588 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS)); |
7577 GenerateCase(masm, FAST_HOLEY_ELEMENTS); | 7589 GenerateCase(masm, FAST_HOLEY_ELEMENTS); |
7578 | 7590 |
7579 __ bind(&fast_elements_case); | 7591 __ bind(&fast_elements_case); |
7580 GenerateCase(masm, FAST_ELEMENTS); | 7592 GenerateCase(masm, FAST_ELEMENTS); |
7581 } | 7593 } |
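Earlier in this function, the `__ Ext(a3, a3, Map::kElementsKindShift, Map::kElementsKindBitCount)` line is a plain bit-field extract: it pulls the elements kind out of the map's bit field 2 byte. The equivalent C++ arithmetic, assuming only the shift and width constants named in the hunk:

// What __ Ext(dst, src, shift, width) computes on the loaded byte.
int elements_kind =
    (bit_field2 >> Map::kElementsKindShift) &
    ((1 << Map::kElementsKindBitCount) - 1);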
7582 | 7594 |
7583 | 7595 |
7584 #undef __ | 7596 #undef __ |
7585 | 7597 |
7586 } } // namespace v8::internal | 7598 } } // namespace v8::internal |
7587 | 7599 |
7588 #endif // V8_TARGET_ARCH_MIPS | 7600 #endif // V8_TARGET_ARCH_MIPS |