| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 228 matching lines...) |
| 239 } | 239 } |
| 240 | 240 |
| 241 | 241 |
| 242 void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( | 242 void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( |
| 243 Isolate* isolate, | 243 Isolate* isolate, |
| 244 CodeStubInterfaceDescriptor* descriptor) { | 244 CodeStubInterfaceDescriptor* descriptor) { |
| 245 InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1); | 245 InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1); |
| 246 } | 246 } |
| 247 | 247 |
| 248 | 248 |
| 249 void UnaryOpStub::InitializeInterfaceDescriptor( |
| 250 Isolate* isolate, |
| 251 CodeStubInterfaceDescriptor* descriptor) { |
| 252 static Register registers[] = { r0 }; |
| 253 descriptor->register_param_count_ = 1; |
| 254 descriptor->register_params_ = registers; |
| 255 descriptor->deoptimization_handler_ = |
| 256 FUNCTION_ADDR(UnaryOpIC_Miss); |
| 257 } |
| 258 |
| 259 |
| 249 void StoreGlobalStub::InitializeInterfaceDescriptor( | 260 void StoreGlobalStub::InitializeInterfaceDescriptor( |
| 250 Isolate* isolate, | 261 Isolate* isolate, |
| 251 CodeStubInterfaceDescriptor* descriptor) { | 262 CodeStubInterfaceDescriptor* descriptor) { |
| 252 static Register registers[] = { r1, r2, r0 }; | 263 static Register registers[] = { r1, r2, r0 }; |
| 253 descriptor->register_param_count_ = 3; | 264 descriptor->register_param_count_ = 3; |
| 254 descriptor->register_params_ = registers; | 265 descriptor->register_params_ = registers; |
| 255 descriptor->deoptimization_handler_ = | 266 descriptor->deoptimization_handler_ = |
| 256 FUNCTION_ADDR(StoreIC_MissFromStubFailure); | 267 FUNCTION_ADDR(StoreIC_MissFromStubFailure); |
| 257 } | 268 } |
| 258 | 269 |
| (...skipping 243 matching lines...) |
| 502 __ mov(r2, Operand(Smi::FromInt(length))); | 513 __ mov(r2, Operand(Smi::FromInt(length))); |
| 503 __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset)); | 514 __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset)); |
| 504 | 515 |
| 505 // If this block context is nested in the native context we get a smi | 516 // If this block context is nested in the native context we get a smi |
| 506 // sentinel instead of a function. The block context should get the | 517 // sentinel instead of a function. The block context should get the |
| 507 // canonical empty function of the native context as its closure which | 518 // canonical empty function of the native context as its closure which |
| 508 // we still have to look up. | 519 // we still have to look up. |
| 509 Label after_sentinel; | 520 Label after_sentinel; |
| 510 __ JumpIfNotSmi(r3, &after_sentinel); | 521 __ JumpIfNotSmi(r3, &after_sentinel); |
| 511 if (FLAG_debug_code) { | 522 if (FLAG_debug_code) { |
| 523 const char* message = "Expected 0 as a Smi sentinel"; |
| 512 __ cmp(r3, Operand::Zero()); | 524 __ cmp(r3, Operand::Zero()); |
| 513 __ Assert(eq, kExpected0AsASmiSentinel); | 525 __ Assert(eq, message); |
| 514 } | 526 } |
| 515 __ ldr(r3, GlobalObjectOperand()); | 527 __ ldr(r3, GlobalObjectOperand()); |
| 516 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); | 528 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); |
| 517 __ ldr(r3, ContextOperand(r3, Context::CLOSURE_INDEX)); | 529 __ ldr(r3, ContextOperand(r3, Context::CLOSURE_INDEX)); |
| 518 __ bind(&after_sentinel); | 530 __ bind(&after_sentinel); |
| 519 | 531 |
| 520 // Set up the fixed slots, copy the global object from the previous context. | 532 // Set up the fixed slots, copy the global object from the previous context. |
| 521 __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | 533 __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
| 522 __ str(r3, ContextOperand(r0, Context::CLOSURE_INDEX)); | 534 __ str(r3, ContextOperand(r0, Context::CLOSURE_INDEX)); |
| 523 __ str(cp, ContextOperand(r0, Context::PREVIOUS_INDEX)); | 535 __ str(cp, ContextOperand(r0, Context::PREVIOUS_INDEX)); |
| (...skipping 3374 matching lines...) |
| 3898 // Check that the first argument is a JSRegExp object. | 3910 // Check that the first argument is a JSRegExp object. |
| 3899 __ ldr(r0, MemOperand(sp, kJSRegExpOffset)); | 3911 __ ldr(r0, MemOperand(sp, kJSRegExpOffset)); |
| 3900 __ JumpIfSmi(r0, &runtime); | 3912 __ JumpIfSmi(r0, &runtime); |
| 3901 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); | 3913 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); |
| 3902 __ b(ne, &runtime); | 3914 __ b(ne, &runtime); |
| 3903 | 3915 |
| 3904 // Check that the RegExp has been compiled (data contains a fixed array). | 3916 // Check that the RegExp has been compiled (data contains a fixed array). |
| 3905 __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset)); | 3917 __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset)); |
| 3906 if (FLAG_debug_code) { | 3918 if (FLAG_debug_code) { |
| 3907 __ SmiTst(regexp_data); | 3919 __ SmiTst(regexp_data); |
| 3908 __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected); | 3920 __ Check(ne, "Unexpected type for RegExp data, FixedArray expected"); |
| 3909 __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE); | 3921 __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE); |
| 3910 __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected); | 3922 __ Check(eq, "Unexpected type for RegExp data, FixedArray expected"); |
| 3911 } | 3923 } |
| 3912 | 3924 |
| 3913 // regexp_data: RegExp data (FixedArray) | 3925 // regexp_data: RegExp data (FixedArray) |
| 3914 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. | 3926 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. |
| 3915 __ ldr(r0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); | 3927 __ ldr(r0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); |
| 3916 __ cmp(r0, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); | 3928 __ cmp(r0, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); |
| 3917 __ b(ne, &runtime); | 3929 __ b(ne, &runtime); |
| 3918 | 3930 |
| 3919 // regexp_data: RegExp data (FixedArray) | 3931 // regexp_data: RegExp data (FixedArray) |
| 3920 // Check that the number of captures fit in the static offsets vector buffer. | 3932 // Check that the number of captures fit in the static offsets vector buffer. |
| (...skipping 321 matching lines...) |
| 4242 __ b(gt, ¬_long_external); // Go to (8). | 4254 __ b(gt, ¬_long_external); // Go to (8). |
| 4243 | 4255 |
| 4244 // (7) External string. Make it, offset-wise, look like a sequential string. | 4256 // (7) External string. Make it, offset-wise, look like a sequential string. |
| 4245 __ bind(&external_string); | 4257 __ bind(&external_string); |
| 4246 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset)); | 4258 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset)); |
| 4247 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset)); | 4259 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset)); |
| 4248 if (FLAG_debug_code) { | 4260 if (FLAG_debug_code) { |
| 4249 // Assert that we do not have a cons or slice (indirect strings) here. | 4261 // Assert that we do not have a cons or slice (indirect strings) here. |
| 4250 // Sequential strings have already been ruled out. | 4262 // Sequential strings have already been ruled out. |
| 4251 __ tst(r0, Operand(kIsIndirectStringMask)); | 4263 __ tst(r0, Operand(kIsIndirectStringMask)); |
| 4252 __ Assert(eq, kExternalStringExpectedButNotFound); | 4264 __ Assert(eq, "external string expected, but not found"); |
| 4253 } | 4265 } |
| 4254 __ ldr(subject, | 4266 __ ldr(subject, |
| 4255 FieldMemOperand(subject, ExternalString::kResourceDataOffset)); | 4267 FieldMemOperand(subject, ExternalString::kResourceDataOffset)); |
| 4256 // Move the pointer so that offset-wise, it looks like a sequential string. | 4268 // Move the pointer so that offset-wise, it looks like a sequential string. |
| 4257 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); | 4269 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); |
| 4258 __ sub(subject, | 4270 __ sub(subject, |
| 4259 subject, | 4271 subject, |
| 4260 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | 4272 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); |
| 4261 __ jmp(&seq_string); // Go to (5). | 4273 __ jmp(&seq_string); // Go to (5). |
| 4262 | 4274 |
| (...skipping 361 matching lines...) |
| 4624 &call_runtime_); | 4636 &call_runtime_); |
| 4625 | 4637 |
| 4626 __ SmiTag(result_); | 4638 __ SmiTag(result_); |
| 4627 __ bind(&exit_); | 4639 __ bind(&exit_); |
| 4628 } | 4640 } |
| 4629 | 4641 |
| 4630 | 4642 |
| 4631 void StringCharCodeAtGenerator::GenerateSlow( | 4643 void StringCharCodeAtGenerator::GenerateSlow( |
| 4632 MacroAssembler* masm, | 4644 MacroAssembler* masm, |
| 4633 const RuntimeCallHelper& call_helper) { | 4645 const RuntimeCallHelper& call_helper) { |
| 4634 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); | 4646 __ Abort("Unexpected fallthrough to CharCodeAt slow case"); |
| 4635 | 4647 |
| 4636 // Index is not a smi. | 4648 // Index is not a smi. |
| 4637 __ bind(&index_not_smi_); | 4649 __ bind(&index_not_smi_); |
| 4638 // If index is a heap number, try converting it to an integer. | 4650 // If index is a heap number, try converting it to an integer. |
| 4639 __ CheckMap(index_, | 4651 __ CheckMap(index_, |
| 4640 result_, | 4652 result_, |
| 4641 Heap::kHeapNumberMapRootIndex, | 4653 Heap::kHeapNumberMapRootIndex, |
| 4642 index_not_number_, | 4654 index_not_number_, |
| 4643 DONT_DO_SMI_CHECK); | 4655 DONT_DO_SMI_CHECK); |
| 4644 call_helper.BeforeCall(masm); | 4656 call_helper.BeforeCall(masm); |
| (...skipping 24 matching lines...) |
| 4669 // is too complex (e.g., when the string needs to be flattened). | 4681 // is too complex (e.g., when the string needs to be flattened). |
| 4670 __ bind(&call_runtime_); | 4682 __ bind(&call_runtime_); |
| 4671 call_helper.BeforeCall(masm); | 4683 call_helper.BeforeCall(masm); |
| 4672 __ SmiTag(index_); | 4684 __ SmiTag(index_); |
| 4673 __ Push(object_, index_); | 4685 __ Push(object_, index_); |
| 4674 __ CallRuntime(Runtime::kStringCharCodeAt, 2); | 4686 __ CallRuntime(Runtime::kStringCharCodeAt, 2); |
| 4675 __ Move(result_, r0); | 4687 __ Move(result_, r0); |
| 4676 call_helper.AfterCall(masm); | 4688 call_helper.AfterCall(masm); |
| 4677 __ jmp(&exit_); | 4689 __ jmp(&exit_); |
| 4678 | 4690 |
| 4679 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); | 4691 __ Abort("Unexpected fallthrough from CharCodeAt slow case"); |
| 4680 } | 4692 } |
| 4681 | 4693 |
| 4682 | 4694 |
| 4683 // ------------------------------------------------------------------------- | 4695 // ------------------------------------------------------------------------- |
| 4684 // StringCharFromCodeGenerator | 4696 // StringCharFromCodeGenerator |
| 4685 | 4697 |
| 4686 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { | 4698 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { |
| 4687 // Fast case of Heap::LookupSingleCharacterStringFromCode. | 4699 // Fast case of Heap::LookupSingleCharacterStringFromCode. |
| 4688 STATIC_ASSERT(kSmiTag == 0); | 4700 STATIC_ASSERT(kSmiTag == 0); |
| 4689 STATIC_ASSERT(kSmiShiftSize == 0); | 4701 STATIC_ASSERT(kSmiShiftSize == 0); |
| 4690 ASSERT(IsPowerOf2(String::kMaxOneByteCharCode + 1)); | 4702 ASSERT(IsPowerOf2(String::kMaxOneByteCharCode + 1)); |
| 4691 __ tst(code_, | 4703 __ tst(code_, |
| 4692 Operand(kSmiTagMask | | 4704 Operand(kSmiTagMask | |
| 4693 ((~String::kMaxOneByteCharCode) << kSmiTagSize))); | 4705 ((~String::kMaxOneByteCharCode) << kSmiTagSize))); |
| 4694 __ b(ne, &slow_case_); | 4706 __ b(ne, &slow_case_); |
| 4695 | 4707 |
| 4696 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); | 4708 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); |
| 4697 // At this point code register contains smi tagged ASCII char code. | 4709 // At this point code register contains smi tagged ASCII char code. |
| 4698 __ add(result_, result_, Operand::PointerOffsetFromSmiKey(code_)); | 4710 __ add(result_, result_, Operand::PointerOffsetFromSmiKey(code_)); |
| 4699 __ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); | 4711 __ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); |
| 4700 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex); | 4712 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex); |
| 4701 __ b(eq, &slow_case_); | 4713 __ b(eq, &slow_case_); |
| 4702 __ bind(&exit_); | 4714 __ bind(&exit_); |
| 4703 } | 4715 } |
| 4704 | 4716 |
| 4705 | 4717 |
| 4706 void StringCharFromCodeGenerator::GenerateSlow( | 4718 void StringCharFromCodeGenerator::GenerateSlow( |
| 4707 MacroAssembler* masm, | 4719 MacroAssembler* masm, |
| 4708 const RuntimeCallHelper& call_helper) { | 4720 const RuntimeCallHelper& call_helper) { |
| 4709 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); | 4721 __ Abort("Unexpected fallthrough to CharFromCode slow case"); |
| 4710 | 4722 |
| 4711 __ bind(&slow_case_); | 4723 __ bind(&slow_case_); |
| 4712 call_helper.BeforeCall(masm); | 4724 call_helper.BeforeCall(masm); |
| 4713 __ push(code_); | 4725 __ push(code_); |
| 4714 __ CallRuntime(Runtime::kCharFromCode, 1); | 4726 __ CallRuntime(Runtime::kCharFromCode, 1); |
| 4715 __ Move(result_, r0); | 4727 __ Move(result_, r0); |
| 4716 call_helper.AfterCall(masm); | 4728 call_helper.AfterCall(masm); |
| 4717 __ jmp(&exit_); | 4729 __ jmp(&exit_); |
| 4718 | 4730 |
| 4719 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); | 4731 __ Abort("Unexpected fallthrough from CharFromCode slow case"); |
| 4720 } | 4732 } |
| 4721 | 4733 |
| 4722 | 4734 |
| 4723 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, | 4735 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, |
| 4724 Register dest, | 4736 Register dest, |
| 4725 Register src, | 4737 Register src, |
| 4726 Register count, | 4738 Register count, |
| 4727 Register scratch, | 4739 Register scratch, |
| 4728 bool ascii) { | 4740 bool ascii) { |
| 4729 Label loop; | 4741 Label loop; |
| (...skipping 36 matching lines...) |
| 4766 Register scratch4, | 4778 Register scratch4, |
| 4767 Register scratch5, | 4779 Register scratch5, |
| 4768 int flags) { | 4780 int flags) { |
| 4769 bool ascii = (flags & COPY_ASCII) != 0; | 4781 bool ascii = (flags & COPY_ASCII) != 0; |
| 4770 bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0; | 4782 bool dest_always_aligned = (flags & DEST_ALWAYS_ALIGNED) != 0; |
| 4771 | 4783 |
| 4772 if (dest_always_aligned && FLAG_debug_code) { | 4784 if (dest_always_aligned && FLAG_debug_code) { |
| 4773 // Check that destination is actually word aligned if the flag says | 4785 // Check that destination is actually word aligned if the flag says |
| 4774 // that it is. | 4786 // that it is. |
| 4775 __ tst(dest, Operand(kPointerAlignmentMask)); | 4787 __ tst(dest, Operand(kPointerAlignmentMask)); |
| 4776 __ Check(eq, kDestinationOfCopyNotAligned); | 4788 __ Check(eq, "Destination of copy not aligned."); |
| 4777 } | 4789 } |
| 4778 | 4790 |
| 4779 const int kReadAlignment = 4; | 4791 const int kReadAlignment = 4; |
| 4780 const int kReadAlignmentMask = kReadAlignment - 1; | 4792 const int kReadAlignmentMask = kReadAlignment - 1; |
| 4781 // Ensure that reading an entire aligned word containing the last character | 4793 // Ensure that reading an entire aligned word containing the last character |
| 4782 // of a string will not read outside the allocated area (because we pad up | 4794 // of a string will not read outside the allocated area (because we pad up |
| 4783 // to kObjectAlignment). | 4795 // to kObjectAlignment). |
| 4784 STATIC_ASSERT(kObjectAlignment >= kReadAlignment); | 4796 STATIC_ASSERT(kObjectAlignment >= kReadAlignment); |
| 4785 // Assumes word reads and writes are little endian. | 4797 // Assumes word reads and writes are little endian. |
| 4786 // Nothing to do for zero characters. | 4798 // Nothing to do for zero characters. |
| (...skipping 208 matching lines...) |
| 4995 Label is_string; | 5007 Label is_string; |
| 4996 __ CompareObjectType(candidate, scratch, scratch, ODDBALL_TYPE); | 5008 __ CompareObjectType(candidate, scratch, scratch, ODDBALL_TYPE); |
| 4997 __ b(ne, &is_string); | 5009 __ b(ne, &is_string); |
| 4998 | 5010 |
| 4999 __ cmp(undefined, candidate); | 5011 __ cmp(undefined, candidate); |
| 5000 __ b(eq, not_found); | 5012 __ b(eq, not_found); |
| 5001 // Must be the hole (deleted entry). | 5013 // Must be the hole (deleted entry). |
| 5002 if (FLAG_debug_code) { | 5014 if (FLAG_debug_code) { |
| 5003 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 5015 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 5004 __ cmp(ip, candidate); | 5016 __ cmp(ip, candidate); |
| 5005 __ Assert(eq, kOddballInStringTableIsNotUndefinedOrTheHole); | 5017 __ Assert(eq, "oddball in string table is not undefined or the hole"); |
| 5006 } | 5018 } |
| 5007 __ jmp(&next_probe[i]); | 5019 __ jmp(&next_probe[i]); |
| 5008 | 5020 |
| 5009 __ bind(&is_string); | 5021 __ bind(&is_string); |
| 5010 | 5022 |
| 5011 // Check that the candidate is a non-external ASCII string. The instance | 5023 // Check that the candidate is a non-external ASCII string. The instance |
| 5012 // type is still in the scratch register from the CompareObjectType | 5024 // type is still in the scratch register from the CompareObjectType |
| 5013 // operation. | 5025 // operation. |
| 5014 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &next_probe[i]); | 5026 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &next_probe[i]); |
| 5015 | 5027 |
| (...skipping 1877 matching lines...) |
| 6893 Label next; | 6905 Label next; |
| 6894 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 6906 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
| 6895 __ cmp(r3, Operand(kind)); | 6907 __ cmp(r3, Operand(kind)); |
| 6896 __ b(ne, &next); | 6908 __ b(ne, &next); |
| 6897 T stub(kind); | 6909 T stub(kind); |
| 6898 __ TailCallStub(&stub); | 6910 __ TailCallStub(&stub); |
| 6899 __ bind(&next); | 6911 __ bind(&next); |
| 6900 } | 6912 } |
| 6901 | 6913 |
| 6902 // If we reached this point there is a problem. | 6914 // If we reached this point there is a problem. |
| 6903 __ Abort(kUnexpectedElementsKindInArrayConstructor); | 6915 __ Abort("Unexpected ElementsKind in array constructor"); |
| 6904 } | 6916 } |
| 6905 | 6917 |
| 6906 | 6918 |
| 6907 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { | 6919 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { |
| 6908 // r2 - type info cell | 6920 // r2 - type info cell |
| 6909 // r3 - kind | 6921 // r3 - kind |
| 6910 // r0 - number of arguments | 6922 // r0 - number of arguments |
| 6911 // r1 - constructor? | 6923 // r1 - constructor? |
| 6912 // sp[0] - last argument | 6924 // sp[0] - last argument |
| 6913 ASSERT(FAST_SMI_ELEMENTS == 0); | 6925 ASSERT(FAST_SMI_ELEMENTS == 0); |
| (...skipping 36 matching lines...) |
| 6950 Label next; | 6962 Label next; |
| 6951 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 6963 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
| 6952 __ cmp(r3, Operand(kind)); | 6964 __ cmp(r3, Operand(kind)); |
| 6953 __ b(ne, &next); | 6965 __ b(ne, &next); |
| 6954 ArraySingleArgumentConstructorStub stub(kind); | 6966 ArraySingleArgumentConstructorStub stub(kind); |
| 6955 __ TailCallStub(&stub); | 6967 __ TailCallStub(&stub); |
| 6956 __ bind(&next); | 6968 __ bind(&next); |
| 6957 } | 6969 } |
| 6958 | 6970 |
| 6959 // If we reached this point there is a problem. | 6971 // If we reached this point there is a problem. |
| 6960 __ Abort(kUnexpectedElementsKindInArrayConstructor); | 6972 __ Abort("Unexpected ElementsKind in array constructor"); |
| 6961 } | 6973 } |
| 6962 | 6974 |
| 6963 | 6975 |
| 6964 template<class T> | 6976 template<class T> |
| 6965 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { | 6977 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { |
| 6966 int to_index = GetSequenceIndexFromFastElementsKind( | 6978 int to_index = GetSequenceIndexFromFastElementsKind( |
| 6967 TERMINAL_FAST_ELEMENTS_KIND); | 6979 TERMINAL_FAST_ELEMENTS_KIND); |
| 6968 for (int i = 0; i <= to_index; ++i) { | 6980 for (int i = 0; i <= to_index; ++i) { |
| 6969 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 6981 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
| 6970 T stub(kind); | 6982 T stub(kind); |
| (...skipping 40 matching lines...) |
| 7011 // -- sp[4] : last argument | 7023 // -- sp[4] : last argument |
| 7012 // ----------------------------------- | 7024 // ----------------------------------- |
| 7013 if (FLAG_debug_code) { | 7025 if (FLAG_debug_code) { |
| 7014 // The array construct code is only set for the global and natives | 7026 // The array construct code is only set for the global and natives |
| 7015 // builtin Array functions which always have maps. | 7027 // builtin Array functions which always have maps. |
| 7016 | 7028 |
| 7017 // Initial map for the builtin Array function should be a map. | 7029 // Initial map for the builtin Array function should be a map. |
| 7018 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); | 7030 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); |
| 7019 // Will both indicate a NULL and a Smi. | 7031 // Will both indicate a NULL and a Smi. |
| 7020 __ tst(r3, Operand(kSmiTagMask)); | 7032 __ tst(r3, Operand(kSmiTagMask)); |
| 7021 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); | 7033 __ Assert(ne, "Unexpected initial map for Array function"); |
| 7022 __ CompareObjectType(r3, r3, r4, MAP_TYPE); | 7034 __ CompareObjectType(r3, r3, r4, MAP_TYPE); |
| 7023 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); | 7035 __ Assert(eq, "Unexpected initial map for Array function"); |
| 7024 | 7036 |
| 7025 // We should either have undefined in ebx or a valid cell | 7037 // We should either have undefined in ebx or a valid cell |
| 7026 Label okay_here; | 7038 Label okay_here; |
| 7027 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); | 7039 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); |
| 7028 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); | 7040 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); |
| 7029 __ b(eq, &okay_here); | 7041 __ b(eq, &okay_here); |
| 7030 __ ldr(r3, FieldMemOperand(r2, 0)); | 7042 __ ldr(r3, FieldMemOperand(r2, 0)); |
| 7031 __ cmp(r3, Operand(cell_map)); | 7043 __ cmp(r3, Operand(cell_map)); |
| 7032 __ Assert(eq, kExpectedPropertyCellInRegisterEbx); | 7044 __ Assert(eq, "Expected property cell in register ebx"); |
| 7033 __ bind(&okay_here); | 7045 __ bind(&okay_here); |
| 7034 } | 7046 } |
| 7035 | 7047 |
| 7036 Label no_info, switch_ready; | 7048 Label no_info, switch_ready; |
| 7037 // Get the elements kind and case on that. | 7049 // Get the elements kind and case on that. |
| 7038 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); | 7050 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); |
| 7039 __ b(eq, &no_info); | 7051 __ b(eq, &no_info); |
| 7040 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); | 7052 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); |
| 7041 | 7053 |
| 7042 // The type cell may have undefined in its value. | 7054 // The type cell may have undefined in its value. |
| (...skipping 82 matching lines...) |
| 7125 // ----------------------------------- | 7137 // ----------------------------------- |
| 7126 | 7138 |
| 7127 if (FLAG_debug_code) { | 7139 if (FLAG_debug_code) { |
| 7128 // The array construct code is only set for the global and natives | 7140 // The array construct code is only set for the global and natives |
| 7129 // builtin Array functions which always have maps. | 7141 // builtin Array functions which always have maps. |
| 7130 | 7142 |
| 7131 // Initial map for the builtin Array function should be a map. | 7143 // Initial map for the builtin Array function should be a map. |
| 7132 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); | 7144 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); |
| 7133 // Will both indicate a NULL and a Smi. | 7145 // Will both indicate a NULL and a Smi. |
| 7134 __ tst(r3, Operand(kSmiTagMask)); | 7146 __ tst(r3, Operand(kSmiTagMask)); |
| 7135 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); | 7147 __ Assert(ne, "Unexpected initial map for Array function"); |
| 7136 __ CompareObjectType(r3, r3, r4, MAP_TYPE); | 7148 __ CompareObjectType(r3, r3, r4, MAP_TYPE); |
| 7137 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); | 7149 __ Assert(eq, "Unexpected initial map for Array function"); |
| 7138 } | 7150 } |
| 7139 | 7151 |
| 7140 // Figure out the right elements kind | 7152 // Figure out the right elements kind |
| 7141 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); | 7153 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); |
| 7142 // Load the map's "bit field 2" into |result|. We only need the first byte, | 7154 // Load the map's "bit field 2" into |result|. We only need the first byte, |
| 7143 // but the following bit field extraction takes care of that anyway. | 7155 // but the following bit field extraction takes care of that anyway. |
| 7144 __ ldr(r3, FieldMemOperand(r3, Map::kBitField2Offset)); | 7156 __ ldr(r3, FieldMemOperand(r3, Map::kBitField2Offset)); |
| 7145 // Retrieve elements_kind from bit field 2. | 7157 // Retrieve elements_kind from bit field 2. |
| 7146 __ Ubfx(r3, r3, Map::kElementsKindShift, Map::kElementsKindBitCount); | 7158 __ Ubfx(r3, r3, Map::kElementsKindShift, Map::kElementsKindBitCount); |
| 7147 | 7159 |
| 7148 if (FLAG_debug_code) { | 7160 if (FLAG_debug_code) { |
| 7149 Label done; | 7161 Label done; |
| 7150 __ cmp(r3, Operand(FAST_ELEMENTS)); | 7162 __ cmp(r3, Operand(FAST_ELEMENTS)); |
| 7151 __ b(eq, &done); | 7163 __ b(eq, &done); |
| 7152 __ cmp(r3, Operand(FAST_HOLEY_ELEMENTS)); | 7164 __ cmp(r3, Operand(FAST_HOLEY_ELEMENTS)); |
| 7153 __ Assert(eq, | 7165 __ Assert(eq, |
| 7154 kInvalidElementsKindForInternalArrayOrInternalPackedArray); | 7166 "Invalid ElementsKind for InternalArray or InternalPackedArray"); |
| 7155 __ bind(&done); | 7167 __ bind(&done); |
| 7156 } | 7168 } |
| 7157 | 7169 |
| 7158 Label fast_elements_case; | 7170 Label fast_elements_case; |
| 7159 __ cmp(r3, Operand(FAST_ELEMENTS)); | 7171 __ cmp(r3, Operand(FAST_ELEMENTS)); |
| 7160 __ b(eq, &fast_elements_case); | 7172 __ b(eq, &fast_elements_case); |
| 7161 GenerateCase(masm, FAST_HOLEY_ELEMENTS); | 7173 GenerateCase(masm, FAST_HOLEY_ELEMENTS); |
| 7162 | 7174 |
| 7163 __ bind(&fast_elements_case); | 7175 __ bind(&fast_elements_case); |
| 7164 GenerateCase(masm, FAST_ELEMENTS); | 7176 GenerateCase(masm, FAST_ELEMENTS); |
| 7165 } | 7177 } |
| 7166 | 7178 |
| 7167 | 7179 |
| 7168 #undef __ | 7180 #undef __ |
| 7169 | 7181 |
| 7170 } } // namespace v8::internal | 7182 } } // namespace v8::internal |
| 7171 | 7183 |
| 7172 #endif // V8_TARGET_ARCH_ARM | 7184 #endif // V8_TARGET_ARCH_ARM |
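
For readers skimming the diff: the recurring change in the debug-only blocks is the message argument of Assert/Check/Abort, which appears as a BailoutReason-style constant in the OLD column (e.g. __ Assert(eq, kExpected0AsASmiSentinel)) and as a C-string literal in the NEW column (e.g. __ Assert(eq, "Expected 0 as a Smi sentinel")). The sketch below is a minimal, hypothetical C++ illustration of those two call styles only; the Assert overloads, the ToString helper, and the enum definition are assumptions made for demonstration and are not V8's actual MacroAssembler or BailoutReason code.

    #include <cstdio>
    #include <cstdlib>

    namespace sketch {

    // Stand-in for V8's BailoutReason constants; the enumerator names are taken
    // from the OLD column of the diff, everything else here is assumed.
    enum BailoutReason {
      kExpected0AsASmiSentinel,
      kDestinationOfCopyNotAligned
    };

    // Hypothetical helper mapping a reason back to readable text.
    inline const char* ToString(BailoutReason reason) {
      switch (reason) {
        case kExpected0AsASmiSentinel:     return "Expected 0 as a Smi sentinel";
        case kDestinationOfCopyNotAligned: return "Destination of copy not aligned.";
      }
      return "unknown bailout reason";
    }

    // Enum-based style (OLD column): the message text lives in a central table.
    inline void Assert(bool condition, BailoutReason reason) {
      if (!condition) {
        std::fprintf(stderr, "Assertion failed: %s\n", ToString(reason));
        std::abort();
      }
    }

    // String-based style (NEW column): the message text lives at the call site.
    inline void Assert(bool condition, const char* message) {
      if (!condition) {
        std::fprintf(stderr, "Assertion failed: %s\n", message);
        std::abort();
      }
    }

    }  // namespace sketch

    int main() {
      // Both styles behave identically when the checked condition holds.
      sketch::Assert(true, sketch::kExpected0AsASmiSentinel);
      sketch::Assert(true, "Expected 0 as a Smi sentinel");
      return 0;
    }

Either style aborts with a readable message when the condition fails; the enum form centralizes the message strings in one table, while the string form keeps each message next to the check that uses it.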