| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 976 matching lines...) |
| 987 __ lw(a3, FieldMemOperand(lhs, HeapObject::kMapOffset)); | 987 __ lw(a3, FieldMemOperand(lhs, HeapObject::kMapOffset)); |
| 988 __ lbu(a2, FieldMemOperand(a2, Map::kBitFieldOffset)); | 988 __ lbu(a2, FieldMemOperand(a2, Map::kBitFieldOffset)); |
| 989 __ lbu(a3, FieldMemOperand(a3, Map::kBitFieldOffset)); | 989 __ lbu(a3, FieldMemOperand(a3, Map::kBitFieldOffset)); |
| 990 __ and_(a0, a2, a3); | 990 __ and_(a0, a2, a3); |
| 991 __ And(a0, a0, Operand(1 << Map::kIsUndetectable)); | 991 __ And(a0, a0, Operand(1 << Map::kIsUndetectable)); |
| 992 __ Ret(USE_DELAY_SLOT); | 992 __ Ret(USE_DELAY_SLOT); |
| 993 __ xori(v0, a0, 1 << Map::kIsUndetectable); | 993 __ xori(v0, a0, 1 << Map::kIsUndetectable); |
| 994 } | 994 } |
| 995 | 995 |
| 996 | 996 |
| 997 void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm, | |
| 998 Register object, | |
| 999 Register result, | |
| 1000 Register scratch1, | |
| 1001 Register scratch2, | |
| 1002 Register scratch3, | |
| 1003 Label* not_found) { | |
| 1004 // Use of registers. Register result is used as a temporary. | |
| 1005 Register number_string_cache = result; | |
| 1006 Register mask = scratch3; | |
| 1007 | |
| 1008 // Load the number string cache. | |
| 1009 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex); | |
| 1010 | |
| 1011 // Make the hash mask from the length of the number string cache. It | |
| 1012 // contains two elements (number and string) for each cache entry. | |
| 1013 __ lw(mask, FieldMemOperand(number_string_cache, FixedArray::kLengthOffset)); | |
| 1014 // Divide length by two (length is a smi). | |
| 1015 __ sra(mask, mask, kSmiTagSize + 1); | |
| 1016 __ Addu(mask, mask, -1); // Make mask. | |
| 1017 | |
| 1018 // Calculate the entry in the number string cache. The hash value in the | |
| 1019 // number string cache for smis is just the smi value, and the hash for | |
| 1020 // doubles is the xor of the upper and lower words. See | |
| 1021 // Heap::GetNumberStringCache. | |
| 1022 Isolate* isolate = masm->isolate(); | |
| 1023 Label is_smi; | |
| 1024 Label load_result_from_cache; | |
| 1025 __ JumpIfSmi(object, &is_smi); | |
| 1026 __ CheckMap(object, | |
| 1027 scratch1, | |
| 1028 Heap::kHeapNumberMapRootIndex, | |
| 1029 not_found, | |
| 1030 DONT_DO_SMI_CHECK); | |
| 1031 | |
| 1032 STATIC_ASSERT(8 == kDoubleSize); | |
| 1033 __ Addu(scratch1, | |
| 1034 object, | |
| 1035 Operand(HeapNumber::kValueOffset - kHeapObjectTag)); | |
| 1036 __ lw(scratch2, MemOperand(scratch1, kPointerSize)); | |
| 1037 __ lw(scratch1, MemOperand(scratch1, 0)); | |
| 1038 __ Xor(scratch1, scratch1, Operand(scratch2)); | |
| 1039 __ And(scratch1, scratch1, Operand(mask)); | |
| 1040 | |
| 1041 // Calculate address of entry in string cache: each entry consists | |
| 1042 // of two pointer sized fields. | |
| 1043 __ sll(scratch1, scratch1, kPointerSizeLog2 + 1); | |
| 1044 __ Addu(scratch1, number_string_cache, scratch1); | |
| 1045 | |
| 1046 Register probe = mask; | |
| 1047 __ lw(probe, | |
| 1048 FieldMemOperand(scratch1, FixedArray::kHeaderSize)); | |
| 1049 __ JumpIfSmi(probe, not_found); | |
| 1050 __ ldc1(f12, FieldMemOperand(object, HeapNumber::kValueOffset)); | |
| 1051 __ ldc1(f14, FieldMemOperand(probe, HeapNumber::kValueOffset)); | |
| 1052 __ BranchF(&load_result_from_cache, NULL, eq, f12, f14); | |
| 1053 __ Branch(not_found); | |
| 1054 | |
| 1055 __ bind(&is_smi); | |
| 1056 Register scratch = scratch1; | |
| 1057 __ sra(scratch, object, 1); // Shift away the tag. | |
| 1058 __ And(scratch, mask, Operand(scratch)); | |
| 1059 | |
| 1060 // Calculate address of entry in string cache: each entry consists | |
| 1061 // of two pointer sized fields. | |
| 1062 __ sll(scratch, scratch, kPointerSizeLog2 + 1); | |
| 1063 __ Addu(scratch, number_string_cache, scratch); | |
| 1064 | |
| 1065 // Check if the entry is the smi we are looking for. | |
| 1066 __ lw(probe, FieldMemOperand(scratch, FixedArray::kHeaderSize)); | |
| 1067 __ Branch(not_found, ne, object, Operand(probe)); | |
| 1068 | |
| 1069 // Get the result from the cache. | |
| 1070 __ bind(&load_result_from_cache); | |
| 1071 __ lw(result, | |
| 1072 FieldMemOperand(scratch, FixedArray::kHeaderSize + kPointerSize)); | |
| 1073 | |
| 1074 __ IncrementCounter(isolate->counters()->number_to_string_native(), | |
| 1075 1, | |
| 1076 scratch1, | |
| 1077 scratch2); | |
| 1078 } | |
| 1079 | |
| 1080 | |
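The removed stub implemented the lookup described in its own comments (see Heap::GetNumberStringCache): the cache is a FixedArray holding a (number, string) pair per entry, the mask is built from half its length, smis hash to their own value, and heap numbers hash to the xor of the two 32-bit words of the double. A minimal standalone C++ sketch of that hashing scheme, using plain types rather than V8's internal classes (the helper names below are illustrative only):

    #include <cstdint>
    #include <cstring>

    // Each cache entry occupies two slots (key, value), so the mask comes
    // from length / 2; length / 2 is assumed to be a power of two so that
    // masking works as a modulus.
    inline uint32_t CacheMask(int fixed_array_length) {
      return static_cast<uint32_t>(fixed_array_length / 2 - 1);
    }

    // Smis hash to their own (untagged) value.
    inline uint32_t SmiHash(int32_t smi_value, uint32_t mask) {
      return static_cast<uint32_t>(smi_value) & mask;
    }

    // Heap numbers hash to the xor of the upper and lower words of the double.
    inline uint32_t DoubleHash(double value, uint32_t mask) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof(bits));
      return (static_cast<uint32_t>(bits) ^ static_cast<uint32_t>(bits >> 32)) & mask;
    }

The entry's key then sits at element index hash * 2 and the cached string at hash * 2 + 1, which is why the stub scales the hash by kPointerSizeLog2 + 1 when forming the entry address.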
| 1081 void NumberToStringStub::Generate(MacroAssembler* masm) { | 997 void NumberToStringStub::Generate(MacroAssembler* masm) { |
| 1082 Label runtime; | 998 Label runtime; |
| 1083 | 999 |
| 1084 __ lw(a1, MemOperand(sp, 0)); | 1000 __ lw(a1, MemOperand(sp, 0)); |
| 1085 | 1001 |
| 1086 // Generate code to lookup number in the number string cache. | 1002 // Generate code to lookup number in the number string cache. |
| 1087 GenerateLookupNumberStringCache(masm, a1, v0, a2, a3, t0, &runtime); | 1003 __ LookupNumberStringCache(a1, v0, a2, a3, t0, &runtime); |
| 1088 __ DropAndRet(1); | 1004 __ DropAndRet(1); |
| 1089 | 1005 |
| 1090 __ bind(&runtime); | 1006 __ bind(&runtime); |
| 1091 // Handle number to string in the runtime system if not found in the cache. | 1007 // Handle number to string in the runtime system if not found in the cache. |
| 1092 __ TailCallRuntime(Runtime::kNumberToString, 1, 1); | 1008 __ TailCallRuntime(Runtime::kNumberToString, 1, 1); |
| 1093 } | 1009 } |
| 1094 | 1010 |
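The new call sites go through a MacroAssembler helper instead of the stub-local routine. Its declaration lives in macro-assembler-mips.h, which is outside this hunk, so the exact parameter names below are an assumption inferred from the two call sites; the register roles mirror the removed stub:

    // Looks up |object| (a smi or heap number) in the number->string cache.
    // On a hit the cached string ends up in |result|; on a miss control
    // jumps to |not_found|. The three scratch registers are clobbered.
    void LookupNumberStringCache(Register object,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 Label* not_found);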
| 1095 | 1011 |
| 1096 static void ICCompareStub_CheckInputType(MacroAssembler* masm, | 1012 static void ICCompareStub_CheckInputType(MacroAssembler* masm, |
| 1097 Register input, | 1013 Register input, |
| (...skipping 4758 matching lines...) |
| 5856 Label* slow) { | 5772 Label* slow) { |
| 5857 // First check if the argument is already a string. | 5773 // First check if the argument is already a string. |
| 5858 Label not_string, done; | 5774 Label not_string, done; |
| 5859 __ JumpIfSmi(arg, ¬_string); | 5775 __ JumpIfSmi(arg, ¬_string); |
| 5860 __ GetObjectType(arg, scratch1, scratch1); | 5776 __ GetObjectType(arg, scratch1, scratch1); |
| 5861 __ Branch(&done, lt, scratch1, Operand(FIRST_NONSTRING_TYPE)); | 5777 __ Branch(&done, lt, scratch1, Operand(FIRST_NONSTRING_TYPE)); |
| 5862 | 5778 |
| 5863 // Check the number to string cache. | 5779 // Check the number to string cache. |
| 5864 __ bind(¬_string); | 5780 __ bind(¬_string); |
| 5865 // Puts the cached result into scratch1. | 5781 // Puts the cached result into scratch1. |
| 5866 NumberToStringStub::GenerateLookupNumberStringCache(masm, | 5782 __ LookupNumberStringCache(arg, scratch1, scratch2, scratch3, scratch4, slow); |
| 5867 arg, | |
| 5868 scratch1, | |
| 5869 scratch2, | |
| 5870 scratch3, | |
| 5871 scratch4, | |
| 5872 slow); | |
| 5873 __ mov(arg, scratch1); | 5783 __ mov(arg, scratch1); |
| 5874 __ sw(arg, MemOperand(sp, stack_offset)); | 5784 __ sw(arg, MemOperand(sp, stack_offset)); |
| 5875 __ bind(&done); | 5785 __ bind(&done); |
| 5876 } | 5786 } |
| 5877 | 5787 |
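For reference, the helper above implements the usual string-add fast path: an argument that is already a string is left alone, anything else goes through the number->string cache, and the converted value is written back into the argument's stack slot. A rough C++-level paraphrase of the assembly, with hypothetical names taken from the surrounding code rather than any real V8 API:

    // Pseudocode for the in-place argument conversion done in assembly above.
    // arg        : the string-add argument being converted
    // stack_slot : sp + stack_offset, where the converted value is stored back
    //
    //   if (!IsSmi(arg) && IsString(arg)) goto done;   // already a string
    //   arg = LookupNumberStringCache(arg);            // a miss jumps to |slow|
    //   *stack_slot = arg;
    // done: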
| 5878 | 5788 |
| 5879 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 5789 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
| 5880 ASSERT(state_ == CompareIC::SMI); | 5790 ASSERT(state_ == CompareIC::SMI); |
| 5881 Label miss; | 5791 Label miss; |
| 5882 __ Or(a2, a1, a0); | 5792 __ Or(a2, a1, a0); |
| (...skipping 1398 matching lines...) |
| 7281 __ bind(&fast_elements_case); | 7191 __ bind(&fast_elements_case); |
| 7282 GenerateCase(masm, FAST_ELEMENTS); | 7192 GenerateCase(masm, FAST_ELEMENTS); |
| 7283 } | 7193 } |
| 7284 | 7194 |
| 7285 | 7195 |
| 7286 #undef __ | 7196 #undef __ |
| 7287 | 7197 |
| 7288 } } // namespace v8::internal | 7198 } } // namespace v8::internal |
| 7289 | 7199 |
| 7290 #endif // V8_TARGET_ARCH_MIPS | 7200 #endif // V8_TARGET_ARCH_MIPS |