Chromium Code Reviews

Unified Diff: src/mips/code-stubs-mips.cc

Issue 8404030: Version 3.7.1 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 9 years, 1 month ago

 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 244 matching lines...)
 }


 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
   // Stack layout on entry:
   // [sp]: constant elements.
   // [sp + kPointerSize]: literal index.
   // [sp + (2 * kPointerSize)]: literals array.

   // All sizes here are multiples of kPointerSize.
-  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
+  int elements_size = 0;
+  if (length_ > 0) {
+    elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
+        ? FixedDoubleArray::SizeFor(length_)
+        : FixedArray::SizeFor(length_);
+  }
   int size = JSArray::kSize + elements_size;

   // Load boilerplate object into r3 and check if we need to create a
   // boilerplate.
   Label slow_case;
   __ lw(a3, MemOperand(sp, 2 * kPointerSize));
   __ lw(a0, MemOperand(sp, 1 * kPointerSize));
   __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
   __ Addu(t0, a3, t0);
   __ lw(a3, MemOperand(t0));
   __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
   __ Branch(&slow_case, eq, a3, Operand(t1));

   if (FLAG_debug_code) {
     const char* message;
     Heap::RootListIndex expected_map_index;
     if (mode_ == CLONE_ELEMENTS) {
       message = "Expected (writable) fixed array";
       expected_map_index = Heap::kFixedArrayMapRootIndex;
+    } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
+      message = "Expected (writable) fixed double array";
+      expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
     } else {
       ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
       message = "Expected copy-on-write fixed array";
       expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
     }
     __ push(a3);
     __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset));
     __ lw(a3, FieldMemOperand(a3, HeapObject::kMapOffset));
     __ LoadRoot(at, expected_map_index);
     __ Assert(eq, message, a3, Operand(at));
(...skipping 19 matching lines...)
   }

   if (length_ > 0) {
     // Get hold of the elements array of the boilerplate and setup the
     // elements pointer in the resulting object.
     __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset));
     __ Addu(a2, v0, Operand(JSArray::kSize));
     __ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset));

     // Copy the elements array.
+    ASSERT((elements_size % kPointerSize) == 0);
     __ CopyFields(a2, a3, a1.bit(), elements_size / kPointerSize);
   }

   // Return and remove the on-stack parameters.
   __ Addu(sp, sp, Operand(3 * kPointerSize));
   __ Ret();

   __ bind(&slow_case);
   __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
 }
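Note on the hunk above: CopyFields copies whole kPointerSize words, so the new ASSERT pins down that the element area stays word-divisible even for the double case. A quick worked check of the size arithmetic, as a hedged sketch (the SizeFor() shapes are assumed from the objects.h of this era, not quoted from this CL):

    // Sketch, not V8 source: why ASSERT((elements_size % kPointerSize) == 0)
    // holds on 32-bit MIPS. Header/element sizes assumed from objects.h.
    const int kPointerSize = 4;                // 32-bit MIPS word
    const int kDoubleSize = 8;
    const int kHeaderSize = 2 * kPointerSize;  // map + length fields

    int FixedArraySizeFor(int n)       { return kHeaderSize + kPointerSize * n; }
    int FixedDoubleArraySizeFor(int n) { return kHeaderSize + kDoubleSize * n; }

    // Both are multiples of kPointerSize, so the word count handed to
    // CopyFields is exact: e.g. 3 doubles -> 8 + 24 = 32 bytes = 8 words.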
(...skipping 3729 matching lines...)
     __ Branch(&miss, ne, function, Operand(at));
     __ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex);
     __ Branch(&miss, ne, map, Operand(at));
     __ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
     __ DropAndRet(HasArgsInRegisters() ? 0 : 2);

     __ bind(&miss);
   }

   // Get the prototype of the function.
-  __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
+  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);

   // Check that the function prototype is a JS object.
   __ JumpIfSmi(prototype, &slow);
   __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);

   // Update the global instanceof or call site inlined cache with the current
   // map and function. The cached answer will be set when it is known below.
   if (!HasCallSiteInlineCheck()) {
     __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
     __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
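The only change in this hunk is the extra trailing argument to TryGetFunctionPrototype. Judging from the MIPS macro-assembler header of this era (an assumption; that header is not part of this file), the new flag is miss_on_bound_function, so instanceof now falls through to the slow path for bound functions, which have no usable prototype slot to walk:

    // Assumed declaration (macro-assembler-mips.h, hedged reconstruction):
    void TryGetFunctionPrototype(Register function,
                                 Register result,
                                 Register scratch,
                                 Label* miss,
                                 bool miss_on_bound_function = false);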
(...skipping 2797 matching lines...)
   __ Jump(a2);
 }


 void DirectCEntryStub::Generate(MacroAssembler* masm) {
   // No need to pop or drop anything, LeaveExitFrame will restore the old
   // stack, thus dropping the allocated space for the return value.
   // The saved ra is after the reserved stack space for the 4 args.
   __ lw(t9, MemOperand(sp, kCArgsSlotsSize));

-  if (FLAG_debug_code && EnableSlowAsserts()) {
+  if (FLAG_debug_code && FLAG_enable_slow_asserts) {
     // In case of an error the return address may point to a memory area
     // filled with kZapValue by the GC.
     // Dereference the address and check for this.
     __ lw(t0, MemOperand(t9));
     __ Assert(ne, "Received invalid return address.", t0,
               Operand(reinterpret_cast<uint32_t>(kZapValue)));
   }
   __ Jump(t9);
 }

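For readers outside the VM: the slow assert works because the collector fills freed code space with the kZapValue pattern, so a single load through the saved return address distinguishes live code from a collected area. A C-level illustration (the real kZapValue constant lives in src/globals.h; the value below is an assumed placeholder):

    #include <stdint.h>

    const uint32_t kZapValue = 0xdeadbeef;  // assumed; see src/globals.h

    // Illustration only: mimics the "Received invalid return address" check.
    bool ReturnAddressLooksLive(const uint32_t* ra) {
      return *ra != kZapValue;  // zapped code space reads back kZapValue
    }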
(...skipping 29 matching lines...)
   masm->sw(ra, MemOperand(sp, kCArgsSlotsSize));
   masm->li(ra, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
                        RelocInfo::CODE_TARGET), true);
   // Call the function.
   masm->Jump(t9);
   // Make sure the stored 'ra' points to this position.
   ASSERT_EQ(kNumInstructionsToJump, masm->InstructionsGeneratedSince(&find_ra));
 }


-MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup(
+void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
+                                                        Label* miss,
+                                                        Label* done,
+                                                        Register receiver,
+                                                        Register properties,
+                                                        Handle<String> name,
+                                                        Register scratch0) {
+  // If names of slots in range from 1 to kProbes - 1 for the hash value are
+  // not equal to the name and kProbes-th slot is not used (its name is the
+  // undefined value), it guarantees the hash table doesn't contain the
+  // property. It's true even if some slots represent deleted properties
+  // (their names are the null value).
+  for (int i = 0; i < kInlinedProbes; i++) {
+    // scratch0 points to properties hash.
+    // Compute the masked index: (hash + i + i * i) & mask.
+    Register index = scratch0;
+    // Capacity is smi 2^n.
+    __ lw(index, FieldMemOperand(properties, kCapacityOffset));
+    __ Subu(index, index, Operand(1));
+    __ And(index, index, Operand(
+        Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));
+
+    // Scale the index by multiplying by the entry size.
+    ASSERT(StringDictionary::kEntrySize == 3);
+    __ sll(at, index, 1);
+    __ Addu(index, index, at);
+
+    Register entity_name = scratch0;
+    // Having undefined at this place means the name is not contained.
+    ASSERT_EQ(kSmiTagSize, 1);
+    Register tmp = properties;
+    __ sll(tmp, index, 1);
+    __ Addu(tmp, properties, tmp);
+    __ lw(entity_name, FieldMemOperand(tmp, kElementsStartOffset));
+
+    ASSERT(!tmp.is(entity_name));
+    __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
+    __ Branch(done, eq, entity_name, Operand(tmp));
+
+    if (i != kInlinedProbes - 1) {
+      // Stop if found the property.
+      __ Branch(miss, eq, entity_name, Operand(Handle<String>(name)));
+
+      // Check if the entry name is not a symbol.
+      __ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
+      __ lbu(entity_name,
+             FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
+      __ And(tmp, entity_name, Operand(kIsSymbolMask));
+      __ Branch(miss, eq, tmp, Operand(zero_reg));
+
+      // Restore the properties.
+      __ lw(properties,
+            FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+    }
+  }
+
+  const int spill_mask =
+      (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() |
+       a2.bit() | a1.bit() | a0.bit() | v0.bit());
+
+  __ MultiPush(spill_mask);
+  __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+  __ li(a1, Operand(Handle<String>(name)));
+  StringDictionaryLookupStub stub(NEGATIVE_LOOKUP);
+  __ CallStub(&stub);
+  __ mov(at, v0);
+  __ MultiPop(spill_mask);
+
+  __ Branch(done, eq, at, Operand(zero_reg));
+  __ Branch(miss, ne, at, Operand(zero_reg));
+}
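The new handlified GenerateNegativeLookup takes a Handle<String> and calls CallStub, so it is GC-safe, while the raw-pointer version survives below as TryGenerateNegativeLookup until the stub cache is handlified (see the TODO). Two details of the probe loop deserve a gloss. First, the masked index is the dictionary's quadratic probe: probe i inspects (hash + probe_offset(i)) & (capacity - 1). Second, with kEntrySize == 3 the index is now scaled by one shifted add instead of the old mov/sll pair; and because v0 joined the spill mask, the stub's answer is parked in at before MultiPop restores v0. A hedged C sketch of the arithmetic (helper names are mine, not V8 API):

    #include <stdint.h>

    // Probe i of a power-of-two table; probe_offset is what
    // StringDictionary::GetProbeOffset(i) supplies (the source comment
    // describes it as i + i * i).
    uint32_t ProbeIndex(uint32_t hash, uint32_t probe_offset,
                        uint32_t capacity) {
      return (hash + probe_offset) & (capacity - 1);
    }

    // What "sll at, index, 1; addu index, index, at" computes:
    uint32_t ScaleByEntrySize(uint32_t index) {
      return index + (index << 1);  // index * 3, no multiply needed
    }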
+
+
+// TODO(kmillikin): Eliminate this function when the stub cache is fully
+// handlified.
+MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup(
     MacroAssembler* masm,
     Label* miss,
     Label* done,
     Register receiver,
     Register properties,
     String* name,
     Register scratch0) {
   // If names of slots in range from 1 to kProbes - 1 for the hash value are
   // not equal to the name and kProbes-th slot is not used (its name is the
   // undefined value), it guarantees the hash table doesn't contain the
   // property. It's true even if some slots represent deleted properties
   // (their names are the null value).
   for (int i = 0; i < kInlinedProbes; i++) {
     // scratch0 points to properties hash.
     // Compute the masked index: (hash + i + i * i) & mask.
     Register index = scratch0;
     // Capacity is smi 2^n.
     __ lw(index, FieldMemOperand(properties, kCapacityOffset));
     __ Subu(index, index, Operand(1));
     __ And(index, index, Operand(
         Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));

     // Scale the index by multiplying by the entry size.
     ASSERT(StringDictionary::kEntrySize == 3);
     // index *= 3.
-    __ mov(at, index);
-    __ sll(index, index, 1);
+    __ sll(at, index, 1);
     __ Addu(index, index, at);

     Register entity_name = scratch0;
     // Having undefined at this place means the name is not contained.
     ASSERT_EQ(kSmiTagSize, 1);
     Register tmp = properties;

     __ sll(scratch0, index, 1);
     __ Addu(tmp, properties, scratch0);
     __ lw(entity_name, FieldMemOperand(tmp, kElementsStartOffset));
(...skipping 14 matching lines...)
       __ Branch(miss, eq, scratch0, Operand(zero_reg));

       // Restore the properties.
       __ lw(properties,
             FieldMemOperand(receiver, JSObject::kPropertiesOffset));
     }
   }

   const int spill_mask =
       (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() |
-       a2.bit() | a1.bit() | a0.bit());
+       a2.bit() | a1.bit() | a0.bit() | v0.bit());

   __ MultiPush(spill_mask);
   __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
   __ li(a1, Operand(Handle<String>(name)));
   StringDictionaryLookupStub stub(NEGATIVE_LOOKUP);
   MaybeObject* result = masm->TryCallStub(&stub);
   if (result->IsFailure()) return result;
+  __ mov(at, v0);
   __ MultiPop(spill_mask);

-  __ Branch(done, eq, v0, Operand(zero_reg));
-  __ Branch(miss, ne, v0, Operand(zero_reg));
+  __ Branch(done, eq, at, Operand(zero_reg));
+  __ Branch(miss, ne, at, Operand(zero_reg));
   return result;
 }


 // Probe the string dictionary in the |elements| register. Jump to the
 // |done| label if a property with the given name is found. Jump to
 // the |miss| label otherwise.
 // If lookup was successful |scratch2| will be equal to elements + 4 * index.
 void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                         Label* miss,
(...skipping 25 matching lines...)
       __ Addu(scratch2, scratch2, Operand(
           StringDictionary::GetProbeOffset(i) << String::kHashShift));
     }
     __ srl(scratch2, scratch2, String::kHashShift);
     __ And(scratch2, scratch1, scratch2);

     // Scale the index by multiplying by the element size.
     ASSERT(StringDictionary::kEntrySize == 3);
     // scratch2 = scratch2 * 3.

-    __ mov(at, scratch2);
-    __ sll(scratch2, scratch2, 1);
+    __ sll(at, scratch2, 1);
     __ Addu(scratch2, scratch2, at);

     // Check if the key is identical to the name.
     __ sll(at, scratch2, 2);
     __ Addu(scratch2, elements, at);
     __ lw(at, FieldMemOperand(scratch2, kElementsStartOffset));
     __ Branch(done, eq, name, Operand(at));
   }

   const int spill_mask =
       (ra.bit() | t2.bit() | t1.bit() | t0.bit() |
-       a3.bit() | a2.bit() | a1.bit() | a0.bit()) &
+       a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
       ~(scratch1.bit() | scratch2.bit());

   __ MultiPush(spill_mask);
-  __ Move(a0, elements);
-  __ Move(a1, name);
+  if (name.is(a0)) {
+    ASSERT(!elements.is(a1));
+    __ Move(a1, name);
+    __ Move(a0, elements);
+  } else {
+    __ Move(a0, elements);
+    __ Move(a1, name);
+  }
   StringDictionaryLookupStub stub(POSITIVE_LOOKUP);
   __ CallStub(&stub);
   __ mov(scratch2, a2);
+  __ mov(at, v0);
   __ MultiPop(spill_mask);

-  __ Branch(done, ne, v0, Operand(zero_reg));
-  __ Branch(miss, eq, v0, Operand(zero_reg));
+  __ Branch(done, ne, at, Operand(zero_reg));
+  __ Branch(miss, eq, at, Operand(zero_reg));
 }
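The conditional move block above fixes a register-shuffle hazard rather than changing the common case: if the caller already has name in a0, the old unconditional Move(a0, elements) clobbered it before Move(a1, name) could read it. Emitting the non-conflicting move first resolves the overlap without a temporary; a compact sketch of the rule (plain C++, not V8 code):

    // Ordering two moves whose destination may alias a source:
    // write the aliased destination last.
    void MoveArgs(int& a0, int& a1, int elements, int name,
                  bool name_lives_in_a0) {
      if (name_lives_in_a0) {
        a1 = name;      // read a0's old value before overwriting it
        a0 = elements;  // the real code asserts elements is not in a1
      } else {
        a0 = elements;  // no overlap; either order works
        a1 = name;
      }
    }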


 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
   // This stub overrides SometimesSetsUpAFrame() to return false. That means
   // we cannot call anything that could cause a GC from this stub.
   // Registers:
   //  result: StringDictionary to probe
   //  a1: key
   //  : StringDictionary to probe.
(...skipping 103 matching lines...)
   // Used in CompileStoreGlobal.
   { t0, a1, a2, OMIT_REMEMBERED_SET },
   // Used in StoreStubCompiler::CompileStoreField via GenerateStoreField.
   { a1, a2, a3, EMIT_REMEMBERED_SET },
   { a3, a2, a1, EMIT_REMEMBERED_SET },
   // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
   { a2, a1, a3, EMIT_REMEMBERED_SET },
   { a3, a1, a2, EMIT_REMEMBERED_SET },
   // KeyedStoreStubCompiler::GenerateStoreFastElement.
   { t0, a2, a3, EMIT_REMEMBERED_SET },
+  // ElementsTransitionGenerator::GenerateSmiOnlyToObject
+  // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+  // and ElementsTransitionGenerator::GenerateDoubleToObject
+  { a2, a3, t5, EMIT_REMEMBERED_SET },
+  // ElementsTransitionGenerator::GenerateDoubleToObject
+  { t2, a2, a0, EMIT_REMEMBERED_SET },
+  { a2, t2, t5, EMIT_REMEMBERED_SET },
   // Null termination.
   { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
 };
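The three new rows register the (object, value, address) triples used by the elements-transition generators so that RecordWriteStub::IsPregenerated, just below, recognizes them and the matching stubs are compiled ahead of time. For reading the braced entries, the table's element type, declared earlier in this file and reconstructed here from memory as an assumption, is roughly:

    // Assumed shape of the entries above (see the real declaration near the
    // top of code-stubs-mips.cc; details may differ).
    struct AheadOfTimeWriteBarrierStubList {
      Register object, value, address;
      RememberedSetAction action;
    };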


 bool RecordWriteStub::IsPregenerated() {
   for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
        !entry->object.is(no_reg);
        entry++) {
     if (object_.is(entry->object) &&
(...skipping 224 matching lines...)

   // Fall through when we need to inform the incremental marker.
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_MIPS