Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(456)

Side by Side Diff: src/arm/code-stubs-arm.cc

Issue 8404030: Version 3.7.1 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 9 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/arm/code-stubs-arm.h ('k') | src/arm/codegen-arm.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 245 matching lines...) Expand 10 before | Expand all | Expand 10 after
256 256
257 257
258 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { 258 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
259 // Stack layout on entry: 259 // Stack layout on entry:
260 // 260 //
261 // [sp]: constant elements. 261 // [sp]: constant elements.
262 // [sp + kPointerSize]: literal index. 262 // [sp + kPointerSize]: literal index.
263 // [sp + (2 * kPointerSize)]: literals array. 263 // [sp + (2 * kPointerSize)]: literals array.
264 264
265 // All sizes here are multiples of kPointerSize. 265 // All sizes here are multiples of kPointerSize.
266 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0; 266 int elements_size = 0;
267 if (length_ > 0) {
268 elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
269 ? FixedDoubleArray::SizeFor(length_)
270 : FixedArray::SizeFor(length_);
271 }
267 int size = JSArray::kSize + elements_size; 272 int size = JSArray::kSize + elements_size;
268 273
269 // Load boilerplate object into r3 and check if we need to create a 274 // Load boilerplate object into r3 and check if we need to create a
270 // boilerplate. 275 // boilerplate.
271 Label slow_case; 276 Label slow_case;
272 __ ldr(r3, MemOperand(sp, 2 * kPointerSize)); 277 __ ldr(r3, MemOperand(sp, 2 * kPointerSize));
273 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); 278 __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
274 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 279 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
275 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 280 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
276 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 281 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
277 __ cmp(r3, ip); 282 __ cmp(r3, ip);
278 __ b(eq, &slow_case); 283 __ b(eq, &slow_case);
279 284
280 if (FLAG_debug_code) { 285 if (FLAG_debug_code) {
281 const char* message; 286 const char* message;
282 Heap::RootListIndex expected_map_index; 287 Heap::RootListIndex expected_map_index;
283 if (mode_ == CLONE_ELEMENTS) { 288 if (mode_ == CLONE_ELEMENTS) {
284 message = "Expected (writable) fixed array"; 289 message = "Expected (writable) fixed array";
285 expected_map_index = Heap::kFixedArrayMapRootIndex; 290 expected_map_index = Heap::kFixedArrayMapRootIndex;
291 } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
292 message = "Expected (writable) fixed double array";
293 expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
286 } else { 294 } else {
287 ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS); 295 ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
288 message = "Expected copy-on-write fixed array"; 296 message = "Expected copy-on-write fixed array";
289 expected_map_index = Heap::kFixedCOWArrayMapRootIndex; 297 expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
290 } 298 }
291 __ push(r3); 299 __ push(r3);
292 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); 300 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
293 __ ldr(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); 301 __ ldr(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
294 __ LoadRoot(ip, expected_map_index); 302 __ LoadRoot(ip, expected_map_index);
295 __ cmp(r3, ip); 303 __ cmp(r3, ip);
(...skipping 19 matching lines...) Expand all
315 } 323 }
316 324
317 if (length_ > 0) { 325 if (length_ > 0) {
318 // Get hold of the elements array of the boilerplate and setup the 326 // Get hold of the elements array of the boilerplate and setup the
319 // elements pointer in the resulting object. 327 // elements pointer in the resulting object.
320 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); 328 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
321 __ add(r2, r0, Operand(JSArray::kSize)); 329 __ add(r2, r0, Operand(JSArray::kSize));
322 __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset)); 330 __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset));
323 331
324 // Copy the elements array. 332 // Copy the elements array.
333 ASSERT((elements_size % kPointerSize) == 0);
325 __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize); 334 __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize);
326 } 335 }
327 336
328 // Return and remove the on-stack parameters. 337 // Return and remove the on-stack parameters.
329 __ add(sp, sp, Operand(3 * kPointerSize)); 338 __ add(sp, sp, Operand(3 * kPointerSize));
330 __ Ret(); 339 __ Ret();
331 340
332 __ bind(&slow_case); 341 __ bind(&slow_case);
333 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); 342 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
334 } 343 }
(...skipping 3571 matching lines...) Expand 10 before | Expand all | Expand 10 after
3906 __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex); 3915 __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex);
3907 __ cmp(map, ip); 3916 __ cmp(map, ip);
3908 __ b(ne, &miss); 3917 __ b(ne, &miss);
3909 __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); 3918 __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
3910 __ Ret(HasArgsInRegisters() ? 0 : 2); 3919 __ Ret(HasArgsInRegisters() ? 0 : 2);
3911 3920
3912 __ bind(&miss); 3921 __ bind(&miss);
3913 } 3922 }
3914 3923
3915 // Get the prototype of the function. 3924 // Get the prototype of the function.
3916 __ TryGetFunctionPrototype(function, prototype, scratch, &slow); 3925 __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
3917 3926
3918 // Check that the function prototype is a JS object. 3927 // Check that the function prototype is a JS object.
3919 __ JumpIfSmi(prototype, &slow); 3928 __ JumpIfSmi(prototype, &slow);
3920 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); 3929 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
3921 3930
3922 // Update the global instanceof or call site inlined cache with the current 3931 // Update the global instanceof or call site inlined cache with the current
3923 // map and function. The cached answer will be set when it is known below. 3932 // map and function. The cached answer will be set when it is known below.
3924 if (!HasCallSiteInlineCheck()) { 3933 if (!HasCallSiteInlineCheck()) {
3925 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); 3934 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
3926 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex); 3935 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
(...skipping 2734 matching lines...) Expand 10 before | Expand all | Expand 10 after
6661 Label start; 6670 Label start;
6662 __ bind(&start); 6671 __ bind(&start);
6663 __ add(ip, pc, Operand(Assembler::kInstrSize)); 6672 __ add(ip, pc, Operand(Assembler::kInstrSize));
6664 __ str(ip, MemOperand(sp, 0)); 6673 __ str(ip, MemOperand(sp, 0));
6665 __ Jump(target); // Call the C++ function. 6674 __ Jump(target); // Call the C++ function.
6666 ASSERT_EQ(Assembler::kInstrSize + Assembler::kPcLoadDelta, 6675 ASSERT_EQ(Assembler::kInstrSize + Assembler::kPcLoadDelta,
6667 masm->SizeOfCodeGeneratedSince(&start)); 6676 masm->SizeOfCodeGeneratedSince(&start));
6668 } 6677 }
6669 6678
6670 6679
6671 MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup( 6680 void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
6681 Label* miss,
6682 Label* done,
6683 Register receiver,
6684 Register properties,
6685 Handle<String> name,
6686 Register scratch0) {
6687 // If names of slots in range from 1 to kProbes - 1 for the hash value are
6688 // not equal to the name and kProbes-th slot is not used (its name is the
6689 // undefined value), it guarantees the hash table doesn't contain the
6690 // property. It's true even if some slots represent deleted properties
6691 // (their names are the null value).
6692 for (int i = 0; i < kInlinedProbes; i++) {
6693 // scratch0 points to properties hash.
6694 // Compute the masked index: (hash + i + i * i) & mask.
6695 Register index = scratch0;
6696 // Capacity is smi 2^n.
6697 __ ldr(index, FieldMemOperand(properties, kCapacityOffset));
6698 __ sub(index, index, Operand(1));
6699 __ and_(index, index, Operand(
6700 Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));
6701
6702 // Scale the index by multiplying by the entry size.
6703 ASSERT(StringDictionary::kEntrySize == 3);
6704 __ add(index, index, Operand(index, LSL, 1)); // index *= 3.
6705
6706 Register entity_name = scratch0;
6707 // Having undefined at this place means the name is not contained.
6708 ASSERT_EQ(kSmiTagSize, 1);
6709 Register tmp = properties;
6710 __ add(tmp, properties, Operand(index, LSL, 1));
6711 __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset));
6712
6713 ASSERT(!tmp.is(entity_name));
6714 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
6715 __ cmp(entity_name, tmp);
6716 __ b(eq, done);
6717
6718 if (i != kInlinedProbes - 1) {
6719 // Stop if found the property.
6720 __ cmp(entity_name, Operand(Handle<String>(name)));
6721 __ b(eq, miss);
6722
6723 // Check if the entry name is not a symbol.
6724 __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
6725 __ ldrb(entity_name,
6726 FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
6727 __ tst(entity_name, Operand(kIsSymbolMask));
6728 __ b(eq, miss);
6729
6730 // Restore the properties.
6731 __ ldr(properties,
6732 FieldMemOperand(receiver, JSObject::kPropertiesOffset));
6733 }
6734 }
6735
6736 const int spill_mask =
6737 (lr.bit() | r6.bit() | r5.bit() | r4.bit() | r3.bit() |
6738 r2.bit() | r1.bit() | r0.bit());
6739
6740 __ stm(db_w, sp, spill_mask);
6741 __ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
6742 __ mov(r1, Operand(Handle<String>(name)));
6743 StringDictionaryLookupStub stub(NEGATIVE_LOOKUP);
6744 __ CallStub(&stub);
6745 __ tst(r0, Operand(r0));
6746 __ ldm(ia_w, sp, spill_mask);
6747
6748 __ b(eq, done);
6749 __ b(ne, miss);
6750 }
6751
6752
6753 // TODO(kmillikin): Eliminate this function when the stub cache is fully
6754 // handlified.
6755 MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup(
6672 MacroAssembler* masm, 6756 MacroAssembler* masm,
6673 Label* miss, 6757 Label* miss,
6674 Label* done, 6758 Label* done,
6675 Register receiver, 6759 Register receiver,
6676 Register properties, 6760 Register properties,
6677 String* name, 6761 String* name,
6678 Register scratch0) { 6762 Register scratch0) {
6679 // If names of slots in range from 1 to kProbes - 1 for the hash value are 6763 // If names of slots in range from 1 to kProbes - 1 for the hash value are
6680 // not equal to the name and kProbes-th slot is not used (its name is the 6764 // not equal to the name and kProbes-th slot is not used (its name is the
6681 // undefined value), it guarantees the hash table doesn't contain the 6765 // undefined value), it guarantees the hash table doesn't contain the
(...skipping 238 matching lines...) Expand 10 before | Expand all | Expand 10 after
6920 // Used in CompileStoreGlobal. 7004 // Used in CompileStoreGlobal.
6921 { r4, r1, r2, OMIT_REMEMBERED_SET }, 7005 { r4, r1, r2, OMIT_REMEMBERED_SET },
6922 // Used in StoreStubCompiler::CompileStoreField via GenerateStoreField. 7006 // Used in StoreStubCompiler::CompileStoreField via GenerateStoreField.
6923 { r1, r2, r3, EMIT_REMEMBERED_SET }, 7007 { r1, r2, r3, EMIT_REMEMBERED_SET },
6924 { r3, r2, r1, EMIT_REMEMBERED_SET }, 7008 { r3, r2, r1, EMIT_REMEMBERED_SET },
6925 // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField. 7009 // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
6926 { r2, r1, r3, EMIT_REMEMBERED_SET }, 7010 { r2, r1, r3, EMIT_REMEMBERED_SET },
6927 { r3, r1, r2, EMIT_REMEMBERED_SET }, 7011 { r3, r1, r2, EMIT_REMEMBERED_SET },
6928 // KeyedStoreStubCompiler::GenerateStoreFastElement. 7012 // KeyedStoreStubCompiler::GenerateStoreFastElement.
6929 { r4, r2, r3, EMIT_REMEMBERED_SET }, 7013 { r4, r2, r3, EMIT_REMEMBERED_SET },
7014 // ElementsTransitionGenerator::GenerateSmiOnlyToObject
7015 // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
7016 // and ElementsTransitionGenerator::GenerateDoubleToObject
7017 { r2, r3, r9, EMIT_REMEMBERED_SET },
7018 // ElementsTransitionGenerator::GenerateDoubleToObject
7019 { r6, r2, r0, EMIT_REMEMBERED_SET },
7020 { r2, r6, r9, EMIT_REMEMBERED_SET },
6930 // Null termination. 7021 // Null termination.
6931 { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET} 7022 { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
6932 }; 7023 };
6933 7024
6934 7025
6935 bool RecordWriteStub::IsPregenerated() { 7026 bool RecordWriteStub::IsPregenerated() {
6936 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; 7027 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
6937 !entry->object.is(no_reg); 7028 !entry->object.is(no_reg);
6938 entry++) { 7029 entry++) {
6939 if (object_.is(entry->object) && 7030 if (object_.is(entry->object) &&
(...skipping 216 matching lines...) Expand 10 before | Expand all | Expand 10 after
7156 } 7247 }
7157 7248
7158 __ bind(&need_incremental_pop_scratch); 7249 __ bind(&need_incremental_pop_scratch);
7159 __ Pop(regs_.object(), regs_.address()); 7250 __ Pop(regs_.object(), regs_.address());
7160 7251
7161 __ bind(&need_incremental); 7252 __ bind(&need_incremental);
7162 7253
7163 // Fall through when we need to inform the incremental marker. 7254 // Fall through when we need to inform the incremental marker.
7164 } 7255 }
7165 7256
7166
7167 #undef __ 7257 #undef __
7168 7258
7169 } } // namespace v8::internal 7259 } } // namespace v8::internal
7170 7260
7171 #endif // V8_TARGET_ARCH_ARM 7261 #endif // V8_TARGET_ARCH_ARM
OLDNEW
« no previous file with comments | « src/arm/code-stubs-arm.h ('k') | src/arm/codegen-arm.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698