| OLD | NEW | 
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 209 matching lines...) | |
| 220 | 220 | 
| 221 | 221 | 
| 222 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | 222 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | 
| 223   // Stack layout on entry: | 223   // Stack layout on entry: | 
| 224   // | 224   // | 
| 225   // [rsp + kPointerSize]: constant elements. | 225   // [rsp + kPointerSize]: constant elements. | 
| 226   // [rsp + (2 * kPointerSize)]: literal index. | 226   // [rsp + (2 * kPointerSize)]: literal index. | 
| 227   // [rsp + (3 * kPointerSize)]: literals array. | 227   // [rsp + (3 * kPointerSize)]: literals array. | 
| 228 | 228 | 
| 229   // All sizes here are multiples of kPointerSize. | 229   // All sizes here are multiples of kPointerSize. | 
| 230   int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0; | 230   int elements_size = 0; | 
|  | 231   if (length_ > 0) { | 
|  | 232     elements_size = mode_ == CLONE_DOUBLE_ELEMENTS | 
|  | 233         ? FixedDoubleArray::SizeFor(length_) | 
|  | 234         : FixedArray::SizeFor(length_); | 
|  | 235   } | 
| 231   int size = JSArray::kSize + elements_size; | 236   int size = JSArray::kSize + elements_size; | 
| 232 | 237 | 
| 233   // Load boilerplate object into rcx and check if we need to create a | 238   // Load boilerplate object into rcx and check if we need to create a | 
| 234   // boilerplate. | 239   // boilerplate. | 
| 235   Label slow_case; | 240   Label slow_case; | 
| 236   __ movq(rcx, Operand(rsp, 3 * kPointerSize)); | 241   __ movq(rcx, Operand(rsp, 3 * kPointerSize)); | 
| 237   __ movq(rax, Operand(rsp, 2 * kPointerSize)); | 242   __ movq(rax, Operand(rsp, 2 * kPointerSize)); | 
| 238   SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2); | 243   SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2); | 
| 239   __ movq(rcx, | 244   __ movq(rcx, | 
| 240           FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize)); | 245           FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize)); | 
| 241   __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); | 246   __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); | 
| 242   __ j(equal, &slow_case); | 247   __ j(equal, &slow_case); | 
| 243 | 248 | 
| 244   if (FLAG_debug_code) { | 249   if (FLAG_debug_code) { | 
| 245     const char* message; | 250     const char* message; | 
| 246     Heap::RootListIndex expected_map_index; | 251     Heap::RootListIndex expected_map_index; | 
| 247     if (mode_ == CLONE_ELEMENTS) { | 252     if (mode_ == CLONE_ELEMENTS) { | 
| 248       message = "Expected (writable) fixed array"; | 253       message = "Expected (writable) fixed array"; | 
| 249       expected_map_index = Heap::kFixedArrayMapRootIndex; | 254       expected_map_index = Heap::kFixedArrayMapRootIndex; | 
|  | 255     } else if (mode_ == CLONE_DOUBLE_ELEMENTS) { | 
|  | 256       message = "Expected (writable) fixed double array"; | 
|  | 257       expected_map_index = Heap::kFixedDoubleArrayMapRootIndex; | 
| 250     } else { | 258     } else { | 
| 251       ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS); | 259       ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS); | 
| 252       message = "Expected copy-on-write fixed array"; | 260       message = "Expected copy-on-write fixed array"; | 
| 253       expected_map_index = Heap::kFixedCOWArrayMapRootIndex; | 261       expected_map_index = Heap::kFixedCOWArrayMapRootIndex; | 
| 254     } | 262     } | 
| 255     __ push(rcx); | 263     __ push(rcx); | 
| 256     __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); | 264     __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); | 
| 257     __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 265     __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | 
| 258                    expected_map_index); | 266                    expected_map_index); | 
| 259     __ Assert(equal, message); | 267     __ Assert(equal, message); | 
| (...skipping 13 matching lines...) | |
| 273   } | 281   } | 
| 274 | 282 | 
| 275   if (length_ > 0) { | 283   if (length_ > 0) { | 
| 276     // Get hold of the elements array of the boilerplate and setup the | 284     // Get hold of the elements array of the boilerplate and setup the | 
| 277     // elements pointer in the resulting object. | 285     // elements pointer in the resulting object. | 
| 278     __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); | 286     __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); | 
| 279     __ lea(rdx, Operand(rax, JSArray::kSize)); | 287     __ lea(rdx, Operand(rax, JSArray::kSize)); | 
| 280     __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx); | 288     __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx); | 
| 281 | 289 | 
| 282     // Copy the elements array. | 290     // Copy the elements array. | 
| 283     for (int i = 0; i < elements_size; i += kPointerSize) { | 291     if (mode_ == CLONE_ELEMENTS) { | 
| 284       __ movq(rbx, FieldOperand(rcx, i)); | 292       for (int i = 0; i < elements_size; i += kPointerSize) { | 
| 285       __ movq(FieldOperand(rdx, i), rbx); | 293         __ movq(rbx, FieldOperand(rcx, i)); | 
|  | 294         __ movq(FieldOperand(rdx, i), rbx); | 
|  | 295       } | 
|  | 296     } else { | 
|  | 297       ASSERT(mode_ == CLONE_DOUBLE_ELEMENTS); | 
|  | 298       int i; | 
|  | 299       for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) { | 
|  | 300         __ movq(rbx, FieldOperand(rcx, i)); | 
|  | 301         __ movq(FieldOperand(rdx, i), rbx); | 
|  | 302       } | 
|  | 303       while (i < elements_size) { | 
|  | 304         __ movsd(xmm0, FieldOperand(rcx, i)); | 
|  | 305         __ movsd(FieldOperand(rdx, i), xmm0); | 
|  | 306         i += kDoubleSize; | 
|  | 307       } | 
|  | 308       ASSERT(i == elements_size); | 
| 286     } | 309     } | 
| 287   } | 310   } | 
| 288 | 311 | 
| 289   // Return and remove the on-stack parameters. | 312   // Return and remove the on-stack parameters. | 
| 290   __ ret(3 * kPointerSize); | 313   __ ret(3 * kPointerSize); | 
| 291 | 314 | 
| 292   __ bind(&slow_case); | 315   __ bind(&slow_case); | 
| 293   __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 316   __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 
| 294 } | 317 } | 
| 295 | 318 | 
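Note on the `FastCloneShallowArrayStub` hunk above: the stub now sizes the clone with `FixedDoubleArray::SizeFor` for double arrays and splits the copy loop, moving the header words with pointer-sized `movq`s and the payload through `xmm0` with `movsd`. A minimal C++ model of that copy, assuming an eight-byte pointer and a two-word (map + length) header; the function name and constants here are illustrative, not V8's:

```cpp
#include <cstdint>
#include <cstring>

// x64 sizes, matching the stub's constants.
constexpr int kPointerSize = 8;
constexpr int kDoubleSize = 8;
// Assumed two-word (map + length) header, standing in for
// FixedDoubleArray::kHeaderSize.
constexpr int kHeaderSize = 2 * kPointerSize;

// Models the CLONE_DOUBLE_ELEMENTS copy: header words as raw pointer
// moves (the movq loop), then the unboxed doubles (the movsd loop).
void CloneDoubleElements(const uint8_t* boilerplate, uint8_t* clone,
                         int length) {
  const int elements_size = kHeaderSize + length * kDoubleSize;
  int i = 0;
  for (; i < kHeaderSize; i += kPointerSize) {
    std::memcpy(clone + i, boilerplate + i, kPointerSize);
  }
  for (; i < elements_size; i += kDoubleSize) {
    std::memcpy(clone + i, boilerplate + i, kDoubleSize);
  }
}
```

On x64 both moves are eight bytes wide, so the split mainly keeps the doubles flowing through an XMM register and mirrors the ia32 port, where `kPointerSize` is 4 and the distinction actually changes the stride.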
| (...skipping 3576 matching lines...) | |
| 3872     Label miss; | 3895     Label miss; | 
| 3873     __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); | 3896     __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); | 
| 3874     __ j(not_equal, &miss, Label::kNear); | 3897     __ j(not_equal, &miss, Label::kNear); | 
| 3875     __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); | 3898     __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); | 
| 3876     __ j(not_equal, &miss, Label::kNear); | 3899     __ j(not_equal, &miss, Label::kNear); | 
| 3877     __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); | 3900     __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); | 
| 3878     __ ret(2 * kPointerSize); | 3901     __ ret(2 * kPointerSize); | 
| 3879     __ bind(&miss); | 3902     __ bind(&miss); | 
| 3880   } | 3903   } | 
| 3881 | 3904 | 
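Aside on the fast path above: the three `InstanceofCache*` roots form a one-entry cache keyed on the (function, map) pair, so a repeated `instanceof` against the same function and hidden class returns the remembered answer without walking the prototype chain. A rough model under that reading (the struct and function names are illustrative, not V8's API):

```cpp
// One-entry instanceof cache; the fields stand in for the three
// Instanceof* heap roots compared in the hunk above.
struct InstanceofCache {
  const void* function = nullptr;  // kInstanceofCacheFunctionRootIndex
  const void* map = nullptr;       // kInstanceofCacheMapRootIndex
  bool answer = false;             // kInstanceofCacheAnswerRootIndex
};

// Returns true on a hit and writes the cached answer; on a miss the
// caller falls through to the full prototype-chain walk (the &miss label).
inline bool TryCachedInstanceof(const InstanceofCache& c,
                                const void* function, const void* map,
                                bool* result) {
  if (c.function != function || c.map != map) return false;
  *result = c.answer;
  return true;
}
```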
| 3882   __ TryGetFunctionPrototype(rdx, rbx, &slow); | 3905   __ TryGetFunctionPrototype(rdx, rbx, &slow, true); | 
| 3883 | 3906 | 
| 3884   // Check that the function prototype is a JS object. | 3907   // Check that the function prototype is a JS object. | 
| 3885   __ JumpIfSmi(rbx, &slow); | 3908   __ JumpIfSmi(rbx, &slow); | 
| 3886   __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, kScratchRegister); | 3909   __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, kScratchRegister); | 
| 3887   __ j(below, &slow); | 3910   __ j(below, &slow); | 
| 3888   __ CmpInstanceType(kScratchRegister, LAST_SPEC_OBJECT_TYPE); | 3911   __ CmpInstanceType(kScratchRegister, LAST_SPEC_OBJECT_TYPE); | 
| 3889   __ j(above, &slow); | 3912   __ j(above, &slow); | 
| 3890 | 3913 | 
| 3891   // Register mapping: | 3914   // Register mapping: | 
| 3892   //   rax is object map. | 3915   //   rax is object map. | 
| (...skipping 1538 matching lines...) | |
| 5431   __ pop(rcx); | 5454   __ pop(rcx); | 
| 5432   __ pop(rax); | 5455   __ pop(rax); | 
| 5433   __ pop(rdx); | 5456   __ pop(rdx); | 
| 5434   __ push(rcx); | 5457   __ push(rcx); | 
| 5435 | 5458 | 
| 5436   // Do a tail call to the rewritten stub. | 5459   // Do a tail call to the rewritten stub. | 
| 5437   __ jmp(rdi); | 5460   __ jmp(rdi); | 
| 5438 } | 5461 } | 
| 5439 | 5462 | 
| 5440 | 5463 | 
| 5441 MaybeObject* StringDictionaryLookupStub::GenerateNegativeLookup( | 5464 void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, | 
|  | 5465                                                         Label* miss, | 
|  | 5466                                                         Label* done, | 
|  | 5467                                                         Register properties, | 
|  | 5468                                                         Handle<String> name, | 
|  | 5469                                                         Register r0) { | 
|  | 5470   // If names of slots in range from 1 to kProbes - 1 for the hash value are | 
|  | 5471   // not equal to the name and kProbes-th slot is not used (its name is the | 
|  | 5472   // undefined value), it guarantees the hash table doesn't contain the | 
|  | 5473   // property. It's true even if some slots represent deleted properties | 
|  | 5474   // (their names are the null value). | 
|  | 5475   for (int i = 0; i < kInlinedProbes; i++) { | 
|  | 5476     // r0 points to properties hash. | 
|  | 5477     // Compute the masked index: (hash + i + i * i) & mask. | 
|  | 5478     Register index = r0; | 
|  | 5479     // Capacity is smi 2^n. | 
|  | 5480     __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset)); | 
|  | 5481     __ decl(index); | 
|  | 5482     __ and_(index, | 
|  | 5483             Immediate(name->Hash() + StringDictionary::GetProbeOffset(i))); | 
|  | 5484 | 
|  | 5485     // Scale the index by multiplying by the entry size. | 
|  | 5486     ASSERT(StringDictionary::kEntrySize == 3); | 
|  | 5487     __ lea(index, Operand(index, index, times_2, 0));  // index *= 3. | 
|  | 5488 | 
|  | 5489     Register entity_name = r0; | 
|  | 5490     // Having undefined at this place means the name is not contained. | 
|  | 5491     ASSERT_EQ(kSmiTagSize, 1); | 
|  | 5492     __ movq(entity_name, Operand(properties, | 
|  | 5493                                  index, | 
|  | 5494                                  times_pointer_size, | 
|  | 5495                                  kElementsStartOffset - kHeapObjectTag)); | 
|  | 5496     __ Cmp(entity_name, masm->isolate()->factory()->undefined_value()); | 
|  | 5497     __ j(equal, done); | 
|  | 5498 | 
|  | 5499     // Stop if found the property. | 
|  | 5500     __ Cmp(entity_name, Handle<String>(name)); | 
|  | 5501     __ j(equal, miss); | 
|  | 5502 | 
|  | 5503     // Check if the entry name is not a symbol. | 
|  | 5504     __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); | 
|  | 5505     __ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset), | 
|  | 5506              Immediate(kIsSymbolMask)); | 
|  | 5507     __ j(zero, miss); | 
|  | 5508   } | 
|  | 5509 | 
|  | 5510   StringDictionaryLookupStub stub(properties, | 
|  | 5511                                   r0, | 
|  | 5512                                   r0, | 
|  | 5513                                   StringDictionaryLookupStub::NEGATIVE_LOOKUP); | 
|  | 5514   __ Push(Handle<Object>(name)); | 
|  | 5515   __ push(Immediate(name->Hash())); | 
|  | 5516   __ CallStub(&stub); | 
|  | 5517   __ testq(r0, r0); | 
|  | 5518   __ j(not_zero, miss); | 
|  | 5519   __ jmp(done); | 
|  | 5520 } | 
|  | 5521 | 
|  | 5522 | 
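The handlified `GenerateNegativeLookup` above inlines the first `kInlinedProbes` probes of the dictionary's quadratic probe sequence and only falls back to a stub call when those are inconclusive. A plain C++ sketch of that logic, following the stub's own comment `index = (hash + i + i*i) & mask`; the probe count, the flat key layout, and the empty string standing in for the undefined value are all assumptions of the sketch:

```cpp
#include <cstdint>
#include <string>
#include <vector>

constexpr uint32_t kEntrySize = 3;      // per the ASSERT in the stub
constexpr uint32_t kInlinedProbes = 4;  // assumed value of the real constant

enum class Probe { kAbsent, kFound, kInconclusive };

// keys is a flat table of capacity * kEntrySize slots (key, value,
// details); an empty key models the undefined value, a never-used slot.
Probe InlinedNegativeLookup(const std::vector<std::string>& keys,
                            uint32_t capacity,  // a power of two (smi 2^n)
                            uint32_t hash, const std::string& name) {
  for (uint32_t i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i*i) & mask.
    uint32_t index = (hash + i + i * i) & (capacity - 1);
    // Scale by the entry size, as the stub's lea does (index *= 3).
    const std::string& key = keys[index * kEntrySize];
    if (key.empty()) return Probe::kAbsent;  // undefined: name not present
    if (key == name) return Probe::kFound;   // found: negative lookup misses
  }
  return Probe::kInconclusive;  // fall back to the full dictionary stub
}
```

The real stub additionally jumps to `miss` when a probed key is not a symbol (the `kIsSymbolMask` test); the sketch leaves that check out.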
|  | 5523 // TODO(kmillikin): Eliminate this function when the stub cache is fully | 
|  | 5524 // handlified. | 
|  | 5525 MaybeObject* StringDictionaryLookupStub::TryGenerateNegativeLookup( | 
| 5442     MacroAssembler* masm, | 5526     MacroAssembler* masm, | 
| 5443     Label* miss, | 5527     Label* miss, | 
| 5444     Label* done, | 5528     Label* done, | 
| 5445     Register properties, | 5529     Register properties, | 
| 5446     String* name, | 5530     String* name, | 
| 5447     Register r0) { | 5531     Register r0) { | 
| 5448   // If names of slots in range from 1 to kProbes - 1 for the hash value are | 5532   // If names of slots in range from 1 to kProbes - 1 for the hash value are | 
| 5449   // not equal to the name and kProbes-th slot is not used (its name is the | 5533   // not equal to the name and kProbes-th slot is not used (its name is the | 
| 5450   // undefined value), it guarantees the hash table doesn't contain the | 5534   // undefined value), it guarantees the hash table doesn't contain the | 
| 5451   // property. It's true even if some slots represent deleted properties | 5535   // property. It's true even if some slots represent deleted properties | 
| (...skipping 206 matching lines...) | |
| 5658   { rdx, rcx, rbx, EMIT_REMEMBERED_SET }, | 5742   { rdx, rcx, rbx, EMIT_REMEMBERED_SET }, | 
| 5659   // GenerateStoreField calls the stub with two different permutations of | 5743   // GenerateStoreField calls the stub with two different permutations of | 
| 5660   // registers.  This is the second. | 5744   // registers.  This is the second. | 
| 5661   { rbx, rcx, rdx, EMIT_REMEMBERED_SET }, | 5745   { rbx, rcx, rdx, EMIT_REMEMBERED_SET }, | 
| 5662   // StoreIC::GenerateNormal via GenerateDictionaryStore. | 5746   // StoreIC::GenerateNormal via GenerateDictionaryStore. | 
| 5663   { rbx, r8, r9, EMIT_REMEMBERED_SET }, | 5747   { rbx, r8, r9, EMIT_REMEMBERED_SET }, | 
| 5664   // KeyedStoreIC::GenerateGeneric. | 5748   // KeyedStoreIC::GenerateGeneric. | 
| 5665   { rbx, rdx, rcx, EMIT_REMEMBERED_SET}, | 5749   { rbx, rdx, rcx, EMIT_REMEMBERED_SET}, | 
| 5666   // KeyedStoreStubCompiler::GenerateStoreFastElement. | 5750   // KeyedStoreStubCompiler::GenerateStoreFastElement. | 
| 5667   { rdi, rdx, rcx, EMIT_REMEMBERED_SET}, | 5751   { rdi, rdx, rcx, EMIT_REMEMBERED_SET}, | 
|  | 5752   // ElementsTransitionGenerator::GenerateSmiOnlyToObject | 
|  | 5753   // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble | 
|  | 5754   // and ElementsTransitionGenerator::GenerateDoubleToObject | 
|  | 5755   { rdx, rbx, rdi, EMIT_REMEMBERED_SET}, | 
|  | 5756   // ElementsTransitionGenerator::GenerateSmiOnlyToDouble | 
|  | 5757   // and ElementsTransitionGenerator::GenerateDoubleToObject | 
|  | 5758   { rdx, r11, r15, EMIT_REMEMBERED_SET}, | 
|  | 5759   // ElementsTransitionGenerator::GenerateDoubleToObject | 
|  | 5760   { r11, rax, r15, EMIT_REMEMBERED_SET}, | 
| 5668   // Null termination. | 5761   // Null termination. | 
| 5669   { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET} | 5762   { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET} | 
| 5670 }; | 5763 }; | 
| 5671 | 5764 | 
| 5672 | 5765 | 
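The three new entries record the register triples that the `ElementsTransitionGenerator` paths pass to `RecordWriteStub`, so those exact stubs get pregenerated. The `IsPregenerated` loop that consumes this null-terminated table begins just below but is truncated by the diff; a complete model in plain C++, matching only the register triple (the real loop may also consider the remembered-set action, and the types and `-1` sentinel standing in for `no_reg` are illustrative):

```cpp
// One row of a kAheadOfTime-style table.
struct Entry {
  int object, value, address;  // register codes; -1 models no_reg
  int action;                  // e.g. EMIT_REMEMBERED_SET
};

// A stub is pregenerated iff its (object, value, address) triple appears
// in the table; the scan stops at the no_reg sentinel entry.
bool IsPregenerated(const Entry* table, int object, int value, int address) {
  for (const Entry* e = table; e->object != -1; ++e) {
    if (e->object == object && e->value == value && e->address == address) {
      return true;
    }
  }
  return false;
}
```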
| 5673 bool RecordWriteStub::IsPregenerated() { | 5766 bool RecordWriteStub::IsPregenerated() { | 
| 5674   for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; | 5767   for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; | 
| 5675        !entry->object.is(no_reg); | 5768        !entry->object.is(no_reg); | 
| 5676        entry++) { | 5769        entry++) { | 
| 5677     if (object_.is(entry->object) && | 5770     if (object_.is(entry->object) && | 
| (...skipping 227 matching lines...) | |
| 5905   } | 5998   } | 
| 5906 | 5999 | 
| 5907   __ bind(&need_incremental_pop_object); | 6000   __ bind(&need_incremental_pop_object); | 
| 5908   __ pop(regs_.object()); | 6001   __ pop(regs_.object()); | 
| 5909 | 6002 | 
| 5910   __ bind(&need_incremental); | 6003   __ bind(&need_incremental); | 
| 5911 | 6004 | 
| 5912   // Fall through when we need to inform the incremental marker. | 6005   // Fall through when we need to inform the incremental marker. | 
| 5913 } | 6006 } | 
| 5914 | 6007 | 
| 5915 |  | 
| 5916 #undef __ | 6008 #undef __ | 
| 5917 | 6009 | 
| 5918 } }  // namespace v8::internal | 6010 } }  // namespace v8::internal | 
| 5919 | 6011 | 
| 5920 #endif  // V8_TARGET_ARCH_X64 | 6012 #endif  // V8_TARGET_ARCH_X64 | 