OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS64 | 7 #if V8_TARGET_ARCH_MIPS64 |
8 | 8 |
9 #include "src/ic/call-optimization.h" | 9 #include "src/ic/call-optimization.h" |
10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
(...skipping 303 matching lines...) |
314 | 314 |
315 void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label, | 315 void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label, |
316 Handle<Name> name) { | 316 Handle<Name> name) { |
317 if (!label->is_unused()) { | 317 if (!label->is_unused()) { |
318 __ bind(label); | 318 __ bind(label); |
319 __ li(this->name(), Operand(name)); | 319 __ li(this->name(), Operand(name)); |
320 } | 320 } |
321 } | 321 } |
322 | 322 |
323 | 323 |
324 // Generate StoreTransition code, value is passed in a0 register. | 324 void NamedStoreHandlerCompiler::GenerateRestoreNameAndMap( |
325 // After executing generated code, the receiver_reg and name_reg | 325 Handle<Name> name, Handle<Map> transition) { |
326 // may be clobbered. | 326 __ li(this->name(), Operand(name)); |
327 void NamedStoreHandlerCompiler::GenerateStoreTransition( | 327 __ li(StoreTransitionDescriptor::MapRegister(), Operand(transition)); |
328 Handle<Map> transition, Handle<Name> name, Register receiver_reg, | |
329 Register storage_reg, Register value_reg, Register scratch1, | |
330 Register scratch2, Register scratch3, Label* miss_label, Label* slow) { | |
331 // a0 : value. | |
332 Label exit; | |
333 | |
334 int descriptor = transition->LastAdded(); | |
335 DescriptorArray* descriptors = transition->instance_descriptors(); | |
336 PropertyDetails details = descriptors->GetDetails(descriptor); | |
337 Representation representation = details.representation(); | |
338 DCHECK(!representation.IsNone()); | |
339 | |
340 if (details.type() == CONSTANT) { | |
341 Handle<Object> constant(descriptors->GetValue(descriptor), isolate()); | |
342 __ li(scratch1, constant); | |
343 __ Branch(miss_label, ne, value_reg, Operand(scratch1)); | |
344 } else if (representation.IsSmi()) { | |
345 __ JumpIfNotSmi(value_reg, miss_label); | |
346 } else if (representation.IsHeapObject()) { | |
347 __ JumpIfSmi(value_reg, miss_label); | |
348 HeapType* field_type = descriptors->GetFieldType(descriptor); | |
349 HeapType::Iterator<Map> it = field_type->Classes(); | |
350 Handle<Map> current; | |
351 if (!it.Done()) { | |
352 __ ld(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); | |
353 Label do_store; | |
354 while (true) { | |
355 // Do the CompareMap() directly within the Branch() functions. | |
356 current = it.Current(); | |
357 it.Advance(); | |
358 if (it.Done()) { | |
359 __ Branch(miss_label, ne, scratch1, Operand(current)); | |
360 break; | |
361 } | |
362 __ Branch(&do_store, eq, scratch1, Operand(current)); | |
363 } | |
364 __ bind(&do_store); | |
365 } | |
366 } else if (representation.IsDouble()) { | |
367 Label do_store, heap_number; | |
368 __ LoadRoot(scratch3, Heap::kMutableHeapNumberMapRootIndex); | |
369 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow, | |
370 TAG_RESULT, MUTABLE); | |
371 | |
372 __ JumpIfNotSmi(value_reg, &heap_number); | |
373 __ SmiUntag(scratch1, value_reg); | |
374 __ mtc1(scratch1, f6); | |
375 __ cvt_d_w(f4, f6); | |
376 __ jmp(&do_store); | |
377 | |
378 __ bind(&heap_number); | |
379 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, miss_label, | |
380 DONT_DO_SMI_CHECK); | |
381 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); | |
382 | |
383 __ bind(&do_store); | |
384 __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset)); | |
385 } | |
386 | |
387 // Stub never generated for objects that require access checks. | |
388 DCHECK(!transition->is_access_check_needed()); | |
389 | |
390 // Perform map transition for the receiver if necessary. | |
391 if (details.type() == FIELD && | |
392 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) { | |
393 // The properties must be extended before we can store the value. | |
394 __ li(ExtendStorageDescriptor::NameRegister(), Operand(name)); | |
395 __ li(ExtendStorageDescriptor::MapRegister(), Operand(transition)); | |
396 | |
397 ExtendStorageStub stub(isolate(), | |
398 FieldIndex::ForDescriptor(*transition, descriptor), | |
399 representation); | |
400 GenerateTailCall(masm(), stub.GetCode()); | |
401 return; | |
402 } | |
403 | |
404 // Update the map of the object. | |
405 __ li(scratch1, Operand(transition)); | |
406 __ sd(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); | |
407 | |
408 // Update the write barrier for the map field. | |
409 __ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, scratch2, | |
410 kRAHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET, | |
411 OMIT_SMI_CHECK); | |
412 | |
413 if (details.type() == CONSTANT) { | |
414 DCHECK(value_reg.is(a0)); | |
415 __ Ret(USE_DELAY_SLOT); | |
416 __ mov(v0, a0); | |
417 return; | |
418 } | |
419 | |
420 int index = transition->instance_descriptors()->GetFieldIndex( | |
421 transition->LastAdded()); | |
422 | |
423 // Adjust for the number of properties stored in the object. Even in the | |
424 // face of a transition we can use the old map here because the size of the | |
425 // object and the number of in-object properties is not going to change. | |
426 index -= transition->inobject_properties(); | |
427 | |
428 // TODO(verwaest): Share this code as a code stub. | |
429 SmiCheck smi_check = | |
430 representation.IsTagged() ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; | |
431 if (index < 0) { | |
432 // Set the property straight into the object. | |
433 int offset = transition->instance_size() + (index * kPointerSize); | |
434 if (representation.IsDouble()) { | |
435 __ sd(storage_reg, FieldMemOperand(receiver_reg, offset)); | |
436 } else { | |
437 __ sd(value_reg, FieldMemOperand(receiver_reg, offset)); | |
438 } | |
439 | |
440 if (!representation.IsSmi()) { | |
441 // Update the write barrier for the array address. | |
442 if (!representation.IsDouble()) { | |
443 __ mov(storage_reg, value_reg); | |
444 } | |
445 __ RecordWriteField(receiver_reg, offset, storage_reg, scratch1, | |
446 kRAHasNotBeenSaved, kDontSaveFPRegs, | |
447 EMIT_REMEMBERED_SET, smi_check); | |
448 } | |
449 } else { | |
450 // Write to the properties array. | |
451 int offset = index * kPointerSize + FixedArray::kHeaderSize; | |
452 // Get the properties array | |
453 __ ld(scratch1, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); | |
454 if (representation.IsDouble()) { | |
455 __ sd(storage_reg, FieldMemOperand(scratch1, offset)); | |
456 } else { | |
457 __ sd(value_reg, FieldMemOperand(scratch1, offset)); | |
458 } | |
459 | |
460 if (!representation.IsSmi()) { | |
461 // Update the write barrier for the array address. | |
462 if (!representation.IsDouble()) { | |
463 __ mov(storage_reg, value_reg); | |
464 } | |
465 __ RecordWriteField(scratch1, offset, storage_reg, receiver_reg, | |
466 kRAHasNotBeenSaved, kDontSaveFPRegs, | |
467 EMIT_REMEMBERED_SET, smi_check); | |
468 } | |
469 } | |
470 | |
471 // Return the value (register v0). | |
472 DCHECK(value_reg.is(a0)); | |
473 __ bind(&exit); | |
474 __ Ret(USE_DELAY_SLOT); | |
475 __ mov(v0, a0); | |
476 } | 328 } |
477 | 329 |
478 | 330 |
479 void NamedStoreHandlerCompiler::GenerateStoreField(LookupIterator* lookup, | 331 void NamedStoreHandlerCompiler::GenerateConstantCheck(Object* constant, |
480 Register value_reg, | 332 Register value_reg, |
481 Label* miss_label) { | 333 Label* miss_label) { |
482 DCHECK(lookup->representation().IsHeapObject()); | 334 __ li(scratch1(), handle(constant, isolate())); |
483 __ JumpIfSmi(value_reg, miss_label); | 335 __ Branch(miss_label, ne, value_reg, Operand(scratch1())); |
484 HeapType::Iterator<Map> it = lookup->GetFieldType()->Classes(); | |
485 __ ld(scratch1(), FieldMemOperand(value_reg, HeapObject::kMapOffset)); | |
486 Label do_store; | |
487 Handle<Map> current; | |
488 while (true) { | |
489 // Do the CompareMap() directly within the Branch() functions. | |
490 current = it.Current(); | |
491 it.Advance(); | |
492 if (it.Done()) { | |
493 __ Branch(miss_label, ne, scratch1(), Operand(current)); | |
494 break; | |
495 } | |
496 __ Branch(&do_store, eq, scratch1(), Operand(current)); | |
497 } | |
498 __ bind(&do_store); | |
499 | |
500 StoreFieldStub stub(isolate(), lookup->GetFieldIndex(), | |
501 lookup->representation()); | |
502 GenerateTailCall(masm(), stub.GetCode()); | |
503 } | 336 } |
504 | 337 |
505 | 338 |
| 339 void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(HeapType* field_type, |
| 340 Register value_reg, |
| 341 Label* miss_label) { |
| 342 __ JumpIfSmi(value_reg, miss_label); |
| 343 HeapType::Iterator<Map> it = field_type->Classes(); |
| 344 if (!it.Done()) { |
| 345 __ ld(scratch1(), FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
| 346 Label do_store; |
| 347 Handle<Map> current; |
| 348 while (true) { |
| 349 // Do the CompareMap() directly within the Branch() functions. |
| 350 current = it.Current(); |
| 351 it.Advance(); |
| 352 if (it.Done()) { |
| 353 __ Branch(miss_label, ne, scratch1(), Operand(current)); |
| 354 break; |
| 355 } |
| 356 __ Branch(&do_store, eq, scratch1(), Operand(current)); |
| 357 } |
| 358 __ bind(&do_store); |
| 359 } |
| 360 } |
| 361 |
| 362 |
506 Register PropertyHandlerCompiler::CheckPrototypes( | 363 Register PropertyHandlerCompiler::CheckPrototypes( |
507 Register object_reg, Register holder_reg, Register scratch1, | 364 Register object_reg, Register holder_reg, Register scratch1, |
508 Register scratch2, Handle<Name> name, Label* miss, | 365 Register scratch2, Handle<Name> name, Label* miss, |
509 PrototypeCheckType check) { | 366 PrototypeCheckType check) { |
510 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate())); | 367 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate())); |
511 | 368 |
512 // Make sure there's no overlap between holder and object registers. | 369 // Make sure there's no overlap between holder and object registers. |
513 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); | 370 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); |
514 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) && | 371 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) && |
515 !scratch2.is(scratch1)); | 372 !scratch2.is(scratch1)); |
(...skipping 314 matching lines...) |
830 // Return the generated code. | 687 // Return the generated code. |
831 return GetCode(kind(), Code::NORMAL, name); | 688 return GetCode(kind(), Code::NORMAL, name); |
832 } | 689 } |
833 | 690 |
834 | 691 |
835 #undef __ | 692 #undef __ |
836 } | 693 } |
837 } // namespace v8::internal | 694 } // namespace v8::internal |
838 | 695 |
839 #endif // V8_TARGET_ARCH_MIPS64 | 696 #endif // V8_TARGET_ARCH_MIPS64 |
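
// Note (not part of the patch): the change above removes the monolithic
// GenerateStoreTransition and GenerateStoreField emitters and keeps only small
// helpers (GenerateRestoreNameAndMap, GenerateConstantCheck,
// GenerateFieldTypeChecks) that either fall through on success or branch to a
// shared miss label, with the transition and field store presumably driven by
// shared, platform-independent code. The following is only a rough,
// self-contained C++ sketch of that guard-then-fall-through pattern. Value,
// ConstantCheck and FieldTypeChecks are hypothetical stand-ins, not V8 APIs;
// the real helpers emit MIPS64 instructions rather than evaluating values.

#include <iostream>
#include <vector>

// Hypothetical stand-in for the value being stored: either a smi or a heap
// object identified by its map.
struct Value {
  bool is_smi;
  int map_id;
};

// Mirrors the role of GenerateConstantCheck: the stored value must equal the
// constant recorded in the descriptor, otherwise the handler misses.
bool ConstantCheck(const Value& value, const Value& constant) {
  return value.is_smi == constant.is_smi && value.map_id == constant.map_id;
}

// Mirrors the role of GenerateFieldTypeChecks: smis miss immediately
// (JumpIfSmi), heap objects must carry one of the maps in the field type; the
// final comparison branches to the miss label, earlier ones fall through to
// the store.
bool FieldTypeChecks(const Value& value, const std::vector<int>& field_maps) {
  if (value.is_smi) return false;          // __ JumpIfSmi(value_reg, miss)
  for (int map : field_maps) {
    if (value.map_id == map) return true;  // __ Branch(&do_store, eq, ...)
  }
  return false;                            // __ Branch(miss, ne, ...) on last map
}

int main() {
  Value v{false, 7};
  std::cout << (FieldTypeChecks(v, {3, 7}) ? "store" : "miss") << "\n";
  return 0;
}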