OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 397 matching lines...) |
408 __ mov(v0, scratch1); | 408 __ mov(v0, scratch1); |
409 __ Ret(); | 409 __ Ret(); |
410 } | 410 } |
411 | 411 |
412 | 412 |
413 // Generate StoreField code, value is passed in a0 register. | 413 // Generate StoreField code, value is passed in a0 register. |
414 // After executing generated code, the receiver_reg and name_reg | 414 // After executing generated code, the receiver_reg and name_reg |
415 // may be clobbered. | 415 // may be clobbered. |
416 void StubCompiler::GenerateStoreField(MacroAssembler* masm, | 416 void StubCompiler::GenerateStoreField(MacroAssembler* masm, |
417 Handle<JSObject> object, | 417 Handle<JSObject> object, |
418 int index, | 418 LookupResult* lookup, |
419 Handle<Map> transition, | 419 Handle<Map> transition, |
420 Handle<Name> name, | 420 Handle<Name> name, |
421 Register receiver_reg, | 421 Register receiver_reg, |
422 Register name_reg, | 422 Register name_reg, |
423 Register value_reg, | 423 Register value_reg, |
424 Register scratch1, | 424 Register scratch1, |
425 Register scratch2, | 425 Register scratch2, |
426 Label* miss_label, | 426 Label* miss_label, |
427 Label* miss_restore_name) { | 427 Label* miss_restore_name) { |
428 // a0 : value. | 428 // a0 : value. |
429 Label exit; | 429 Label exit; |
430 | 430 |
431 LookupResult lookup(masm->isolate()); | |
432 object->Lookup(*name, &lookup); | |
433 if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) { | |
434 // In sloppy mode, we could just return the value and be done. However, we | |
435 // might be in strict mode, where we have to throw. Since we cannot tell, | |
436 // go into slow case unconditionally. | |
437 __ jmp(miss_label); | |
438 return; | |
439 } | |
440 | |
441 // Check that the map of the object hasn't changed. | 431 // Check that the map of the object hasn't changed. |
442 CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS | 432 CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS |
443 : REQUIRE_EXACT_MAP; | 433 : REQUIRE_EXACT_MAP; |
444 __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label, | 434 __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label, |
445 DO_SMI_CHECK, mode); | 435 DO_SMI_CHECK, mode); |
446 | 436 |
447 // Perform global security token check if needed. | 437 // Perform global security token check if needed. |
448 if (object->IsJSGlobalProxy()) { | 438 if (object->IsJSGlobalProxy()) { |
449 __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label); | 439 __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label); |
450 } | 440 } |
451 | 441 |
452 // Check that we are allowed to write this. | 442 // Check that we are allowed to write this. |
453 if (!transition.is_null() && object->GetPrototype()->IsJSObject()) { | 443 if (!transition.is_null() && object->GetPrototype()->IsJSObject()) { |
454 JSObject* holder; | 444 JSObject* holder; |
455 if (lookup.IsFound()) { | 445 // holder == object indicates that no property was found. |
456 holder = lookup.holder(); | 446 if (lookup->holder() != *object) { |
| 447 holder = lookup->holder(); |
457 } else { | 448 } else { |
458 // Find the top object. | 449 // Find the top object. |
459 holder = *object; | 450 holder = *object; |
460 do { | 451 do { |
461 holder = JSObject::cast(holder->GetPrototype()); | 452 holder = JSObject::cast(holder->GetPrototype()); |
462 } while (holder->GetPrototype()->IsJSObject()); | 453 } while (holder->GetPrototype()->IsJSObject()); |
463 } | 454 } |
464 CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg, | 455 Register holder_reg = CheckPrototypes( |
465 scratch1, scratch2, name, miss_restore_name); | 456 object, receiver_reg, Handle<JSObject>(holder), name_reg, |
| 457 scratch1, scratch2, name, miss_restore_name); |
| 458 // If no property was found, and the holder (the last object in the |
| 459 // prototype chain) is in slow mode, we need to do a negative lookup on the |
| 460 // holder. |
| 461 if (lookup->holder() == *object && |
| 462 !holder->HasFastProperties() && |
| 463 !holder->IsJSGlobalProxy() && |
| 464 !holder->IsJSGlobalObject()) { |
| 465 GenerateDictionaryNegativeLookup( |
| 466 masm, miss_restore_name, holder_reg, name, scratch1, scratch2); |
| 467 } |
466 } | 468 } |
467 | 469 |
468 // Stub never generated for non-global objects that require access | 470 // Stub never generated for non-global objects that require access |
469 // checks. | 471 // checks. |
470 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); | 472 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); |
471 | 473 |
472 // Perform map transition for the receiver if necessary. | 474 // Perform map transition for the receiver if necessary. |
473 if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) { | 475 if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) { |
474 // The properties must be extended before we can store the value. | 476 // The properties must be extended before we can store the value. |
475 // We jump to a runtime call that extends the properties array. | 477 // We jump to a runtime call that extends the properties array. |
476 __ push(receiver_reg); | 478 __ push(receiver_reg); |
477 __ li(a2, Operand(transition)); | 479 __ li(a2, Operand(transition)); |
478 __ Push(a2, a0); | 480 __ Push(a2, a0); |
479 __ TailCallExternalReference( | 481 __ TailCallExternalReference( |
480 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), | 482 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), |
481 masm->isolate()), | 483 masm->isolate()), |
482 3, 1); | 484 3, 1); |
483 return; | 485 return; |
484 } | 486 } |
485 | 487 |
| 488 int index; |
486 if (!transition.is_null()) { | 489 if (!transition.is_null()) { |
487 // Update the map of the object. | 490 // Update the map of the object. |
488 __ li(scratch1, Operand(transition)); | 491 __ li(scratch1, Operand(transition)); |
489 __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); | 492 __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); |
490 | 493 |
491 // Update the write barrier for the map field and pass the now unused | 494 // Update the write barrier for the map field and pass the now unused |
492 // name_reg as scratch register. | 495 // name_reg as scratch register. |
493 __ RecordWriteField(receiver_reg, | 496 __ RecordWriteField(receiver_reg, |
494 HeapObject::kMapOffset, | 497 HeapObject::kMapOffset, |
495 scratch1, | 498 scratch1, |
496 name_reg, | 499 name_reg, |
497 kRAHasNotBeenSaved, | 500 kRAHasNotBeenSaved, |
498 kDontSaveFPRegs, | 501 kDontSaveFPRegs, |
499 OMIT_REMEMBERED_SET, | 502 OMIT_REMEMBERED_SET, |
500 OMIT_SMI_CHECK); | 503 OMIT_SMI_CHECK); |
| 504 index = transition->instance_descriptors()->GetFieldIndex( |
| 505 transition->LastAdded()); |
| 506 } else { |
| 507 index = lookup->GetFieldIndex().field_index(); |
501 } | 508 } |
502 | 509 |
503 // Adjust for the number of properties stored in the object. Even in the | 510 // Adjust for the number of properties stored in the object. Even in the |
504 // face of a transition we can use the old map here because the size of the | 511 // face of a transition we can use the old map here because the size of the |
505 // object and the number of in-object properties is not going to change. | 512 // object and the number of in-object properties is not going to change. |
506 index -= object->map()->inobject_properties(); | 513 index -= object->map()->inobject_properties(); |
507 | 514 |
508 if (index < 0) { | 515 if (index < 0) { |
509 // Set the property straight into the object. | 516 // Set the property straight into the object. |
510 int offset = object->map()->instance_size() + (index * kPointerSize); | 517 int offset = object->map()->instance_size() + (index * kPointerSize); |
(...skipping 3433 matching lines...) |
3944 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); | 3951 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); |
3945 } | 3952 } |
3946 } | 3953 } |
3947 | 3954 |
3948 | 3955 |
3949 #undef __ | 3956 #undef __ |
3950 | 3957 |
3951 } } // namespace v8::internal | 3958 } } // namespace v8::internal |
3952 | 3959 |
3953 #endif // V8_TARGET_ARCH_MIPS | 3960 #endif // V8_TARGET_ARCH_MIPS |
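
The hunk above changes GenerateStoreField to take a LookupResult* instead of a precomputed index: the field index is now derived either from the transition map's last-added descriptor or from the lookup result, and is then rebased against the map's in-object property count to decide whether the value is stored in the object body or in the properties backing store. Below is a minimal standalone C++ sketch of that offset arithmetic; FieldOffset, the constant values, and the 4-byte kPointerSize are illustrative assumptions for this 32-bit MIPS port, not actual V8 declarations.

    #include <cstdio>

    static const int kPointerSize = 4;  // assumed: 32-bit MIPS word size

    // Illustrative model (not V8 code) of the store-offset arithmetic:
    // a field index smaller than the number of in-object properties lands
    // inside the object body; larger indices land in the external
    // properties backing store.
    static int FieldOffset(int field_index,
                           int inobject_properties,
                           int instance_size,
                           bool* in_object) {
      int index = field_index - inobject_properties;
      if (index < 0) {
        // Negative index: store straight into the object, counting back
        // from the end of the instance, exactly as the hunk above does.
        *in_object = true;
        return instance_size + index * kPointerSize;
      }
      // Non-negative index: store into the properties array. The real stub
      // additionally skips the array header; omitted here for brevity.
      *in_object = false;
      return index * kPointerSize;
    }

    int main() {
      bool in_object = false;
      // Hypothetical map: 4 in-object properties, 16-byte instances.
      int offset = FieldOffset(2, 4, 16, &in_object);
      std::printf("in_object=%d offset=%d\n", in_object, offset);
      return 0;
    }

The comment at old line 503 explains why this rebasing can keep using the old map even when a transition is performed: the instance size and the in-object property count do not change across the transition, so the adjusted index stays valid for both maps.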