| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 255 matching lines...) |
| 266 __ Ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); | 266 __ Ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); |
| 267 } | 267 } |
| 268 | 268 |
| 269 | 269 |
| 270 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, | 270 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, |
| 271 Register dst, | 271 Register dst, |
| 272 Register src, | 272 Register src, |
| 273 bool inobject, | 273 bool inobject, |
| 274 int index, | 274 int index, |
| 275 Representation representation) { | 275 Representation representation) { |
| 276 ASSERT(!FLAG_track_double_fields || !representation.IsDouble()); | 276 ASSERT(!representation.IsDouble()); |
| 277 USE(representation); | 277 USE(representation); |
| 278 if (inobject) { | 278 if (inobject) { |
| 279 int offset = index * kPointerSize; | 279 int offset = index * kPointerSize; |
| 280 __ Ldr(dst, FieldMemOperand(src, offset)); | 280 __ Ldr(dst, FieldMemOperand(src, offset)); |
| 281 } else { | 281 } else { |
| 282 // Calculate the offset into the properties array. | 282 // Calculate the offset into the properties array. |
| 283 int offset = index * kPointerSize + FixedArray::kHeaderSize; | 283 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
| 284 __ Ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset)); | 284 __ Ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset)); |
| 285 __ Ldr(dst, FieldMemOperand(dst, offset)); | 285 __ Ldr(dst, FieldMemOperand(dst, offset)); |
| 286 } | 286 } |
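
For readers following the load path, here is a minimal standalone sketch (plain C++, not V8 code) of the offset arithmetic GenerateFastPropertyLoad emits above. The 8-byte pointer size and 16-byte FixedArray header are assumed values for illustration, and the heap-object untagging handled by FieldMemOperand in the real stub is omitted.

```cpp
#include <cstdio>

// Assumed constants for illustration only (64-bit tagged pointers).
constexpr int kPointerSize = 8;
constexpr int kFixedArrayHeaderSize = 16;

// Byte offset added to the receiver (in-object field) or to the out-of-line
// properties array (backing-store field), mirroring the two branches above.
int FieldByteOffset(bool inobject, int index) {
  return inobject ? index * kPointerSize
                  : index * kPointerSize + kFixedArrayHeaderSize;
}

int main() {
  std::printf("in-object field 2:     +%d bytes\n", FieldByteOffset(true, 2));
  std::printf("backing-store field 2: +%d bytes\n", FieldByteOffset(false, 2));
  return 0;
}
```
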
| (...skipping 12 matching lines...) |
| 299 // Check that the object is a JS array. | 299 // Check that the object is a JS array. |
| 300 __ JumpIfNotObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE, | 300 __ JumpIfNotObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE, |
| 301 miss_label); | 301 miss_label); |
| 302 | 302 |
| 303 // Load length directly from the JS array. | 303 // Load length directly from the JS array. |
| 304 __ Ldr(x0, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 304 __ Ldr(x0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
| 305 __ Ret(); | 305 __ Ret(); |
| 306 } | 306 } |
| 307 | 307 |
| 308 | 308 |
| 309 // Generate code to check if an object is a string. If the object is a | |
| 310 // heap object, its map's instance type is left in the scratch1 register. | |
| 311 static void GenerateStringCheck(MacroAssembler* masm, | |
| 312 Register receiver, | |
| 313 Register scratch1, | |
| 314 Label* smi, | |
| 315 Label* non_string_object) { | |
| 316 // Check that the receiver isn't a smi. | |
| 317 __ JumpIfSmi(receiver, smi); | |
| 318 | |
| 319 // Get the object's instance type field. | |
| 320 __ Ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
| 321 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); | |
| 322 // Check if the "not string" bit is set. | |
| 323 __ Tbnz(scratch1, MaskToBit(kNotStringTag), non_string_object); | |
| 324 } | |
| 325 | |
| 326 | |
| 327 // Generate code to load the length from a string object and return the length. | |
| 328 // If the receiver object is not a string or a wrapped string object the | |
| 329 // execution continues at the miss label. The register containing the | |
| 330 // receiver is not clobbered if the receiver is not a string. | |
| 331 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm, | |
| 332 Register receiver, | |
| 333 Register scratch1, | |
| 334 Register scratch2, | |
| 335 Label* miss) { | |
| 336 // Input registers can't alias because we don't want to clobber the | |
| 337 // receiver register if the object is not a string. | |
| 338 ASSERT(!AreAliased(receiver, scratch1, scratch2)); | |
| 339 | |
| 340 Label check_wrapper; | |
| 341 | |
| 342 // Check if the object is a string leaving the instance type in the | |
| 343 // scratch1 register. | |
| 344 GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper); | |
| 345 | |
| 346 // Load length directly from the string. | |
| 347 __ Ldr(x0, FieldMemOperand(receiver, String::kLengthOffset)); | |
| 348 __ Ret(); | |
| 349 | |
| 350 // Check if the object is a JSValue wrapper. | |
| 351 __ Bind(&check_wrapper); | |
| 352 __ Cmp(scratch1, Operand(JS_VALUE_TYPE)); | |
| 353 __ B(ne, miss); | |
| 354 | |
| 355 // Unwrap the value and check if the wrapped value is a string. | |
| 356 __ Ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset)); | |
| 357 GenerateStringCheck(masm, scratch1, scratch2, miss, miss); | |
| 358 __ Ldr(x0, FieldMemOperand(scratch1, String::kLengthOffset)); | |
| 359 __ Ret(); | |
| 360 } | |
| 361 | |
| 362 | |
| 363 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, | 309 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, |
| 364 Register receiver, | 310 Register receiver, |
| 365 Register scratch1, | 311 Register scratch1, |
| 366 Register scratch2, | 312 Register scratch2, |
| 367 Label* miss_label) { | 313 Label* miss_label) { |
| 368 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); | 314 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); |
| 369 // TryGetFunctionPrototype can't put the result directly in x0 because the | 315 // TryGetFunctionPrototype can't put the result directly in x0 because the |
| 370 // 3 input registers can't alias and we call this function from | 316 // 3 input registers can't alias and we call this function from |
| 371 // LoadIC::GenerateFunctionPrototype, where receiver is x0. So we explicitly | 317 // LoadIC::GenerateFunctionPrototype, where receiver is x0. So we explicitly |
| 372 // move the result into x0. | 318 // move the result into x0. |
| (...skipping 63 matching lines...) |
| 436 DescriptorArray* descriptors = transition->instance_descriptors(); | 382 DescriptorArray* descriptors = transition->instance_descriptors(); |
| 437 PropertyDetails details = descriptors->GetDetails(descriptor); | 383 PropertyDetails details = descriptors->GetDetails(descriptor); |
| 438 Representation representation = details.representation(); | 384 Representation representation = details.representation(); |
| 439 ASSERT(!representation.IsNone()); | 385 ASSERT(!representation.IsNone()); |
| 440 | 386 |
| 441 if (details.type() == CONSTANT) { | 387 if (details.type() == CONSTANT) { |
| 442 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate()); | 388 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate()); |
| 443 __ LoadObject(scratch1, constant); | 389 __ LoadObject(scratch1, constant); |
| 444 __ Cmp(value_reg, scratch1); | 390 __ Cmp(value_reg, scratch1); |
| 445 __ B(ne, miss_label); | 391 __ B(ne, miss_label); |
| 446 } else if (FLAG_track_fields && representation.IsSmi()) { | 392 } else if (representation.IsSmi()) { |
| 447 __ JumpIfNotSmi(value_reg, miss_label); | 393 __ JumpIfNotSmi(value_reg, miss_label); |
| 448 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { | 394 } else if (representation.IsHeapObject()) { |
| 449 __ JumpIfSmi(value_reg, miss_label); | 395 __ JumpIfSmi(value_reg, miss_label); |
| 450 } else if (FLAG_track_double_fields && representation.IsDouble()) { | 396 } else if (representation.IsDouble()) { |
| 451 Label do_store, heap_number; | 397 Label do_store, heap_number; |
| 452 __ AllocateHeapNumber(storage_reg, slow, scratch1, scratch2); | 398 __ AllocateHeapNumber(storage_reg, slow, scratch1, scratch2); |
| 453 | 399 |
| 454 // TODO(jbramley): Is fp_scratch the most appropriate FP scratch register? | 400 // TODO(jbramley): Is fp_scratch the most appropriate FP scratch register? |
| 455 // It's only used in Fcmp, but it's not really safe to use it like this. | 401 // It's only used in Fcmp, but it's not really safe to use it like this. |
| 456 __ JumpIfNotSmi(value_reg, &heap_number); | 402 __ JumpIfNotSmi(value_reg, &heap_number); |
| 457 __ SmiUntagToDouble(fp_scratch, value_reg); | 403 __ SmiUntagToDouble(fp_scratch, value_reg); |
| 458 __ B(&do_store); | 404 __ B(&do_store); |
| 459 | 405 |
| 460 __ Bind(&heap_number); | 406 __ Bind(&heap_number); |
| (...skipping 52 matching lines...) |
| 513 index -= object->map()->inobject_properties(); | 459 index -= object->map()->inobject_properties(); |
| 514 | 460 |
| 515 // TODO(verwaest): Share this code as a code stub. | 461 // TODO(verwaest): Share this code as a code stub. |
| 516 SmiCheck smi_check = representation.IsTagged() | 462 SmiCheck smi_check = representation.IsTagged() |
| 517 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; | 463 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; |
| 518 if (index < 0) { | 464 if (index < 0) { |
| 519 // Set the property straight into the object. | 465 // Set the property straight into the object. |
| 520 int offset = object->map()->instance_size() + (index * kPointerSize); | 466 int offset = object->map()->instance_size() + (index * kPointerSize); |
| 521 // TODO(jbramley): This construct appears in several places in this | 467 // TODO(jbramley): This construct appears in several places in this |
| 522 // function. Try to clean it up, perhaps using a result_reg. | 468 // function. Try to clean it up, perhaps using a result_reg. |
| 523 if (FLAG_track_double_fields && representation.IsDouble()) { | 469 if (representation.IsDouble()) { |
| 524 __ Str(storage_reg, FieldMemOperand(receiver_reg, offset)); | 470 __ Str(storage_reg, FieldMemOperand(receiver_reg, offset)); |
| 525 } else { | 471 } else { |
| 526 __ Str(value_reg, FieldMemOperand(receiver_reg, offset)); | 472 __ Str(value_reg, FieldMemOperand(receiver_reg, offset)); |
| 527 } | 473 } |
| 528 | 474 |
| 529 if (!FLAG_track_fields || !representation.IsSmi()) { | 475 if (!representation.IsSmi()) { |
| 530 // Update the write barrier for the array address. | 476 // Update the write barrier for the array address. |
| 531 if (!FLAG_track_double_fields || !representation.IsDouble()) { | 477 if (!representation.IsDouble()) { |
| 532 __ Mov(storage_reg, value_reg); | 478 __ Mov(storage_reg, value_reg); |
| 533 } | 479 } |
| 534 __ RecordWriteField(receiver_reg, | 480 __ RecordWriteField(receiver_reg, |
| 535 offset, | 481 offset, |
| 536 storage_reg, | 482 storage_reg, |
| 537 scratch1, | 483 scratch1, |
| 538 kLRHasNotBeenSaved, | 484 kLRHasNotBeenSaved, |
| 539 kDontSaveFPRegs, | 485 kDontSaveFPRegs, |
| 540 EMIT_REMEMBERED_SET, | 486 EMIT_REMEMBERED_SET, |
| 541 smi_check); | 487 smi_check); |
| 542 } | 488 } |
| 543 } else { | 489 } else { |
| 544 // Write to the properties array. | 490 // Write to the properties array. |
| 545 int offset = index * kPointerSize + FixedArray::kHeaderSize; | 491 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
| 546 // Get the properties array | 492 // Get the properties array |
| 547 __ Ldr(scratch1, | 493 __ Ldr(scratch1, |
| 548 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); | 494 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); |
| 549 if (FLAG_track_double_fields && representation.IsDouble()) { | 495 if (representation.IsDouble()) { |
| 550 __ Str(storage_reg, FieldMemOperand(scratch1, offset)); | 496 __ Str(storage_reg, FieldMemOperand(scratch1, offset)); |
| 551 } else { | 497 } else { |
| 552 __ Str(value_reg, FieldMemOperand(scratch1, offset)); | 498 __ Str(value_reg, FieldMemOperand(scratch1, offset)); |
| 553 } | 499 } |
| 554 | 500 |
| 555 if (!FLAG_track_fields || !representation.IsSmi()) { | 501 if (!representation.IsSmi()) { |
| 556 // Update the write barrier for the array address. | 502 // Update the write barrier for the array address. |
| 557 if (!FLAG_track_double_fields || !representation.IsDouble()) { | 503 if (!representation.IsDouble()) { |
| 558 __ Mov(storage_reg, value_reg); | 504 __ Mov(storage_reg, value_reg); |
| 559 } | 505 } |
| 560 __ RecordWriteField(scratch1, | 506 __ RecordWriteField(scratch1, |
| 561 offset, | 507 offset, |
| 562 storage_reg, | 508 storage_reg, |
| 563 receiver_reg, | 509 receiver_reg, |
| 564 kLRHasNotBeenSaved, | 510 kLRHasNotBeenSaved, |
| 565 kDontSaveFPRegs, | 511 kDontSaveFPRegs, |
| 566 EMIT_REMEMBERED_SET, | 512 EMIT_REMEMBERED_SET, |
| 567 smi_check); | 513 smi_check); |
| (...skipping 29 matching lines...) |
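
The representation dispatch above (and repeated in the field store that follows) decides which type checks and write barriers get emitted. Below is a minimal sketch in plain C++ (not V8 code) of just those decisions, assuming the transition path where a double-typed field ends up holding a freshly allocated heap-number pointer; the miss-label type checks are left out.

```cpp
#include <cstdio>

enum class Representation { kSmi, kHeapObject, kDouble, kTagged };

// Smi fields never need a write barrier; for everything else a barrier is
// emitted, and its smi check is inlined only when the field is fully tagged.
void EmitStore(Representation r) {
  if (r == Representation::kDouble) {
    std::puts("  store heap-number pointer (value boxed into storage_reg)");
  } else {
    std::puts("  store value into the field");
  }
  if (r == Representation::kSmi) {
    std::puts("  no write barrier: a smi is never a heap pointer");
    return;
  }
  // Corresponds to INLINE_SMI_CHECK vs OMIT_SMI_CHECK in the stubs above.
  const bool inline_smi_check = (r == Representation::kTagged);
  std::printf("  RecordWriteField, smi check %s\n",
              inline_smi_check ? "inlined" : "omitted");
}

int main() {
  const struct { Representation rep; const char* name; } cases[] = {
      {Representation::kSmi, "smi"},
      {Representation::kHeapObject, "heap object"},
      {Representation::kDouble, "double"},
      {Representation::kTagged, "tagged"},
  };
  for (const auto& c : cases) {
    std::printf("%s field:\n", c.name);
    EmitStore(c.rep);
  }
  return 0;
}
```
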
| 597 | 543 |
| 598 int index = lookup->GetFieldIndex().field_index(); | 544 int index = lookup->GetFieldIndex().field_index(); |
| 599 | 545 |
| 600 // Adjust for the number of properties stored in the object. Even in the | 546 // Adjust for the number of properties stored in the object. Even in the |
| 601 // face of a transition we can use the old map here because the size of the | 547 // face of a transition we can use the old map here because the size of the |
| 602 // object and the number of in-object properties is not going to change. | 548 // object and the number of in-object properties is not going to change. |
| 603 index -= object->map()->inobject_properties(); | 549 index -= object->map()->inobject_properties(); |
| 604 | 550 |
| 605 Representation representation = lookup->representation(); | 551 Representation representation = lookup->representation(); |
| 606 ASSERT(!representation.IsNone()); | 552 ASSERT(!representation.IsNone()); |
| 607 if (FLAG_track_fields && representation.IsSmi()) { | 553 if (representation.IsSmi()) { |
| 608 __ JumpIfNotSmi(value_reg, miss_label); | 554 __ JumpIfNotSmi(value_reg, miss_label); |
| 609 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { | 555 } else if (representation.IsHeapObject()) { |
| 610 __ JumpIfSmi(value_reg, miss_label); | 556 __ JumpIfSmi(value_reg, miss_label); |
| 611 } else if (FLAG_track_double_fields && representation.IsDouble()) { | 557 } else if (representation.IsDouble()) { |
| 612 // Load the double storage. | 558 // Load the double storage. |
| 613 if (index < 0) { | 559 if (index < 0) { |
| 614 int offset = (index * kPointerSize) + object->map()->instance_size(); | 560 int offset = (index * kPointerSize) + object->map()->instance_size(); |
| 615 __ Ldr(scratch1, FieldMemOperand(receiver_reg, offset)); | 561 __ Ldr(scratch1, FieldMemOperand(receiver_reg, offset)); |
| 616 } else { | 562 } else { |
| 617 int offset = (index * kPointerSize) + FixedArray::kHeaderSize; | 563 int offset = (index * kPointerSize) + FixedArray::kHeaderSize; |
| 618 __ Ldr(scratch1, | 564 __ Ldr(scratch1, |
| 619 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); | 565 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); |
| 620 __ Ldr(scratch1, FieldMemOperand(scratch1, offset)); | 566 __ Ldr(scratch1, FieldMemOperand(scratch1, offset)); |
| 621 } | 567 } |
| (...skipping 21 matching lines...) |
| 643 } | 589 } |
| 644 | 590 |
| 645 // TODO(verwaest): Share this code as a code stub. | 591 // TODO(verwaest): Share this code as a code stub. |
| 646 SmiCheck smi_check = representation.IsTagged() | 592 SmiCheck smi_check = representation.IsTagged() |
| 647 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; | 593 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; |
| 648 if (index < 0) { | 594 if (index < 0) { |
| 649 // Set the property straight into the object. | 595 // Set the property straight into the object. |
| 650 int offset = object->map()->instance_size() + (index * kPointerSize); | 596 int offset = object->map()->instance_size() + (index * kPointerSize); |
| 651 __ Str(value_reg, FieldMemOperand(receiver_reg, offset)); | 597 __ Str(value_reg, FieldMemOperand(receiver_reg, offset)); |
| 652 | 598 |
| 653 if (!FLAG_track_fields || !representation.IsSmi()) { | 599 if (!representation.IsSmi()) { |
| 654 // Skip updating write barrier if storing a smi. | 600 // Skip updating write barrier if storing a smi. |
| 655 __ JumpIfSmi(value_reg, &exit); | 601 __ JumpIfSmi(value_reg, &exit); |
| 656 | 602 |
| 657 // Update the write barrier for the array address. | 603 // Update the write barrier for the array address. |
| 658 // Pass the now unused name_reg as a scratch register. | 604 // Pass the now unused name_reg as a scratch register. |
| 659 __ Mov(name_reg, value_reg); | 605 __ Mov(name_reg, value_reg); |
| 660 __ RecordWriteField(receiver_reg, | 606 __ RecordWriteField(receiver_reg, |
| 661 offset, | 607 offset, |
| 662 name_reg, | 608 name_reg, |
| 663 scratch1, | 609 scratch1, |
| 664 kLRHasNotBeenSaved, | 610 kLRHasNotBeenSaved, |
| 665 kDontSaveFPRegs, | 611 kDontSaveFPRegs, |
| 666 EMIT_REMEMBERED_SET, | 612 EMIT_REMEMBERED_SET, |
| 667 smi_check); | 613 smi_check); |
| 668 } | 614 } |
| 669 } else { | 615 } else { |
| 670 // Write to the properties array. | 616 // Write to the properties array. |
| 671 int offset = index * kPointerSize + FixedArray::kHeaderSize; | 617 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
| 672 // Get the properties array | 618 // Get the properties array |
| 673 __ Ldr(scratch1, | 619 __ Ldr(scratch1, |
| 674 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); | 620 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); |
| 675 __ Str(value_reg, FieldMemOperand(scratch1, offset)); | 621 __ Str(value_reg, FieldMemOperand(scratch1, offset)); |
| 676 | 622 |
| 677 if (!FLAG_track_fields || !representation.IsSmi()) { | 623 if (!representation.IsSmi()) { |
| 678 // Skip updating write barrier if storing a smi. | 624 // Skip updating write barrier if storing a smi. |
| 679 __ JumpIfSmi(value_reg, &exit); | 625 __ JumpIfSmi(value_reg, &exit); |
| 680 | 626 |
| 681 // Update the write barrier for the array address. | 627 // Update the write barrier for the array address. |
| 682 // Ok to clobber receiver_reg and name_reg, since we return. | 628 // Ok to clobber receiver_reg and name_reg, since we return. |
| 683 __ Mov(name_reg, value_reg); | 629 __ Mov(name_reg, value_reg); |
| 684 __ RecordWriteField(scratch1, | 630 __ RecordWriteField(scratch1, |
| 685 offset, | 631 offset, |
| 686 name_reg, | 632 name_reg, |
| 687 receiver_reg, | 633 receiver_reg, |
| (...skipping 208 matching lines...) |
| 896 } | 842 } |
| 897 | 843 |
| 898 if (depth != 1 || check == CHECK_ALL_MAPS) { | 844 if (depth != 1 || check == CHECK_ALL_MAPS) { |
| 899 __ CheckMap(map_reg, current_map, miss, DONT_DO_SMI_CHECK); | 845 __ CheckMap(map_reg, current_map, miss, DONT_DO_SMI_CHECK); |
| 900 } | 846 } |
| 901 | 847 |
| 902 // Check access rights to the global object. This has to happen after | 848 // Check access rights to the global object. This has to happen after |
| 903 // the map check so that we know that the object is actually a global | 849 // the map check so that we know that the object is actually a global |
| 904 // object. | 850 // object. |
| 905 if (current_map->IsJSGlobalProxyMap()) { | 851 if (current_map->IsJSGlobalProxyMap()) { |
| 906 __ CheckAccessGlobalProxy(reg, scratch2, miss); | 852 UseScratchRegisterScope temps(masm()); |
| 853 __ CheckAccessGlobalProxy(reg, scratch2, temps.AcquireX(), miss); |
| 907 } else if (current_map->IsJSGlobalObjectMap()) { | 854 } else if (current_map->IsJSGlobalObjectMap()) { |
| 908 GenerateCheckPropertyCell( | 855 GenerateCheckPropertyCell( |
| 909 masm(), Handle<JSGlobalObject>::cast(current), name, | 856 masm(), Handle<JSGlobalObject>::cast(current), name, |
| 910 scratch2, miss); | 857 scratch2, miss); |
| 911 } | 858 } |
| 912 | 859 |
| 913 reg = holder_reg; // From now on the object will be in holder_reg. | 860 reg = holder_reg; // From now on the object will be in holder_reg. |
| 914 | 861 |
| 915 if (heap()->InNewSpace(*prototype)) { | 862 if (heap()->InNewSpace(*prototype)) { |
| 916 // The prototype is in new space; we cannot store a reference to it | 863 // The prototype is in new space; we cannot store a reference to it |
| (...skipping 16 matching lines...) |
| 933 // Check the holder map. | 880 // Check the holder map. |
| 934 if (depth != 0 || check == CHECK_ALL_MAPS) { | 881 if (depth != 0 || check == CHECK_ALL_MAPS) { |
| 935 // Check the holder map. | 882 // Check the holder map. |
| 936 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK); | 883 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK); |
| 937 } | 884 } |
| 938 | 885 |
| 939 // Perform security check for access to the global object. | 886 // Perform security check for access to the global object. |
| 940 ASSERT(current_map->IsJSGlobalProxyMap() || | 887 ASSERT(current_map->IsJSGlobalProxyMap() || |
| 941 !current_map->is_access_check_needed()); | 888 !current_map->is_access_check_needed()); |
| 942 if (current_map->IsJSGlobalProxyMap()) { | 889 if (current_map->IsJSGlobalProxyMap()) { |
| 943 __ CheckAccessGlobalProxy(reg, scratch1, miss); | 890 __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss); |
| 944 } | 891 } |
| 945 | 892 |
| 946 // Return the register containing the holder. | 893 // Return the register containing the holder. |
| 947 return reg; | 894 return reg; |
| 948 } | 895 } |
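
A conceptual outline (plain C++, not V8 code) of the per-hop checks in the prototype chain walk above: each hop may re-check the map, a global proxy triggers a security check via CheckAccessGlobalProxy, and a global object gets a GenerateCheckPropertyCell check to ensure the named property has not appeared on it.

```cpp
#include <cstdio>

enum class MapKind { kOrdinary, kJSGlobalProxy, kJSGlobalObject };

// One hop of the chain walk: optional map check plus the global-specific
// access or property-cell check seen in the stub above.
void CheckHop(MapKind kind, bool check_map) {
  if (check_map) std::puts("  CheckMap -> jump to miss on mismatch");
  switch (kind) {
    case MapKind::kJSGlobalProxy:
      std::puts("  CheckAccessGlobalProxy (security check)");
      break;
    case MapKind::kJSGlobalObject:
      std::puts("  GenerateCheckPropertyCell (property must still be absent)");
      break;
    case MapKind::kOrdinary:
      break;
  }
}

int main() {
  std::puts("global proxy hop:");
  CheckHop(MapKind::kJSGlobalProxy, /*check_map=*/true);
  std::puts("ordinary prototype hop:");
  CheckHop(MapKind::kOrdinary, /*check_map=*/false);
  return 0;
}
```
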
| 949 | 896 |
| 950 | 897 |
| 951 void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) { | 898 void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) { |
| 952 if (!miss->is_unused()) { | 899 if (!miss->is_unused()) { |
| 953 Label success; | 900 Label success; |
| (...skipping 278 matching lines...) |
| 1232 } | 1179 } |
| 1233 | 1180 |
| 1234 | 1181 |
| 1235 #undef __ | 1182 #undef __ |
| 1236 #define __ ACCESS_MASM(masm) | 1183 #define __ ACCESS_MASM(masm) |
| 1237 | 1184 |
| 1238 | 1185 |
| 1239 void StoreStubCompiler::GenerateStoreViaSetter( | 1186 void StoreStubCompiler::GenerateStoreViaSetter( |
| 1240 MacroAssembler* masm, | 1187 MacroAssembler* masm, |
| 1241 Handle<HeapType> type, | 1188 Handle<HeapType> type, |
| 1189 Register receiver, |
| 1242 Handle<JSFunction> setter) { | 1190 Handle<JSFunction> setter) { |
| 1243 // ----------- S t a t e ------------- | 1191 // ----------- S t a t e ------------- |
| 1244 // -- x0 : value | |
| 1245 // -- x1 : receiver | |
| 1246 // -- x2 : name | |
| 1247 // -- lr : return address | 1192 // -- lr : return address |
| 1248 // ----------------------------------- | 1193 // ----------------------------------- |
| 1249 Register value = x0; | |
| 1250 Register receiver = x1; | |
| 1251 Label miss; | 1194 Label miss; |
| 1252 | 1195 |
| 1253 { | 1196 { |
| 1254 FrameScope scope(masm, StackFrame::INTERNAL); | 1197 FrameScope scope(masm, StackFrame::INTERNAL); |
| 1255 | 1198 |
| 1256 // Save value register, so we can restore it later. | 1199 // Save value register, so we can restore it later. |
| 1257 __ Push(value); | 1200 __ Push(value()); |
| 1258 | 1201 |
| 1259 if (!setter.is_null()) { | 1202 if (!setter.is_null()) { |
| 1260 // Call the JavaScript setter with receiver and value on the stack. | 1203 // Call the JavaScript setter with receiver and value on the stack. |
| 1261 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { | 1204 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { |
| 1262 // Swap in the global receiver. | 1205 // Swap in the global receiver. |
| 1263 __ Ldr(receiver, | 1206 __ Ldr(receiver, |
| 1264 FieldMemOperand( | 1207 FieldMemOperand( |
| 1265 receiver, JSGlobalObject::kGlobalReceiverOffset)); | 1208 receiver, JSGlobalObject::kGlobalReceiverOffset)); |
| 1266 } | 1209 } |
| 1267 __ Push(receiver, value); | 1210 __ Push(receiver, value()); |
| 1268 ParameterCount actual(1); | 1211 ParameterCount actual(1); |
| 1269 ParameterCount expected(setter); | 1212 ParameterCount expected(setter); |
| 1270 __ InvokeFunction(setter, expected, actual, | 1213 __ InvokeFunction(setter, expected, actual, |
| 1271 CALL_FUNCTION, NullCallWrapper()); | 1214 CALL_FUNCTION, NullCallWrapper()); |
| 1272 } else { | 1215 } else { |
| 1273 // If we generate a global code snippet for deoptimization only, remember | 1216 // If we generate a global code snippet for deoptimization only, remember |
| 1274 // the place to continue after deoptimization. | 1217 // the place to continue after deoptimization. |
| 1275 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); | 1218 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); |
| 1276 } | 1219 } |
| 1277 | 1220 |
| 1278 // We have to return the passed value, not the return value of the setter. | 1221 // We have to return the passed value, not the return value of the setter. |
| 1279 __ Pop(value); | 1222 __ Pop(x0); |
| 1280 | 1223 |
| 1281 // Restore context register. | 1224 // Restore context register. |
| 1282 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 1225 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 1283 } | 1226 } |
| 1284 __ Ret(); | 1227 __ Ret(); |
| 1285 } | 1228 } |
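
The "return the passed value" step above reflects JavaScript assignment semantics: obj.prop = v evaluates to v even when an accessor intercepts the store, so the stub saves the value register before the call and restores it into x0 afterwards. A conceptual sketch in plain C++ (not V8 code), using a hypothetical StoreViaSetter helper:

```cpp
#include <functional>
#include <iostream>

// Hypothetical helper mirroring the stub's shape: call the setter, discard
// its result, and hand back the value that was stored.
int StoreViaSetter(const std::function<int(int)>& setter, int value) {
  setter(value);  // the setter's return value is deliberately ignored
  return value;   // the assignment expression yields the stored value
}

int main() {
  auto setter = [](int v) {
    std::cout << "setter observed " << v << "\n";
    return -1;  // must not leak out as the result of the assignment
  };
  std::cout << StoreViaSetter(setter, 42) << "\n";  // prints 42, not -1
  return 0;
}
```
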
| 1286 | 1229 |
| 1287 | 1230 |
| 1288 #undef __ | 1231 #undef __ |
| 1289 #define __ ACCESS_MASM(masm()) | 1232 #define __ ACCESS_MASM(masm()) |
| (...skipping 46 matching lines...) |
| 1336 } | 1279 } |
| 1337 | 1280 |
| 1338 | 1281 |
| 1339 Register* KeyedLoadStubCompiler::registers() { | 1282 Register* KeyedLoadStubCompiler::registers() { |
| 1340 // receiver, name/key, scratch1, scratch2, scratch3, scratch4. | 1283 // receiver, name/key, scratch1, scratch2, scratch3, scratch4. |
| 1341 static Register registers[] = { x1, x0, x2, x3, x4, x5 }; | 1284 static Register registers[] = { x1, x0, x2, x3, x4, x5 }; |
| 1342 return registers; | 1285 return registers; |
| 1343 } | 1286 } |
| 1344 | 1287 |
| 1345 | 1288 |
| 1289 Register StoreStubCompiler::value() { |
| 1290 return x0; |
| 1291 } |
| 1292 |
| 1293 |
| 1346 Register* StoreStubCompiler::registers() { | 1294 Register* StoreStubCompiler::registers() { |
| 1347 // receiver, name, value, scratch1, scratch2, scratch3. | 1295 // receiver, name, scratch1, scratch2, scratch3. |
| 1348 static Register registers[] = { x1, x2, x0, x3, x4, x5 }; | 1296 static Register registers[] = { x1, x2, x3, x4, x5 }; |
| 1349 return registers; | 1297 return registers; |
| 1350 } | 1298 } |
| 1351 | 1299 |
| 1352 | 1300 |
| 1353 Register* KeyedStoreStubCompiler::registers() { | 1301 Register* KeyedStoreStubCompiler::registers() { |
| 1354 // receiver, name, value, scratch1, scratch2, scratch3. | 1302 // receiver, name, scratch1, scratch2, scratch3. |
| 1355 static Register registers[] = { x2, x1, x0, x3, x4, x5 }; | 1303 static Register registers[] = { x2, x1, x3, x4, x5 }; |
| 1356 return registers; | 1304 return registers; |
| 1357 } | 1305 } |
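
As read from the two register tables above (a summary of this patch, not an official V8 reference), the value register is no longer carried in the registers() array and is instead exposed through value(), which on A64 is x0 for both store stubs. An illustrative sketch:

```cpp
#include <cstdio>

// Register assignments as read from the stub compiler above. These are plain
// strings for illustration; the real code uses Register objects.
struct StoreConvention {
  const char* receiver;
  const char* name;      // property name or key
  const char* value;     // now exposed via value() instead of registers()
  const char* scratches[3];
};

int main() {
  constexpr StoreConvention store       = {"x1", "x2", "x0", {"x3", "x4", "x5"}};
  constexpr StoreConvention keyed_store = {"x2", "x1", "x0", {"x3", "x4", "x5"}};
  std::printf("StoreIC:      receiver=%s name=%s value=%s\n",
              store.receiver, store.name, store.value);
  std::printf("KeyedStoreIC: receiver=%s key=%s  value=%s\n",
              keyed_store.receiver, keyed_store.name, keyed_store.value);
  return 0;
}
```
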
| 1358 | 1306 |
| 1359 | 1307 |
| 1360 #undef __ | 1308 #undef __ |
| 1361 #define __ ACCESS_MASM(masm) | 1309 #define __ ACCESS_MASM(masm) |
| 1362 | 1310 |
| 1363 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm, | 1311 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm, |
| 1364 Handle<HeapType> type, | 1312 Handle<HeapType> type, |
| 1365 Register receiver, | 1313 Register receiver, |
| (...skipping 172 matching lines...) |
| 1538 | 1486 |
| 1539 // Miss case, call the runtime. | 1487 // Miss case, call the runtime. |
| 1540 __ Bind(&miss); | 1488 __ Bind(&miss); |
| 1541 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); | 1489 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); |
| 1542 } | 1490 } |
| 1543 | 1491 |
| 1544 | 1492 |
| 1545 } } // namespace v8::internal | 1493 } } // namespace v8::internal |
| 1546 | 1494 |
| 1547 #endif // V8_TARGET_ARCH_A64 | 1495 #endif // V8_TARGET_ARCH_A64 |