OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 351 matching lines...)
362 __ JumpIfSmi(object, slow_case); | 362 __ JumpIfSmi(object, slow_case); |
363 // Check that the object is some kind of JSObject. | 363 // Check that the object is some kind of JSObject. |
364 __ JumpIfObjectType(object, map, scratch1, FIRST_JS_RECEIVER_TYPE, | 364 __ JumpIfObjectType(object, map, scratch1, FIRST_JS_RECEIVER_TYPE, |
365 slow_case, lt); | 365 slow_case, lt); |
366 | 366 |
367 // Check that the key is a positive smi. | 367 // Check that the key is a positive smi. |
368 __ JumpIfNotSmi(key, slow_case); | 368 __ JumpIfNotSmi(key, slow_case); |
369 __ Tbnz(key, kXSignBit, slow_case); | 369 __ Tbnz(key, kXSignBit, slow_case); |
370 | 370 |
371 // Load the elements object and check its map. | 371 // Load the elements object and check its map. |
372 Handle<Map> arguments_map(heap->non_strict_arguments_elements_map()); | 372 Handle<Map> arguments_map(heap->sloppy_arguments_elements_map()); |
373 __ Ldr(map, FieldMemOperand(object, JSObject::kElementsOffset)); | 373 __ Ldr(map, FieldMemOperand(object, JSObject::kElementsOffset)); |
374 __ CheckMap(map, scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK); | 374 __ CheckMap(map, scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK); |
375 | 375 |
376 // Check if element is in the range of mapped arguments. If not, jump | 376 // Check if element is in the range of mapped arguments. If not, jump |
377 // to the unmapped lookup. | 377 // to the unmapped lookup. |
378 __ Ldr(scratch1, FieldMemOperand(map, FixedArray::kLengthOffset)); | 378 __ Ldr(scratch1, FieldMemOperand(map, FixedArray::kLengthOffset)); |
379 __ Sub(scratch1, scratch1, Operand(Smi::FromInt(2))); | 379 __ Sub(scratch1, scratch1, Operand(Smi::FromInt(2))); |
380 __ Cmp(key, scratch1); | 380 __ Cmp(key, scratch1); |
381 __ B(hs, unmapped_case); | 381 __ B(hs, unmapped_case); |
382 | 382 |
(...skipping 112 matching lines...)
495 | 495 |
496 // TODO(jbramley): Does the target actually expect an argument in x3, or is | 496 // TODO(jbramley): Does the target actually expect an argument in x3, or is |
497 // this inherited from ARM's push semantics? | 497 // this inherited from ARM's push semantics? |
498 __ Mov(x3, x0); | 498 __ Mov(x3, x0); |
499 __ Push(x3, x2); | 499 __ Push(x3, x2); |
500 | 500 |
501 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); | 501 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); |
502 } | 502 } |
503 | 503 |
504 | 504 |
505 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) { | 505 void KeyedLoadIC::GenerateSloppyArguments(MacroAssembler* masm) { |
506 // ---------- S t a t e -------------- | 506 // ---------- S t a t e -------------- |
507 // -- lr : return address | 507 // -- lr : return address |
508 // -- x0 : key | 508 // -- x0 : key |
509 // -- x1 : receiver | 509 // -- x1 : receiver |
510 // ----------------------------------- | 510 // ----------------------------------- |
511 Register result = x0; | 511 Register result = x0; |
512 Register key = x0; | 512 Register key = x0; |
513 Register receiver = x1; | 513 Register receiver = x1; |
514 Label miss, unmapped; | 514 Label miss, unmapped; |
515 | 515 |
(...skipping 11 matching lines...)
527 __ JumpIfRoot(x2, Heap::kTheHoleValueRootIndex, &miss); | 527 __ JumpIfRoot(x2, Heap::kTheHoleValueRootIndex, &miss); |
528 // Move the result in x0. x0 must be preserved on miss. | 528 // Move the result in x0. x0 must be preserved on miss. |
529 __ Mov(result, x2); | 529 __ Mov(result, x2); |
530 __ Ret(); | 530 __ Ret(); |
531 | 531 |
532 __ Bind(&miss); | 532 __ Bind(&miss); |
533 GenerateMiss(masm); | 533 GenerateMiss(masm); |
534 } | 534 } |
535 | 535 |
536 | 536 |
537 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) { | 537 void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) { |
538 ASM_LOCATION("KeyedStoreIC::GenerateNonStrictArguments"); | 538 ASM_LOCATION("KeyedStoreIC::GenerateSloppyArguments"); |
539 // ---------- S t a t e -------------- | 539 // ---------- S t a t e -------------- |
540 // -- lr : return address | 540 // -- lr : return address |
541 // -- x0 : value | 541 // -- x0 : value |
542 // -- x1 : key | 542 // -- x1 : key |
543 // -- x2 : receiver | 543 // -- x2 : receiver |
544 // ----------------------------------- | 544 // ----------------------------------- |
545 | 545 |
546 Label slow, notin; | 546 Label slow, notin; |
547 | 547 |
548 Register value = x0; | 548 Register value = x0; |
(...skipping 855 matching lines...)
1404 ASSERT(to_patch->Mask(TestBranchMask) == TBNZ); | 1404 ASSERT(to_patch->Mask(TestBranchMask) == TBNZ); |
1405 // This is JumpIfSmi(smi_reg, branch_imm). | 1405 // This is JumpIfSmi(smi_reg, branch_imm). |
1406 patcher.tbz(smi_reg, 0, branch_imm); | 1406 patcher.tbz(smi_reg, 0, branch_imm); |
1407 } | 1407 } |
1408 } | 1408 } |
1409 | 1409 |
1410 | 1410 |
1411 } } // namespace v8::internal | 1411 } } // namespace v8::internal |
1412 | 1412 |
1413 #endif // V8_TARGET_ARCH_A64 | 1413 #endif // V8_TARGET_ARCH_A64 |
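
A minimal C++ sketch (not V8 code) of the lookup that GenerateMappedArgumentsLookup emits in the diff above, assuming a sloppy-arguments elements array with two header slots (context and unmapped backing store) followed by the mapped parameter entries; SloppyArgumentsSketch, kHole, and the value-in-slot simplification are hypothetical stand-ins, not V8 identifiers.

#include <cstdint>
#include <optional>
#include <vector>

// Hypothetical flat model of the elements array that the generated code
// checks against sloppy_arguments_elements_map:
//   slots[0] = context, slots[1] = unmapped backing store,
//   slots[2..] = mapped parameter entries (kHole when not mapped).
struct SloppyArgumentsSketch {
  static constexpr intptr_t kHole = -1;  // stand-in for the-hole sentinel
  std::vector<intptr_t> slots;

  // Mirrors the checks visible in the assembly: reject negative keys
  // (the Tbnz on kXSignBit), treat keys >= length - 2 as the unmapped
  // case (the Sub/Cmp/B hs sequence), and fall back when the slot holds
  // the hole (the JumpIfRoot against kTheHoleValueRootIndex).
  std::optional<intptr_t> MappedLookup(intptr_t key) const {
    if (key < 0) return std::nullopt;
    intptr_t mapped = static_cast<intptr_t>(slots.size()) - 2;
    if (key >= mapped) return std::nullopt;   // take the unmapped lookup
    intptr_t value = slots[2 + key];
    if (value == kHole) return std::nullopt;  // miss / runtime fallback
    return value;
  }
};

For example, MappedLookup(0) on {ctx, backing, 42} would yield 42, while MappedLookup(1) would fall through to the unmapped path, matching the mapped-range check performed before the unmapped lookup in the patch.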