
Diff: src/builtins/builtins-sharedarraybuffer.cc

Issue 2732213005: Revert "This is a speculative chain of reverts to improve a Chrome" (Closed)
Patch Set: merge Created 3 years, 9 months ago
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/macros.h"
#include "src/base/platform/mutex.h"
#include "src/base/platform/time.h"
#include "src/builtins/builtins-utils.h"
#include "src/builtins/builtins.h"
#include "src/code-factory.h"
#include "src/code-stub-assembler.h"
#include "src/conversions-inl.h"
#include "src/counters.h"
#include "src/factory.h"
#include "src/futex-emulation.h"
#include "src/globals.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {

using compiler::Node;

class SharedArrayBufferBuiltinsAssembler : public CodeStubAssembler {
 public:
  explicit SharedArrayBufferBuiltinsAssembler(
      compiler::CodeAssemblerState* state)
(...skipping 311 matching lines...)
                 AtomicExchange(MachineType::Uint32(), backing_store,
                                WordShl(index_word, 2), value_word32)));

  // This shouldn't happen, we've already validated the type.
  Bind(&other);
  Unreachable();
#endif  // V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64 || V8_TARGET_ARCH_PPC64
        // || V8_TARGET_ARCH_PPC || V8_TARGET_ARCH_S390 || V8_TARGET_ARCH_S390X
}

inline bool AtomicIsLockFree(uint32_t size) {
  return size == 1 || size == 2 || size == 4;
}

// ES #sec-atomics.islockfree
BUILTIN(AtomicsIsLockFree) {
  HandleScope scope(isolate);
  Handle<Object> size = args.atOrUndefined(isolate, 1);
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, size, Object::ToNumber(size));
  return *isolate->factory()->ToBoolean(AtomicIsLockFree(size->Number()));
}

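The helper above hard-codes the element sizes that are handled without a lock. As a rough cross-check outside V8 (an illustration only, not part of this patch), C++17's std::atomic typically reports the same property for 1-, 2- and 4-byte integers on mainstream platforms:

// Illustration only (not part of this patch): probe lock-freedom of small
// integer atomics with C++17 std::atomic.
#include <atomic>
#include <cstdint>
#include <cstdio>

int main() {
  std::printf("1 byte:  %d\n", std::atomic<std::uint8_t>::is_always_lock_free);
  std::printf("2 bytes: %d\n", std::atomic<std::uint16_t>::is_always_lock_free);
  std::printf("4 bytes: %d\n", std::atomic<std::uint32_t>::is_always_lock_free);
  return 0;
}
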
// ES #sec-validatesharedintegertypedarray
MUST_USE_RESULT MaybeHandle<JSTypedArray> ValidateSharedIntegerTypedArray(
    Isolate* isolate, Handle<Object> object, bool only_int32 = false) {
  if (object->IsJSTypedArray()) {
    Handle<JSTypedArray> typed_array = Handle<JSTypedArray>::cast(object);
    if (typed_array->GetBuffer()->is_shared()) {
      if (only_int32) {
        if (typed_array->type() == kExternalInt32Array) return typed_array;
      } else {
        if (typed_array->type() != kExternalFloat32Array &&
            typed_array->type() != kExternalFloat64Array &&
            typed_array->type() != kExternalUint8ClampedArray)
          return typed_array;
      }
    }
  }

  THROW_NEW_ERROR(
      isolate,
      NewTypeError(only_int32 ? MessageTemplate::kNotInt32SharedTypedArray
                              : MessageTemplate::kNotIntegerSharedTypedArray,
                   object),
      JSTypedArray);
}

// ES #sec-validateatomicaccess
// ValidateAtomicAccess( typedArray, requestIndex )
MUST_USE_RESULT Maybe<size_t> ValidateAtomicAccess(
    Isolate* isolate, Handle<JSTypedArray> typed_array,
    Handle<Object> request_index) {
  // TODO(v8:5961): Use ToIndex for indexes.
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, request_index,
                                   Object::ToNumber(request_index),
                                   Nothing<size_t>());
  Handle<Object> offset;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, offset,
                                   Object::ToInteger(isolate, request_index),
                                   Nothing<size_t>());
  if (!request_index->SameValue(*offset)) {
    isolate->Throw(*isolate->factory()->NewRangeError(
        MessageTemplate::kInvalidAtomicAccessIndex));
    return Nothing<size_t>();
  }
  size_t access_index;
  uint32_t length = typed_array->length_value();
  if (!TryNumberToSize(*request_index, &access_index) ||
      access_index >= length) {
    isolate->Throw(*isolate->factory()->NewRangeError(
        MessageTemplate::kInvalidAtomicAccessIndex));
    return Nothing<size_t>();
  }
  return Just<size_t>(access_index);
}

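The access check accepts an index only when converting it to an integer does not change its value and it lies inside the array bounds; anything else becomes a RangeError. A standalone sketch of that predicate (IsValidAtomicIndex is a hypothetical helper, not part of this patch, and ToInteger is approximated by truncation):

// Illustration only: a plain-double version of the index check above.
#include <cmath>
#include <cstdio>

// Hypothetical helper; ToInteger is approximated by truncation.
bool IsValidAtomicIndex(double request_index, double length) {
  if (std::isnan(request_index)) return false;                    // NaN rejected
  if (std::trunc(request_index) != request_index) return false;   // fractional
  return request_index >= 0 && request_index < length;            // bounds check
}

int main() {
  std::printf("%d %d %d %d\n",
              IsValidAtomicIndex(2, 4),     // 1: in range
              IsValidAtomicIndex(1.5, 4),   // 0: fractional index
              IsValidAtomicIndex(-1, 4),    // 0: negative
              IsValidAtomicIndex(4, 4));    // 0: index == length
}
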
// ES #sec-atomics.wake
// Atomics.wake( typedArray, index, count )
BUILTIN(AtomicsWake) {
  HandleScope scope(isolate);
  Handle<Object> array = args.atOrUndefined(isolate, 1);
  Handle<Object> index = args.atOrUndefined(isolate, 2);
  Handle<Object> count = args.atOrUndefined(isolate, 3);

  Handle<JSTypedArray> sta;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, sta, ValidateSharedIntegerTypedArray(isolate, array, true));

  Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
  if (maybe_index.IsNothing()) return isolate->heap()->exception();
  size_t i = maybe_index.FromJust();

  uint32_t c;
  if (count->IsUndefined(isolate)) {
    c = kMaxUInt32;
  } else {
    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, count,
                                       Object::ToInteger(isolate, count));
    double count_double = count->Number();
    if (count_double < 0)
      count_double = 0;
    else if (count_double > kMaxUInt32)
      count_double = kMaxUInt32;
    c = static_cast<uint32_t>(count_double);
  }

  Handle<JSArrayBuffer> array_buffer = sta->GetBuffer();
  size_t addr = (i << 2) + NumberToSize(sta->byte_offset());

  return FutexEmulation::Wake(isolate, array_buffer, addr, c);
}

// ES #sec-atomics.wait
// Atomics.wait( typedArray, index, value, timeout )
BUILTIN(AtomicsWait) {
  HandleScope scope(isolate);
  Handle<Object> array = args.atOrUndefined(isolate, 1);
  Handle<Object> index = args.atOrUndefined(isolate, 2);
  Handle<Object> value = args.atOrUndefined(isolate, 3);
  Handle<Object> timeout = args.atOrUndefined(isolate, 4);

  Handle<JSTypedArray> sta;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, sta, ValidateSharedIntegerTypedArray(isolate, array, true));

  Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
  if (maybe_index.IsNothing()) return isolate->heap()->exception();
  size_t i = maybe_index.FromJust();

  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
                                     Object::ToInt32(isolate, value));
  int32_t value_int32 = NumberToInt32(*value);

  double timeout_number;
  if (timeout->IsUndefined(isolate)) {
    timeout_number = isolate->heap()->infinity_value()->Number();
  } else {
    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, timeout,
                                       Object::ToNumber(timeout));
    timeout_number = timeout->Number();
    if (std::isnan(timeout_number))
      timeout_number = isolate->heap()->infinity_value()->Number();
    else if (timeout_number < 0)
      timeout_number = 0;
  }

  if (!isolate->allow_atomics_wait()) {
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewTypeError(MessageTemplate::kAtomicsWaitNotAllowed));
  }

  Handle<JSArrayBuffer> array_buffer = sta->GetBuffer();
  size_t addr = (i << 2) + NumberToSize(sta->byte_offset());

  return FutexEmulation::Wait(isolate, array_buffer, addr, value_int32,
                              timeout_number);
}

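Before handing off to the futex emulation, both builtins normalize their numeric arguments: the wake count is clamped to [0, kMaxUInt32] (undefined means "wake all waiters"), and the wait timeout maps NaN to infinity and negative values to zero. A standalone sketch of that clamping (ClampWakeCount and NormalizeWaitTimeout are made-up helper names; illustration only, not part of the patch):

// Illustration only: the clamping applied to Atomics.wake's count and
// Atomics.wait's timeout, written as standalone helpers.
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <limits>

uint32_t ClampWakeCount(double count) {
  if (count < 0) return 0;
  const double max = std::numeric_limits<uint32_t>::max();  // kMaxUInt32
  if (count > max) return std::numeric_limits<uint32_t>::max();
  return static_cast<uint32_t>(count);
}

double NormalizeWaitTimeout(double timeout_ms) {
  if (std::isnan(timeout_ms)) return std::numeric_limits<double>::infinity();
  return timeout_ms < 0 ? 0 : timeout_ms;
}

int main() {
  // Prints "0 inf": a negative count wakes nobody, a NaN timeout never expires.
  std::printf("%u %g\n", ClampWakeCount(-3), NormalizeWaitTimeout(NAN));
}
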
namespace {

#if V8_CC_GNU

template <typename T>
inline T CompareExchangeSeqCst(T* p, T oldval, T newval) {
  (void)__atomic_compare_exchange_n(p, &oldval, newval, 0, __ATOMIC_SEQ_CST,
                                    __ATOMIC_SEQ_CST);
  return oldval;
}

template <typename T>
inline T AddSeqCst(T* p, T value) {
  return __atomic_fetch_add(p, value, __ATOMIC_SEQ_CST);
}

template <typename T>
inline T SubSeqCst(T* p, T value) {
  return __atomic_fetch_sub(p, value, __ATOMIC_SEQ_CST);
}

template <typename T>
inline T AndSeqCst(T* p, T value) {
  return __atomic_fetch_and(p, value, __ATOMIC_SEQ_CST);
}

template <typename T>
inline T OrSeqCst(T* p, T value) {
  return __atomic_fetch_or(p, value, __ATOMIC_SEQ_CST);
}

template <typename T>
inline T XorSeqCst(T* p, T value) {
  return __atomic_fetch_xor(p, value, __ATOMIC_SEQ_CST);
}

#elif V8_CC_MSVC

#define InterlockedCompareExchange32 _InterlockedCompareExchange
#define InterlockedExchange32 _InterlockedExchange
#define InterlockedExchangeAdd32 _InterlockedExchangeAdd
#define InterlockedAnd32 _InterlockedAnd
#define InterlockedOr32 _InterlockedOr
#define InterlockedXor32 _InterlockedXor
#define InterlockedExchangeAdd16 _InterlockedExchangeAdd16
#define InterlockedCompareExchange8 _InterlockedCompareExchange8
#define InterlockedExchangeAdd8 _InterlockedExchangeAdd8

#define ATOMIC_OPS(type, suffix, vctype)                                    \
  inline type AddSeqCst(type* p, type value) {                              \
    return InterlockedExchangeAdd##suffix(reinterpret_cast<vctype*>(p),     \
                                          bit_cast<vctype>(value));         \
  }                                                                         \
  inline type SubSeqCst(type* p, type value) {                              \
    return InterlockedExchangeAdd##suffix(reinterpret_cast<vctype*>(p),     \
                                          -bit_cast<vctype>(value));        \
  }                                                                         \
  inline type AndSeqCst(type* p, type value) {                              \
    return InterlockedAnd##suffix(reinterpret_cast<vctype*>(p),             \
                                  bit_cast<vctype>(value));                 \
  }                                                                         \
  inline type OrSeqCst(type* p, type value) {                               \
    return InterlockedOr##suffix(reinterpret_cast<vctype*>(p),              \
                                 bit_cast<vctype>(value));                  \
  }                                                                         \
  inline type XorSeqCst(type* p, type value) {                              \
    return InterlockedXor##suffix(reinterpret_cast<vctype*>(p),             \
                                  bit_cast<vctype>(value));                 \
  }                                                                         \
  inline type CompareExchangeSeqCst(type* p, type oldval, type newval) {    \
    return InterlockedCompareExchange##suffix(reinterpret_cast<vctype*>(p), \
                                              bit_cast<vctype>(newval),     \
                                              bit_cast<vctype>(oldval));    \
  }

ATOMIC_OPS(int8_t, 8, char)
ATOMIC_OPS(uint8_t, 8, char)
ATOMIC_OPS(int16_t, 16, short)  /* NOLINT(runtime/int) */
ATOMIC_OPS(uint16_t, 16, short) /* NOLINT(runtime/int) */
ATOMIC_OPS(int32_t, 32, long)   /* NOLINT(runtime/int) */
ATOMIC_OPS(uint32_t, 32, long)  /* NOLINT(runtime/int) */

#undef ATOMIC_OPS_INTEGER
#undef ATOMIC_OPS

#undef InterlockedCompareExchange32
#undef InterlockedExchange32
#undef InterlockedExchangeAdd32
#undef InterlockedAnd32
#undef InterlockedOr32
#undef InterlockedXor32
#undef InterlockedExchangeAdd16
#undef InterlockedCompareExchange8
#undef InterlockedExchangeAdd8

#else

#error Unsupported platform!

#endif

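Both platform branches implement the same contract: every operation is sequentially consistent and returns the value that was in memory before the operation. For reference, the same contract expressed with std::atomic (illustration only, not part of the patch):

// Illustration only: the "return the previous value" contract of the helpers
// above, shown with std::atomic and seq_cst ordering.
#include <atomic>
#include <cstdint>
#include <cstdio>

int main() {
  std::atomic<uint32_t> cell{5};

  uint32_t before_add = cell.fetch_add(3, std::memory_order_seq_cst);  // 5

  uint32_t expected = 8;
  // On failure compare_exchange_strong writes the observed value into
  // `expected`; on success `expected` already equals it. Either way `expected`
  // ends up holding the cell's previous value, mirroring how
  // CompareExchangeSeqCst always returns oldval.
  cell.compare_exchange_strong(expected, 42, std::memory_order_seq_cst);

  std::printf("fetch_add saw %u, CAS saw %u, cell is now %u\n", before_add,
              expected, cell.load());  // fetch_add saw 5, CAS saw 8, cell is now 42
}
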
template <typename T>
T FromObject(Handle<Object> number);

template <>
inline uint8_t FromObject<uint8_t>(Handle<Object> number) {
  return NumberToUint32(*number);
}

template <>
inline int8_t FromObject<int8_t>(Handle<Object> number) {
  return NumberToInt32(*number);
}

template <>
inline uint16_t FromObject<uint16_t>(Handle<Object> number) {
  return NumberToUint32(*number);
}

template <>
inline int16_t FromObject<int16_t>(Handle<Object> number) {
  return NumberToInt32(*number);
}

template <>
inline uint32_t FromObject<uint32_t>(Handle<Object> number) {
  return NumberToUint32(*number);
}

template <>
inline int32_t FromObject<int32_t>(Handle<Object> number) {
  return NumberToInt32(*number);
}

inline Object* ToObject(Isolate* isolate, int8_t t) { return Smi::FromInt(t); }

inline Object* ToObject(Isolate* isolate, uint8_t t) { return Smi::FromInt(t); }

inline Object* ToObject(Isolate* isolate, int16_t t) { return Smi::FromInt(t); }

inline Object* ToObject(Isolate* isolate, uint16_t t) {
  return Smi::FromInt(t);
}

inline Object* ToObject(Isolate* isolate, int32_t t) {
  return *isolate->factory()->NewNumber(t);
}

inline Object* ToObject(Isolate* isolate, uint32_t t) {
  return *isolate->factory()->NewNumber(t);
}

template <typename T>
inline Object* DoCompareExchange(Isolate* isolate, void* buffer, size_t index,
                                 Handle<Object> oldobj, Handle<Object> newobj) {
  T oldval = FromObject<T>(oldobj);
  T newval = FromObject<T>(newobj);
  T result =
      CompareExchangeSeqCst(static_cast<T*>(buffer) + index, oldval, newval);
  return ToObject(isolate, result);
}

template <typename T>
inline Object* DoAdd(Isolate* isolate, void* buffer, size_t index,
                     Handle<Object> obj) {
  T value = FromObject<T>(obj);
  T result = AddSeqCst(static_cast<T*>(buffer) + index, value);
  return ToObject(isolate, result);
}

template <typename T>
inline Object* DoSub(Isolate* isolate, void* buffer, size_t index,
                     Handle<Object> obj) {
  T value = FromObject<T>(obj);
  T result = SubSeqCst(static_cast<T*>(buffer) + index, value);
  return ToObject(isolate, result);
}

template <typename T>
inline Object* DoAnd(Isolate* isolate, void* buffer, size_t index,
                     Handle<Object> obj) {
  T value = FromObject<T>(obj);
  T result = AndSeqCst(static_cast<T*>(buffer) + index, value);
  return ToObject(isolate, result);
}

template <typename T>
inline Object* DoOr(Isolate* isolate, void* buffer, size_t index,
                    Handle<Object> obj) {
  T value = FromObject<T>(obj);
  T result = OrSeqCst(static_cast<T*>(buffer) + index, value);
  return ToObject(isolate, result);
}

template <typename T>
inline Object* DoXor(Isolate* isolate, void* buffer, size_t index,
                     Handle<Object> obj) {
  T value = FromObject<T>(obj);
  T result = XorSeqCst(static_cast<T*>(buffer) + index, value);
  return ToObject(isolate, result);
}

}  // anonymous namespace

// Duplicated from objects.h
// V has parameters (Type, type, TYPE, C type, element_size)
#define INTEGER_TYPED_ARRAYS(V)          \
  V(Uint8, uint8, UINT8, uint8_t, 1)     \
  V(Int8, int8, INT8, int8_t, 1)         \
  V(Uint16, uint16, UINT16, uint16_t, 2) \
  V(Int16, int16, INT16, int16_t, 2)     \
  V(Uint32, uint32, UINT32, uint32_t, 4) \
  V(Int32, int32, INT32, int32_t, 4)

// ES #sec-atomics.compareexchange
// Atomics.compareExchange( typedArray, index, expectedValue, replacementValue )
BUILTIN(AtomicsCompareExchange) {
  HandleScope scope(isolate);
  Handle<Object> array = args.atOrUndefined(isolate, 1);
  Handle<Object> index = args.atOrUndefined(isolate, 2);
  Handle<Object> expected_value = args.atOrUndefined(isolate, 3);
  Handle<Object> replacement_value = args.atOrUndefined(isolate, 4);

  Handle<JSTypedArray> sta;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, sta, ValidateSharedIntegerTypedArray(isolate, array));

  Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
  if (maybe_index.IsNothing()) return isolate->heap()->exception();
  size_t i = maybe_index.FromJust();

  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, expected_value, Object::ToInteger(isolate, expected_value));

  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, replacement_value,
      Object::ToInteger(isolate, replacement_value));

  uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
                    NumberToSize(sta->byte_offset());

  switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size)             \
  case kExternal##Type##Array:                                          \
    return DoCompareExchange<ctype>(isolate, source, i, expected_value, \
                                    replacement_value);

    INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    default:
      break;
  }

  UNREACHABLE();
  return isolate->heap()->undefined_value();
}

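The switch bodies in these builtins are generated by an X-macro: INTEGER_TYPED_ARRAYS invokes TYPED_ARRAY_CASE once per integer element type, and each expansion dispatches to the matching template instantiation. A self-contained sketch of the same pattern with hypothetical names (ElementType, MY_INTEGER_TYPES and Process are not V8 code; illustration only):

// Standalone sketch of the X-macro dispatch used above.
#include <cstdint>
#include <cstdio>

enum class ElementType { kUint8, kInt32 };

template <typename T>
void Process(const char* name) {
  std::printf("%s handled with %zu-byte element\n", name, sizeof(T));
}

#define MY_INTEGER_TYPES(V) \
  V(Uint8, uint8_t)         \
  V(Int32, int32_t)

void Dispatch(ElementType type) {
  switch (type) {
#define TYPE_CASE(Name, ctype) \
  case ElementType::k##Name:   \
    return Process<ctype>(#Name);
    MY_INTEGER_TYPES(TYPE_CASE)
#undef TYPE_CASE
  }
}

int main() {
  Dispatch(ElementType::kInt32);  // prints: Int32 handled with 4-byte element
}
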
// ES #sec-atomics.add
// Atomics.add( typedArray, index, value )
BUILTIN(AtomicsAdd) {
  HandleScope scope(isolate);
  Handle<Object> array = args.atOrUndefined(isolate, 1);
  Handle<Object> index = args.atOrUndefined(isolate, 2);
  Handle<Object> value = args.atOrUndefined(isolate, 3);

  Handle<JSTypedArray> sta;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, sta, ValidateSharedIntegerTypedArray(isolate, array));

  Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
  if (maybe_index.IsNothing()) return isolate->heap()->exception();
  size_t i = maybe_index.FromJust();

  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
                                     Object::ToInteger(isolate, value));

  uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
                    NumberToSize(sta->byte_offset());

  switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
  case kExternal##Type##Array:                              \
    return DoAdd<ctype>(isolate, source, i, value);

    INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    default:
      break;
  }

  UNREACHABLE();
  return isolate->heap()->undefined_value();
}

// ES #sec-atomics.sub
// Atomics.sub( typedArray, index, value )
BUILTIN(AtomicsSub) {
  HandleScope scope(isolate);
  Handle<Object> array = args.atOrUndefined(isolate, 1);
  Handle<Object> index = args.atOrUndefined(isolate, 2);
  Handle<Object> value = args.atOrUndefined(isolate, 3);

  Handle<JSTypedArray> sta;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, sta, ValidateSharedIntegerTypedArray(isolate, array));

  Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
  if (maybe_index.IsNothing()) return isolate->heap()->exception();
  size_t i = maybe_index.FromJust();

  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
                                     Object::ToInteger(isolate, value));

  uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
                    NumberToSize(sta->byte_offset());

  switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
  case kExternal##Type##Array:                              \
    return DoSub<ctype>(isolate, source, i, value);

    INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    default:
      break;
  }

  UNREACHABLE();
  return isolate->heap()->undefined_value();
}

// ES #sec-atomics.and
// Atomics.and( typedArray, index, value )
BUILTIN(AtomicsAnd) {
  HandleScope scope(isolate);
  Handle<Object> array = args.atOrUndefined(isolate, 1);
  Handle<Object> index = args.atOrUndefined(isolate, 2);
  Handle<Object> value = args.atOrUndefined(isolate, 3);

  Handle<JSTypedArray> sta;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, sta, ValidateSharedIntegerTypedArray(isolate, array));

  Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
  if (maybe_index.IsNothing()) return isolate->heap()->exception();
  size_t i = maybe_index.FromJust();

  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
                                     Object::ToInteger(isolate, value));

  uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
                    NumberToSize(sta->byte_offset());

  switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
  case kExternal##Type##Array:                              \
    return DoAnd<ctype>(isolate, source, i, value);

    INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    default:
      break;
  }

  UNREACHABLE();
  return isolate->heap()->undefined_value();
}

// ES #sec-atomics.or
// Atomics.or( typedArray, index, value )
BUILTIN(AtomicsOr) {
  HandleScope scope(isolate);
  Handle<Object> array = args.atOrUndefined(isolate, 1);
  Handle<Object> index = args.atOrUndefined(isolate, 2);
  Handle<Object> value = args.atOrUndefined(isolate, 3);

  Handle<JSTypedArray> sta;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, sta, ValidateSharedIntegerTypedArray(isolate, array));

  Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
  if (maybe_index.IsNothing()) return isolate->heap()->exception();
  size_t i = maybe_index.FromJust();

  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
                                     Object::ToInteger(isolate, value));

  uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
                    NumberToSize(sta->byte_offset());

  switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
  case kExternal##Type##Array:                              \
    return DoOr<ctype>(isolate, source, i, value);

    INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    default:
      break;
  }

  UNREACHABLE();
  return isolate->heap()->undefined_value();
}

// ES #sec-atomics.xor
// Atomics.xor( typedArray, index, value )
BUILTIN(AtomicsXor) {
  HandleScope scope(isolate);
  Handle<Object> array = args.atOrUndefined(isolate, 1);
  Handle<Object> index = args.atOrUndefined(isolate, 2);
  Handle<Object> value = args.atOrUndefined(isolate, 3);

  Handle<JSTypedArray> sta;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, sta, ValidateSharedIntegerTypedArray(isolate, array));

  Maybe<size_t> maybe_index = ValidateAtomicAccess(isolate, sta, index);
  if (maybe_index.IsNothing()) return isolate->heap()->exception();
  size_t i = maybe_index.FromJust();

  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
                                     Object::ToInteger(isolate, value));

  uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
                    NumberToSize(sta->byte_offset());

  switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype, size) \
  case kExternal##Type##Array:                              \
    return DoXor<ctype>(isolate, source, i, value);

    INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    default:
      break;
  }

  UNREACHABLE();
  return isolate->heap()->undefined_value();
}

}  // namespace internal
}  // namespace v8
