OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" | 5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 | 8 |
9 #include <utility> | 9 #include <utility> |
10 #include <vector> | 10 #include <vector> |
(...skipping 12 matching lines...) Expand all Loading... |
23 #include "build/build_config.h" | 23 #include "build/build_config.h" |
24 #include "content/common/media/media_stream_messages.h" | 24 #include "content/common/media/media_stream_messages.h" |
25 #include "content/public/common/content_client.h" | 25 #include "content/public/common/content_client.h" |
26 #include "content/public/common/content_switches.h" | 26 #include "content/public/common/content_switches.h" |
27 #include "content/public/common/feature_h264_with_openh264_ffmpeg.h" | 27 #include "content/public/common/feature_h264_with_openh264_ffmpeg.h" |
28 #include "content/public/common/features.h" | 28 #include "content/public/common/features.h" |
29 #include "content/public/common/renderer_preferences.h" | 29 #include "content/public/common/renderer_preferences.h" |
30 #include "content/public/common/webrtc_ip_handling_policy.h" | 30 #include "content/public/common/webrtc_ip_handling_policy.h" |
31 #include "content/public/renderer/content_renderer_client.h" | 31 #include "content/public/renderer/content_renderer_client.h" |
32 #include "content/renderer/media/media_stream.h" | 32 #include "content/renderer/media/media_stream.h" |
| 33 #include "content/renderer/media/media_stream_audio_processor.h" |
| 34 #include "content/renderer/media/media_stream_audio_processor_options.h" |
| 35 #include "content/renderer/media/media_stream_audio_source.h" |
| 36 #include "content/renderer/media/media_stream_constraints_util.h" |
33 #include "content/renderer/media/media_stream_video_source.h" | 37 #include "content/renderer/media/media_stream_video_source.h" |
34 #include "content/renderer/media/media_stream_video_track.h" | 38 #include "content/renderer/media/media_stream_video_track.h" |
35 #include "content/renderer/media/peer_connection_identity_store.h" | 39 #include "content/renderer/media/peer_connection_identity_store.h" |
36 #include "content/renderer/media/rtc_peer_connection_handler.h" | 40 #include "content/renderer/media/rtc_peer_connection_handler.h" |
37 #include "content/renderer/media/rtc_video_decoder_factory.h" | 41 #include "content/renderer/media/rtc_video_decoder_factory.h" |
38 #include "content/renderer/media/rtc_video_encoder_factory.h" | 42 #include "content/renderer/media/rtc_video_encoder_factory.h" |
| 43 #include "content/renderer/media/webaudio_capturer_source.h" |
| 44 #include "content/renderer/media/webrtc/media_stream_remote_audio_track.h" |
39 #include "content/renderer/media/webrtc/stun_field_trial.h" | 45 #include "content/renderer/media/webrtc/stun_field_trial.h" |
| 46 #include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h" |
40 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" | 47 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" |
41 #include "content/renderer/media/webrtc_audio_device_impl.h" | 48 #include "content/renderer/media/webrtc_audio_device_impl.h" |
| 49 #include "content/renderer/media/webrtc_local_audio_track.h" |
42 #include "content/renderer/media/webrtc_logging.h" | 50 #include "content/renderer/media/webrtc_logging.h" |
43 #include "content/renderer/media/webrtc_uma_histograms.h" | 51 #include "content/renderer/media/webrtc_uma_histograms.h" |
44 #include "content/renderer/p2p/empty_network_manager.h" | 52 #include "content/renderer/p2p/empty_network_manager.h" |
45 #include "content/renderer/p2p/filtering_network_manager.h" | 53 #include "content/renderer/p2p/filtering_network_manager.h" |
46 #include "content/renderer/p2p/ipc_network_manager.h" | 54 #include "content/renderer/p2p/ipc_network_manager.h" |
47 #include "content/renderer/p2p/ipc_socket_factory.h" | 55 #include "content/renderer/p2p/ipc_socket_factory.h" |
48 #include "content/renderer/p2p/port_allocator.h" | 56 #include "content/renderer/p2p/port_allocator.h" |
49 #include "content/renderer/render_frame_impl.h" | 57 #include "content/renderer/render_frame_impl.h" |
50 #include "content/renderer/render_thread_impl.h" | 58 #include "content/renderer/render_thread_impl.h" |
51 #include "content/renderer/render_view_impl.h" | 59 #include "content/renderer/render_view_impl.h" |
52 #include "crypto/openssl_util.h" | 60 #include "crypto/openssl_util.h" |
53 #include "jingle/glue/thread_wrapper.h" | 61 #include "jingle/glue/thread_wrapper.h" |
54 #include "media/base/media_permission.h" | 62 #include "media/base/media_permission.h" |
55 #include "media/filters/ffmpeg_glue.h" | 63 #include "media/filters/ffmpeg_glue.h" |
56 #include "media/renderers/gpu_video_accelerator_factories.h" | 64 #include "media/renderers/gpu_video_accelerator_factories.h" |
57 #include "third_party/WebKit/public/platform/WebMediaConstraints.h" | 65 #include "third_party/WebKit/public/platform/WebMediaConstraints.h" |
58 #include "third_party/WebKit/public/platform/WebMediaStream.h" | 66 #include "third_party/WebKit/public/platform/WebMediaStream.h" |
59 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h" | 67 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h" |
60 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" | 68 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" |
61 #include "third_party/WebKit/public/platform/WebURL.h" | 69 #include "third_party/WebKit/public/platform/WebURL.h" |
62 #include "third_party/WebKit/public/web/WebDocument.h" | 70 #include "third_party/WebKit/public/web/WebDocument.h" |
63 #include "third_party/WebKit/public/web/WebFrame.h" | 71 #include "third_party/WebKit/public/web/WebFrame.h" |
64 #include "third_party/webrtc/api/dtlsidentitystore.h" | 72 #include "third_party/webrtc/api/dtlsidentitystore.h" |
65 #include "third_party/webrtc/api/mediaconstraintsinterface.h" | 73 #include "third_party/webrtc/api/mediaconstraintsinterface.h" |
66 #include "third_party/webrtc/base/ssladapter.h" | 74 #include "third_party/webrtc/base/ssladapter.h" |
| 75 #include "third_party/webrtc/media/base/mediachannel.h" |
67 #include "third_party/webrtc/modules/video_coding/codecs/h264/include/h264.h" | 76 #include "third_party/webrtc/modules/video_coding/codecs/h264/include/h264.h" |
68 | 77 |
69 #if defined(OS_ANDROID) | 78 #if defined(OS_ANDROID) |
70 #include "media/base/android/media_codec_util.h" | 79 #include "media/base/android/media_codec_util.h" |
71 #endif | 80 #endif |
72 | 81 |
73 namespace content { | 82 namespace content { |
74 | 83 |
75 namespace { | 84 namespace { |
76 | 85 |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
114 PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler( | 123 PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler( |
115 blink::WebRTCPeerConnectionHandlerClient* client) { | 124 blink::WebRTCPeerConnectionHandlerClient* client) { |
116 // Save histogram data so we can see how much PeerConnetion is used. | 125 // Save histogram data so we can see how much PeerConnetion is used. |
117 // The histogram counts the number of calls to the JS API | 126 // The histogram counts the number of calls to the JS API |
118 // webKitRTCPeerConnection. | 127 // webKitRTCPeerConnection. |
119 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); | 128 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); |
120 | 129 |
121 return new RTCPeerConnectionHandler(client, this); | 130 return new RTCPeerConnectionHandler(client, this); |
122 } | 131 } |
123 | 132 |
// Performs the renderer-side initialization of an audio MediaStream source
// before it can feed a peer connection: translates the blink audio
// constraints into cricket::AudioOptions, creates a WebRtcAudioCapturer for
// the device described by |source_data|, and attaches a libjingle
// LocalAudioSource carrying the options.
//
// |render_frame_id| identifies the requesting frame (forwarded to the
// capturer factory). |audio_constraints| are the parsed getUserMedia
// constraints. |source_data| is the source object being initialized; it
// takes ownership of the created capturer and a reference to the libjingle
// source.
//
// Returns false when either the capturer or the libjingle source could not
// be created (the source is left partially initialized); true on success.
bool PeerConnectionDependencyFactory::InitializeMediaStreamAudioSource(
    int render_frame_id,
    const blink::WebMediaConstraints& audio_constraints,
    MediaStreamAudioSource* source_data) {
  DVLOG(1) << "InitializeMediaStreamAudioSources()";

  // Do additional source initialization if the audio source is a valid
  // microphone or tab audio.

  // Local copy on purpose: the effects bitmask may be edited below before
  // the info is handed to the capturer.
  StreamDeviceInfo device_info = source_data->device_info();

  cricket::AudioOptions options;
  // Apply relevant constraints. Each ConstraintToOptional() presumably
  // yields an unset optional when the page did not specify the constraint,
  // leaving libjingle's default in effect — TODO confirm against
  // media_stream_constraints_util.h.
  options.echo_cancellation = ConstraintToOptional(
      audio_constraints, &blink::WebMediaTrackConstraintSet::echoCancellation);
  options.delay_agnostic_aec = ConstraintToOptional(
      audio_constraints,
      &blink::WebMediaTrackConstraintSet::googDAEchoCancellation);
  options.auto_gain_control = ConstraintToOptional(
      audio_constraints,
      &blink::WebMediaTrackConstraintSet::googAutoGainControl);
  options.experimental_agc = ConstraintToOptional(
      audio_constraints,
      &blink::WebMediaTrackConstraintSet::googExperimentalAutoGainControl);
  options.noise_suppression = ConstraintToOptional(
      audio_constraints,
      &blink::WebMediaTrackConstraintSet::googNoiseSuppression);
  options.experimental_ns = ConstraintToOptional(
      audio_constraints,
      &blink::WebMediaTrackConstraintSet::googExperimentalNoiseSuppression);
  options.highpass_filter = ConstraintToOptional(
      audio_constraints,
      &blink::WebMediaTrackConstraintSet::googHighpassFilter);
  options.typing_detection = ConstraintToOptional(
      audio_constraints,
      &blink::WebMediaTrackConstraintSet::googTypingNoiseDetection);
  options.stereo_swapping = ConstraintToOptional(
      audio_constraints,
      &blink::WebMediaTrackConstraintSet::googAudioMirroring);

  MediaAudioConstraints::ApplyFixedAudioConstraints(&options);

  // If the device reports hardware echo cancellation, software AEC is
  // disabled unconditionally; the hardware canceller itself is stripped from
  // the device only when the page explicitly asked for
  // googEchoCancellation == false.
  if (device_info.device.input.effects &
      media::AudioParameters::ECHO_CANCELLER) {
    // TODO(hta): Figure out if we should be looking at echoCancellation.
    // Previous code had googEchoCancellation only.
    const blink::BooleanConstraint& echoCancellation =
        audio_constraints.basic().googEchoCancellation;
    if (echoCancellation.hasExact() && !echoCancellation.exact()) {
      device_info.device.input.effects &=
          ~media::AudioParameters::ECHO_CANCELLER;
    }
    options.echo_cancellation = rtc::Optional<bool>(false);
  }

  std::unique_ptr<WebRtcAudioCapturer> capturer = CreateAudioCapturer(
      render_frame_id, device_info, audio_constraints, source_data);
  if (!capturer.get()) {
    const std::string log_string =
        "PCDF::InitializeMediaStreamAudioSource: fails to create capturer";
    WebRtcLogMessage(log_string);
    DVLOG(1) << log_string;
    // TODO(xians): Don't we need to check if source_observer is observing
    // something? If not, then it looks like we have a leak here.
    // OTOH, if it _is_ observing something, then the callback might
    // be called multiple times which is likely also a bug.
    return false;
  }
  source_data->SetAudioCapturer(std::move(capturer));

  // Creates a LocalAudioSource object which holds audio options.
  // TODO(xians): The option should apply to the track instead of the source.
  // TODO(perkj): Move audio constraints parsing to Chrome.
  // Currently there are a few constraints that are parsed by libjingle and
  // the state is set to ended if parsing fails.
  scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
      CreateLocalAudioSource(options).get());
  if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
    DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
    return false;
  }
  source_data->SetLocalAudioSource(rtc_source.get());
  return true;
}
| 217 |
124 WebRtcVideoCapturerAdapter* | 218 WebRtcVideoCapturerAdapter* |
125 PeerConnectionDependencyFactory::CreateVideoCapturer( | 219 PeerConnectionDependencyFactory::CreateVideoCapturer( |
126 bool is_screeencast) { | 220 bool is_screeencast) { |
127 // We need to make sure the libjingle thread wrappers have been created | 221 // We need to make sure the libjingle thread wrappers have been created |
128 // before we can use an instance of a WebRtcVideoCapturerAdapter. This is | 222 // before we can use an instance of a WebRtcVideoCapturerAdapter. This is |
129 // since the base class of WebRtcVideoCapturerAdapter is a | 223 // since the base class of WebRtcVideoCapturerAdapter is a |
130 // cricket::VideoCapturer and it uses the libjingle thread wrappers. | 224 // cricket::VideoCapturer and it uses the libjingle thread wrappers. |
131 if (!GetPcFactory().get()) | 225 if (!GetPcFactory().get()) |
132 return NULL; | 226 return NULL; |
133 return new WebRtcVideoCapturerAdapter(is_screeencast); | 227 return new WebRtcVideoCapturerAdapter(is_screeencast); |
(...skipping 290 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
424 } | 518 } |
425 | 519 |
426 scoped_refptr<webrtc::AudioSourceInterface> | 520 scoped_refptr<webrtc::AudioSourceInterface> |
427 PeerConnectionDependencyFactory::CreateLocalAudioSource( | 521 PeerConnectionDependencyFactory::CreateLocalAudioSource( |
428 const cricket::AudioOptions& options) { | 522 const cricket::AudioOptions& options) { |
429 scoped_refptr<webrtc::AudioSourceInterface> source = | 523 scoped_refptr<webrtc::AudioSourceInterface> source = |
430 GetPcFactory()->CreateAudioSource(options).get(); | 524 GetPcFactory()->CreateAudioSource(options).get(); |
431 return source; | 525 return source; |
432 } | 526 } |
433 | 527 |
// Creates the native (Chromium + libjingle) representation of a local audio
// track and attaches it to the given blink track as its extra data.
//
// If the track's source has no MediaStreamAudioSource yet and requires an
// audio consumer, the source is a WebAudio stream and a dedicated WebAudio
// capturer/source pair is created first; otherwise a missing source is a
// programming error (NOTREACHED).
//
// Ownership: the WebRtcLocalAudioTrack is released into the blink track via
// setExtraData() at the end; the adapter it wraps is refcounted.
void PeerConnectionDependencyFactory::CreateLocalAudioTrack(
    const blink::WebMediaStreamTrack& track) {
  blink::WebMediaStreamSource source = track.source();
  DCHECK_EQ(source.getType(), blink::WebMediaStreamSource::TypeAudio);
  MediaStreamAudioSource* source_data = MediaStreamAudioSource::From(source);

  if (!source_data) {
    if (source.requiresAudioConsumer()) {
      // We're adding a WebAudio MediaStream.
      // Create a specific capturer for each WebAudio consumer.
      CreateWebAudioSource(&source);
      source_data = MediaStreamAudioSource::From(source);
      DCHECK(source_data->webaudio_capturer());
    } else {
      NOTREACHED() << "Local track missing MediaStreamAudioSource instance.";
      return;
    }
  }

  // Creates an adapter to hold all the libjingle objects.
  scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
      WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(),
                                           source_data->local_audio_source()));
  // Propagate the blink track's enabled state to the libjingle track.
  static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled(
      track.isEnabled());

  // TODO(xians): Merge |source| to the capturer(). We can't do this today
  // because only one capturer() is supported while one |source| is created
  // for each audio track.
  std::unique_ptr<WebRtcLocalAudioTrack> audio_track(
      new WebRtcLocalAudioTrack(adapter.get()));

  // Start the source and connect the audio data flow to the track. The stop
  // callback is bound through a weak pointer so a destroyed source is a
  // no-op.
  //
  // TODO(miu): This logic will be moved to MediaStreamAudioSource (or a
  // subclass of it) in soon-upcoming changes.
  audio_track->Start(base::Bind(&MediaStreamAudioSource::StopAudioDeliveryTo,
                                source_data->GetWeakPtr(),
                                audio_track.get()));
  // Exactly one capturer kind must be present: WebAudio or microphone/tab.
  if (source_data->webaudio_capturer())
    source_data->webaudio_capturer()->Start(audio_track.get());
  else if (source_data->audio_capturer())
    source_data->audio_capturer()->AddTrack(audio_track.get());
  else
    NOTREACHED();

  // Pass the ownership of the native local audio track to the blink track.
  blink::WebMediaStreamTrack writable_track = track;
  writable_track.setExtraData(audio_track.release());
}
| 578 |
| 579 void PeerConnectionDependencyFactory::CreateRemoteAudioTrack( |
| 580 const blink::WebMediaStreamTrack& track) { |
| 581 blink::WebMediaStreamSource source = track.source(); |
| 582 DCHECK_EQ(source.getType(), blink::WebMediaStreamSource::TypeAudio); |
| 583 DCHECK(source.remote()); |
| 584 DCHECK(MediaStreamAudioSource::From(source)); |
| 585 |
| 586 blink::WebMediaStreamTrack writable_track = track; |
| 587 writable_track.setExtraData( |
| 588 new MediaStreamRemoteAudioTrack(source, track.isEnabled())); |
| 589 } |
| 590 |
| 591 void PeerConnectionDependencyFactory::CreateWebAudioSource( |
| 592 blink::WebMediaStreamSource* source) { |
| 593 DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()"; |
| 594 |
| 595 MediaStreamAudioSource* source_data = new MediaStreamAudioSource(); |
| 596 source_data->SetWebAudioCapturer( |
| 597 base::WrapUnique(new WebAudioCapturerSource(source))); |
| 598 |
| 599 // Create a LocalAudioSource object which holds audio options. |
| 600 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. |
| 601 cricket::AudioOptions options; |
| 602 source_data->SetLocalAudioSource(CreateLocalAudioSource(options).get()); |
| 603 source->setExtraData(source_data); |
| 604 } |
| 605 |
434 scoped_refptr<webrtc::VideoTrackInterface> | 606 scoped_refptr<webrtc::VideoTrackInterface> |
435 PeerConnectionDependencyFactory::CreateLocalVideoTrack( | 607 PeerConnectionDependencyFactory::CreateLocalVideoTrack( |
436 const std::string& id, | 608 const std::string& id, |
437 webrtc::VideoTrackSourceInterface* source) { | 609 webrtc::VideoTrackSourceInterface* source) { |
438 return GetPcFactory()->CreateVideoTrack(id, source).get(); | 610 return GetPcFactory()->CreateVideoTrack(id, source).get(); |
439 } | 611 } |
440 | 612 |
441 scoped_refptr<webrtc::VideoTrackInterface> | 613 scoped_refptr<webrtc::VideoTrackInterface> |
442 PeerConnectionDependencyFactory::CreateLocalVideoTrack( | 614 PeerConnectionDependencyFactory::CreateLocalVideoTrack( |
443 const std::string& id, cricket::VideoCapturer* capturer) { | 615 const std::string& id, cricket::VideoCapturer* capturer) { |
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
562 // Stopping the thread will wait until all tasks have been | 734 // Stopping the thread will wait until all tasks have been |
563 // processed before returning. We wait for the above task to finish before | 735 // processed before returning. We wait for the above task to finish before |
564 // letting the the function continue to avoid any potential race issues. | 736 // letting the the function continue to avoid any potential race issues. |
565 chrome_worker_thread_.Stop(); | 737 chrome_worker_thread_.Stop(); |
566 } else { | 738 } else { |
567 NOTREACHED() << "Worker thread not running."; | 739 NOTREACHED() << "Worker thread not running."; |
568 } | 740 } |
569 } | 741 } |
570 } | 742 } |
571 | 743 |
| 744 std::unique_ptr<WebRtcAudioCapturer> |
| 745 PeerConnectionDependencyFactory::CreateAudioCapturer( |
| 746 int render_frame_id, |
| 747 const StreamDeviceInfo& device_info, |
| 748 const blink::WebMediaConstraints& constraints, |
| 749 MediaStreamAudioSource* audio_source) { |
| 750 // TODO(xians): Handle the cases when gUM is called without a proper render |
| 751 // view, for example, by an extension. |
| 752 DCHECK_GE(render_frame_id, 0); |
| 753 |
| 754 EnsureWebRtcAudioDeviceImpl(); |
| 755 DCHECK(GetWebRtcAudioDevice()); |
| 756 return WebRtcAudioCapturer::CreateCapturer( |
| 757 render_frame_id, device_info, constraints, GetWebRtcAudioDevice(), |
| 758 audio_source); |
| 759 } |
| 760 |
572 void PeerConnectionDependencyFactory::EnsureInitialized() { | 761 void PeerConnectionDependencyFactory::EnsureInitialized() { |
573 DCHECK(CalledOnValidThread()); | 762 DCHECK(CalledOnValidThread()); |
574 GetPcFactory(); | 763 GetPcFactory(); |
575 } | 764 } |
576 | 765 |
577 scoped_refptr<base::SingleThreadTaskRunner> | 766 scoped_refptr<base::SingleThreadTaskRunner> |
578 PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const { | 767 PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const { |
579 DCHECK(CalledOnValidThread()); | 768 DCHECK(CalledOnValidThread()); |
580 return chrome_worker_thread_.IsRunning() ? chrome_worker_thread_.task_runner() | 769 return chrome_worker_thread_.IsRunning() ? chrome_worker_thread_.task_runner() |
581 : nullptr; | 770 : nullptr; |
582 } | 771 } |
583 | 772 |
584 scoped_refptr<base::SingleThreadTaskRunner> | 773 scoped_refptr<base::SingleThreadTaskRunner> |
585 PeerConnectionDependencyFactory::GetWebRtcSignalingThread() const { | 774 PeerConnectionDependencyFactory::GetWebRtcSignalingThread() const { |
586 DCHECK(CalledOnValidThread()); | 775 DCHECK(CalledOnValidThread()); |
587 return chrome_signaling_thread_.IsRunning() | 776 return chrome_signaling_thread_.IsRunning() |
588 ? chrome_signaling_thread_.task_runner() | 777 ? chrome_signaling_thread_.task_runner() |
589 : nullptr; | 778 : nullptr; |
590 } | 779 } |
591 | 780 |
592 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() { | 781 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() { |
593 if (audio_device_.get()) | 782 if (audio_device_.get()) |
594 return; | 783 return; |
595 | 784 |
596 audio_device_ = new WebRtcAudioDeviceImpl(); | 785 audio_device_ = new WebRtcAudioDeviceImpl(); |
597 } | 786 } |
598 | 787 |
599 } // namespace content | 788 } // namespace content |
OLD | NEW |