| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
| 6 * met: | 6 * met: |
| 7 * | 7 * |
| 8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
| (...skipping 225 matching lines...) |
| 236 MediaSource::logAndThrowDOMException(exceptionState, InvalidStateError, "The timestamp offset may not be set while the SourceBuffer's append state is 'PARSING_MEDIA_SEGMENT'."); | 236 MediaSource::logAndThrowDOMException(exceptionState, InvalidStateError, "The timestamp offset may not be set while the SourceBuffer's append state is 'PARSING_MEDIA_SEGMENT'."); |
| 237 return; | 237 return; |
| 238 } | 238 } |
| 239 | 239 |
| 240 // 7. Update the attribute to new timestamp offset. | 240 // 7. Update the attribute to new timestamp offset. |
| 241 m_timestampOffset = offset; | 241 m_timestampOffset = offset; |
| 242 } | 242 } |
| 243 | 243 |
| 244 AudioTrackList& SourceBuffer::audioTracks() | 244 AudioTrackList& SourceBuffer::audioTracks() |
| 245 { | 245 { |
| 246 DCHECK(RuntimeEnabledFeatures::audioVideoTracksEnabled()); | |
| 247 return *m_audioTracks; | 246 return *m_audioTracks; |
| 248 } | 247 } |
| 249 | 248 |
| 250 VideoTrackList& SourceBuffer::videoTracks() | 249 VideoTrackList& SourceBuffer::videoTracks() |
| 251 { | 250 { |
| 252 DCHECK(RuntimeEnabledFeatures::audioVideoTracksEnabled()); | |
| 253 return *m_videoTracks; | 251 return *m_videoTracks; |
| 254 } | 252 } |
| 255 | 253 |
| 256 double SourceBuffer::appendWindowStart() const | 254 double SourceBuffer::appendWindowStart() const |
| 257 { | 255 { |
| 258 return m_appendWindowStart; | 256 return m_appendWindowStart; |
| 259 } | 257 } |
| 260 | 258 |
| 261 void SourceBuffer::setAppendWindowStart(double start, ExceptionState& exceptionState) | 259 void SourceBuffer::setAppendWindowStart(double start, ExceptionState& exceptionState) |
| 262 { | 260 { |
| (...skipping 252 matching lines...) |
| 515 if (isRemoved()) | 513 if (isRemoved()) |
| 516 return; | 514 return; |
| 517 | 515 |
| 518 BLINK_SBLOG << __func__ << " this=" << this; | 516 BLINK_SBLOG << __func__ << " this=" << this; |
| 519 if (m_pendingRemoveStart != -1) { | 517 if (m_pendingRemoveStart != -1) { |
| 520 cancelRemove(); | 518 cancelRemove(); |
| 521 } else { | 519 } else { |
| 522 abortIfUpdating(); | 520 abortIfUpdating(); |
| 523 } | 521 } |
| 524 | 522 |
| 525 if (RuntimeEnabledFeatures::audioVideoTracksEnabled()) { | 523 DCHECK(m_source); |
| 526 DCHECK(m_source); | 524 if (m_source->mediaElement()->audioTracks().length() > 0 |
| 527 if (m_source->mediaElement()->audioTracks().length() > 0 | 525 || m_source->mediaElement()->videoTracks().length() > 0) { |
| 528 || m_source->mediaElement()->videoTracks().length() > 0) { | 526 removeMediaTracks(); |
| 529 removeMediaTracks(); | |
| 530 } | |
| 531 } | 527 } |
| 532 | 528 |
| 533 m_webSourceBuffer->removedFromMediaSource(); | 529 m_webSourceBuffer->removedFromMediaSource(); |
| 534 m_webSourceBuffer.reset(); | 530 m_webSourceBuffer.reset(); |
| 535 m_source = nullptr; | 531 m_source = nullptr; |
| 536 m_asyncEventQueue = nullptr; | 532 m_asyncEventQueue = nullptr; |
| 537 } | 533 } |
| 538 | 534 |
| 539 double SourceBuffer::highestPresentationTimestamp() | 535 double SourceBuffer::highestPresentationTimestamp() |
| 540 { | 536 { |
| 541 DCHECK(!isRemoved()); | 537 DCHECK(!isRemoved()); |
| 542 | 538 |
| 543 double pts = m_webSourceBuffer->highestPresentationTimestamp(); | 539 double pts = m_webSourceBuffer->highestPresentationTimestamp(); |
| 544 BLINK_SBLOG << __func__ << " this=" << this << ", pts=" << pts; | 540 BLINK_SBLOG << __func__ << " this=" << this << ", pts=" << pts; |
| 545 return pts; | 541 return pts; |
| 546 } | 542 } |
| 547 | 543 |
| 548 void SourceBuffer::removeMediaTracks() | 544 void SourceBuffer::removeMediaTracks() |
| 549 { | 545 { |
| 550 DCHECK(RuntimeEnabledFeatures::audioVideoTracksEnabled()); | |
| 551 // Spec: http://w3c.github.io/media-source/#widl-MediaSource-removeSourceBuffer-void-SourceBuffer-sourceBuffer | 546 // Spec: http://w3c.github.io/media-source/#widl-MediaSource-removeSourceBuffer-void-SourceBuffer-sourceBuffer |
| 552 DCHECK(m_source); | 547 DCHECK(m_source); |
| 553 | 548 |
| 554 HTMLMediaElement* mediaElement = m_source->mediaElement(); | 549 HTMLMediaElement* mediaElement = m_source->mediaElement(); |
| 555 DCHECK(mediaElement); | 550 DCHECK(mediaElement); |
| 556 // 3. Let SourceBuffer audioTracks list equal the AudioTrackList object returned by sourceBuffer.audioTracks. | 551 // 3. Let SourceBuffer audioTracks list equal the AudioTrackList object returned by sourceBuffer.audioTracks. |
| 557 // 4. If the SourceBuffer audioTracks list is not empty, then run the following steps: | 552 // 4. If the SourceBuffer audioTracks list is not empty, then run the following steps: |
| 558 // 4.1 Let HTMLMediaElement audioTracks list equal the AudioTrackList object returned by the audioTracks attribute on the HTMLMediaElement. | 553 // 4.1 Let HTMLMediaElement audioTracks list equal the AudioTrackList object returned by the audioTracks attribute on the HTMLMediaElement. |
| 559 // 4.2 Let the removed enabled audio track flag equal false. | 554 // 4.2 Let the removed enabled audio track flag equal false. |
| 560 bool removedEnabledAudioTrack = false; | 555 bool removedEnabledAudioTrack = false; |
| (...skipping 99 matching lines...) |
| 660 return trackDefault ? AtomicString(trackDefault->language()) : ""; | 655 return trackDefault ? AtomicString(trackDefault->language()) : ""; |
| 661 } | 656 } |
| 662 | 657 |
| 663 bool SourceBuffer::initializationSegmentReceived(const WebVector<MediaTrackInfo>& newTracks) | 658 bool SourceBuffer::initializationSegmentReceived(const WebVector<MediaTrackInfo>& newTracks) |
| 664 { | 659 { |
| 665 BLINK_SBLOG << __func__ << " this=" << this << " tracks=" << newTracks.size(); | 660 BLINK_SBLOG << __func__ << " this=" << this << " tracks=" << newTracks.size(); |
| 666 DCHECK(m_source); | 661 DCHECK(m_source); |
| 667 DCHECK(m_source->mediaElement()); | 662 DCHECK(m_source->mediaElement()); |
| 668 DCHECK(m_updating); | 663 DCHECK(m_updating); |
| 669 | 664 |
| 670 if (!RuntimeEnabledFeatures::audioVideoTracksEnabled()) { | |
| 671 if (!m_firstInitializationSegmentReceived) { | |
| 672 m_source->setSourceBufferActive(this); | |
| 673 m_firstInitializationSegmentReceived = true; | |
| 674 } | |
| 675 return true; | |
| 676 } | |
| 677 | |
| 678 // Implementation of Initialization Segment Received, see | 665 // Implementation of Initialization Segment Received, see |
| 679 // https://w3c.github.io/media-source/#sourcebuffer-init-segment-received | 666 // https://w3c.github.io/media-source/#sourcebuffer-init-segment-received |
| 680 | 667 |
| 681 // Sort newTracks into audio and video tracks to facilitate implementation | 668 // Sort newTracks into audio and video tracks to facilitate implementation |
| 682 // of subsequent steps of this algorithm. | 669 // of subsequent steps of this algorithm. |
| 683 Vector<MediaTrackInfo> newAudioTracks; | 670 Vector<MediaTrackInfo> newAudioTracks; |
| 684 Vector<MediaTrackInfo> newVideoTracks; | 671 Vector<MediaTrackInfo> newVideoTracks; |
| 685 for (const MediaTrackInfo& trackInfo : newTracks) { | 672 for (const MediaTrackInfo& trackInfo : newTracks) { |
| 686 const TrackBase* track = nullptr; | 673 const TrackBase* track = nullptr; |
| 687 if (trackInfo.trackType == WebMediaPlayer::AudioTrack) { | 674 if (trackInfo.trackType == WebMediaPlayer::AudioTrack) { |
| (...skipping 572 matching lines...) |
| 1260 visitor->trace(m_removeAsyncPartRunner); | 1247 visitor->trace(m_removeAsyncPartRunner); |
| 1261 visitor->trace(m_appendStreamAsyncPartRunner); | 1248 visitor->trace(m_appendStreamAsyncPartRunner); |
| 1262 visitor->trace(m_stream); | 1249 visitor->trace(m_stream); |
| 1263 visitor->trace(m_audioTracks); | 1250 visitor->trace(m_audioTracks); |
| 1264 visitor->trace(m_videoTracks); | 1251 visitor->trace(m_videoTracks); |
| 1265 EventTargetWithInlineData::trace(visitor); | 1252 EventTargetWithInlineData::trace(visitor); |
| 1266 ActiveDOMObject::trace(visitor); | 1253 ActiveDOMObject::trace(visitor); |
| 1267 } | 1254 } |
| 1268 | 1255 |
| 1269 } // namespace blink | 1256 } // namespace blink |
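
For context on what the removed RuntimeEnabledFeatures::audioVideoTracksEnabled() checks did, here is a minimal standalone sketch of the before/after control flow in SourceBuffer::removedFromMediaSource(). The TrackList type, the track ids, and this removeMediaTracks() signature are hypothetical stand-ins for illustration, not the Blink classes in the diff above.

    // Standalone sketch (not Blink code): the patch drops the runtime feature-flag
    // gate, so track cleanup now runs unconditionally when either list is non-empty.
    #include <cstddef>
    #include <iostream>
    #include <vector>

    struct TrackList {
        std::vector<int> tracks;                          // hypothetical track ids
        std::size_t length() const { return tracks.size(); }
        void clear() { tracks.clear(); }
    };

    // Mirrors SourceBuffer::removeMediaTracks() in spirit only.
    void removeMediaTracks(TrackList& audio, TrackList& video) {
        audio.clear();
        video.clear();
    }

    int main() {
        TrackList audio{{1, 2}};
        TrackList video{};
        // Old code: if (audioVideoTracksEnabled()) { ...same emptiness check and call... }
        // New code: no feature-flag gate, only the emptiness check remains.
        if (audio.length() > 0 || video.length() > 0)
            removeMediaTracks(audio, video);
        std::cout << audio.length() << " " << video.length() << "\n";  // prints "0 0"
        return 0;
    }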