| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2012 Google Inc. All rights reserved. | 2 * Copyright (C) 2012 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions | 5 * modification, are permitted provided that the following conditions |
| 6 * are met: | 6 * are met: |
| 7 * | 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright | 8 * 1. Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright | 10 * 2. Redistributions in binary form must reproduce the above copyright |
| (...skipping 36 matching lines...) |
| 47 // The state can only transition to the next state, except for the FINISHED_STATE which can | 47 // The state can only transition to the next state, except for the FINISHED_STATE which can |
| 48 // never be changed. | 48 // never be changed. |
| 49 enum PlaybackState { | 49 enum PlaybackState { |
| 50 // These must be defined with the same names and values as in the .idl file. | 50 // These must be defined with the same names and values as in the .idl file. |
| 51 UNSCHEDULED_STATE = 0, | 51 UNSCHEDULED_STATE = 0, |
| 52 SCHEDULED_STATE = 1, | 52 SCHEDULED_STATE = 1, |
| 53 PLAYING_STATE = 2, | 53 PLAYING_STATE = 2, |
| 54 FINISHED_STATE = 3 | 54 FINISHED_STATE = 3 |
| 55 }; | 55 }; |
| 56 | 56 |
| 57 // This helper class handles the lifetime of an AudioScheduledSourceNode with an onended event |
| 58 // listener. This keeps the node alive until the event listener is processed. |
| 59 class NotifyEndedTask { |
| 60 public: |
| 61 NotifyEndedTask(PassRefPtr<AudioScheduledSourceNode> scheduledNode); |
| 62 void notifyEnded(); |
| 63 |
| 64 private: |
| 65 RefPtr<AudioScheduledSourceNode> m_scheduledNode; |
| 66 }; |
| 67 |
| 57 AudioScheduledSourceNode(AudioContext*, float sampleRate); | 68 AudioScheduledSourceNode(AudioContext*, float sampleRate); |
| 58 | 69 |
| 59 // Scheduling. | 70 // Scheduling. |
| 60 void start(double when); | 71 void start(double when); |
| 61 void stop(double when); | 72 void stop(double when); |
| 62 | 73 |
| 63 void noteOn(double when); | 74 void noteOn(double when); |
| 64 void noteOff(double when); | 75 void noteOff(double when); |
| 65 | 76 |
| 66 unsigned short playbackState() const { return static_cast<unsigned short>(m_playbackState); } | 77 unsigned short playbackState() const { return static_cast<unsigned short>(m_playbackState); } |
| (...skipping 13 matching lines...) |
| 80 // nonSilentFramesToProcess : Number of frames rendering non-silence (will be <= quantumFrameSize). | 91 // nonSilentFramesToProcess : Number of frames rendering non-silence (will be <= quantumFrameSize). |
| 81 void updateSchedulingInfo(size_t quantumFrameSize, | 92 void updateSchedulingInfo(size_t quantumFrameSize, |
| 82 AudioBus* outputBus, | 93 AudioBus* outputBus, |
| 83 size_t& quantumFrameOffset, | 94 size_t& quantumFrameOffset, |
| 84 size_t& nonSilentFramesToProcess); | 95 size_t& nonSilentFramesToProcess); |
| 85 | 96 |
| 86 // Called when we have no more sound to play or the noteOff() time has been reached. | 97 // Called when we have no more sound to play or the noteOff() time has been reached. |
| 87 virtual void finish(); | 98 virtual void finish(); |
| 88 | 99 |
| 89 static void notifyEndedDispatch(void*); | 100 static void notifyEndedDispatch(void*); |
| 90 void notifyEnded(); | |
| 91 | 101 |
| 92 PlaybackState m_playbackState; | 102 PlaybackState m_playbackState; |
| 93 | 103 |
| 94 // m_startTime is the time to start playing based on the context's timeline (0 or a time less than the context's current time means "now"). | 104 // m_startTime is the time to start playing based on the context's timeline (0 or a time less than the context's current time means "now"). |
| 95 double m_startTime; // in seconds | 105 double m_startTime; // in seconds |
| 96 | 106 |
| 97 // m_endTime is the time to stop playing based on the context's timeline (0 or a time less than the context's current time means "now"). | 107 // m_endTime is the time to stop playing based on the context's timeline (0 or a time less than the context's current time means "now"). |
| 98 // If it hasn't been set explicitly, then the sound will not stop playing (if looping) or will stop when the end of the AudioBuffer | 108 // If it hasn't been set explicitly, then the sound will not stop playing (if looping) or will stop when the end of the AudioBuffer |
| 99 // has been reached. | 109 // has been reached. |
| 100 double m_endTime; // in seconds | 110 double m_endTime; // in seconds |
| 101 | 111 |
| 102 bool m_hasEndedListener; | 112 bool m_hasEndedListener; |
| 103 | 113 |
| 104 static const double UnknownTime; | 114 static const double UnknownTime; |
| 105 }; | 115 }; |
| 106 | 116 |
| 107 } // namespace WebCore | 117 } // namespace WebCore |
| 108 | 118 |
| 109 #endif // AudioScheduledSourceNode_h | 119 #endif // AudioScheduledSourceNode_h |
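
The NotifyEndedTask added above holds a RefPtr to the node so the node cannot be destroyed before the queued onended notification has been processed. The snippet below is only a self-contained sketch of that lifetime pattern: the names are hypothetical and it substitutes std::shared_ptr for WebKit's RefPtr/PassRefPtr, so it is an illustration of the idea rather than the code in this patch.

```cpp
// Illustrative analogue of NotifyEndedTask's lifetime handling (hypothetical
// names, std::shared_ptr instead of WebKit's RefPtr/PassRefPtr).
#include <functional>
#include <iostream>
#include <memory>
#include <queue>

struct SourceNode {
    std::function<void()> onended; // listener set by the application
    ~SourceNode() { std::cout << "node destroyed\n"; }
};

// Keeps a strong reference to the node for as long as the task is queued,
// mirroring NotifyEndedTask's RefPtr<AudioScheduledSourceNode> member.
class NotifyEndedTask {
public:
    explicit NotifyEndedTask(std::shared_ptr<SourceNode> node) : m_node(std::move(node)) {}
    void notifyEnded() {
        if (m_node->onended)
            m_node->onended(); // the node is guaranteed to still be alive here
    }
private:
    std::shared_ptr<SourceNode> m_node;
};

int main() {
    std::queue<NotifyEndedTask> mainThreadQueue;
    {
        auto node = std::make_shared<SourceNode>();
        node->onended = [] { std::cout << "onended fired\n"; };
        // Simulate the rendering side finishing playback and queuing the
        // notification; the task's strong reference keeps the node alive...
        mainThreadQueue.emplace(node);
    } // ...even after the last external reference goes away here.

    // Later, the queued task runs the listener and only then lets go of the node.
    mainThreadQueue.front().notifyEnded();
    mainThreadQueue.pop(); // node destroyed only after the listener has run
    return 0;
}
```

Putting the strong reference inside the queued task, rather than relying on the caller, is what lets the last external reference drop without racing the deferred dispatch.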
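
updateSchedulingInfo() is declared above to report, per render quantum, the offset of the first audible frame and the number of non-silent frames to process (always <= quantumFrameSize). The sketch below only illustrates that contract under simplifying assumptions (a concrete end time, non-negative start time); the names are hypothetical and this is not the WebKit implementation.

```cpp
// Hypothetical sketch of the updateSchedulingInfo() contract, not WebKit code.
#include <algorithm>
#include <cstddef>
#include <cstdint>

struct SchedulingInfo {
    size_t quantumFrameOffset;       // first audible frame within the quantum
    size_t nonSilentFramesToProcess; // audible frame count, always <= quantumFrameSize
};

// Maps the scheduled [startTime, endTime) interval (seconds on the context
// timeline) onto the render quantum that begins at quantumStartFrame.
SchedulingInfo computeSchedulingInfo(double startTime, double endTime, double sampleRate,
                                     uint64_t quantumStartFrame, size_t quantumFrameSize)
{
    const uint64_t quantumEndFrame = quantumStartFrame + quantumFrameSize;

    // A start time of 0 or in the past means "now", i.e. the top of this quantum.
    const uint64_t startFrame =
        std::max<uint64_t>(static_cast<uint64_t>(startTime * sampleRate), quantumStartFrame);
    // An unset end time (UnknownTime in the header) would mean "never stop";
    // this sketch assumes it has already been resolved to a concrete time.
    const uint64_t endFrame = static_cast<uint64_t>(endTime * sampleRate);

    SchedulingInfo info = {0, 0};
    if (startFrame >= quantumEndFrame || endFrame <= startFrame)
        return info; // nothing audible falls inside this quantum

    const uint64_t lastFrame = std::min(endFrame, quantumEndFrame);
    info.quantumFrameOffset = static_cast<size_t>(startFrame - quantumStartFrame);
    info.nonSilentFramesToProcess = static_cast<size_t>(lastFrame - startFrame);
    return info;
}
```

Clamping the start to the top of the quantum and the end to the bottom is what keeps nonSilentFramesToProcess within the bound stated in the header comment.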