source: trunk/src_apollo/org/apollo/audio/ApolloPlaybackMixer.java@ 315

Last change on this file since 315 was 315, checked in by bjn8, 16 years ago

Apollo spin-off added

File size: 22.5 KB
package org.apollo.audio;

import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;

import org.apollo.mvc.AbstractSubject;
import org.apollo.mvc.SubjectChangedEvent;
import org.apollo.util.AudioSystemLog;
import org.apollo.util.TrackModelHandler;
import org.apollo.util.TrackModelLoadManager;

/**
 * A software playback mixer.
 *
 * @author Brook Novak
 *
 */
public class ApolloPlaybackMixer extends AbstractSubject implements TrackModelHandler {

    /** The timeline frame represents the global frame counter which all tracks synchronize to. */
    private long timelineFrame = 0; // Wrapping ignored.

    private TreeSet<TrackSequence> sequenceGraph = new TreeSet<TrackSequence>();

    private PlaybackThread playbackThread = null;

    private float masterVolume = 1.0f;
    private boolean isMasterMuteOn = false;

    private boolean isSoloEnable = false;

    private static ApolloPlaybackMixer instance = new ApolloPlaybackMixer();
    private ApolloPlaybackMixer() {
        // When a TrackModel is loading - look in here to see if one is in memory
        TrackModelLoadManager.getInstance().addTrackModelHandler(this);
    }

    public static ApolloPlaybackMixer getInstance() {
        return instance;
    }

    /**
     * Stops all playback... kills thread(s).
     */
    public void releaseResources() {

        // Quickly stop playback thread
        if (playbackThread != null)
            playbackThread.stopPlayback(); // will die momentarily

        // Release references ... dispose of track memory
        stopAll();

    }


    /**
     * Sets the master volume of the output mixer.
     *
     * The master volume is not adjusted directly in the hardware due to Java Sound's
     * sketchy API... the master volume is simulated in software.
     *
     * An AudioControlValueChangedEvent event is fired with a FloatControl.Type.VOLUME
     * type, and the new volume (a float, clamped to the range 0-1) is passed as the
     * event value.
     *
     * The volumes of all audio created with the SampledAudioManager are updated
     * to the new mix.
     *
     * @param volume The new volume to set. Ranges from 0-1.
     */
    public void setMasterVolume(float volume) {

        // Clamp volume argument
        if (volume < 0.0f)
            volume = 0.0f;
        else if (volume > 1.0f)
            volume = 1.0f;

        if (volume == masterVolume)
            return;

        masterVolume = volume;

        // Notify observers
        fireSubjectChanged(new SubjectChangedEvent(ApolloSubjectChangedEvent.VOLUME));
    }


    /**
     * Sets the master mute of the output mixer.
     *
     * The master mute is not adjusted directly in the hardware due to Java Sound's
     * sketchy API... the master mute is simulated in software.
     *
     * An AudioSubjectChangedEvent.MUTE event is fired with a BooleanControl.Type.MUTE
     * type, and the mute state is passed as the value.
     *
     * The mutes of all audio created with the SampledAudioManager are updated
     * to the new mix.
     *
     * @param muteOn
     */
    public void setMasterMute(boolean muteOn) {
        if (muteOn == isMasterMuteOn)
            return;

        isMasterMuteOn = muteOn;

        // Notify observers
        fireSubjectChanged(new SubjectChangedEvent(ApolloSubjectChangedEvent.MUTE));

    }
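
    /*
     * A minimal usage sketch (editor's illustration, not part of the original source):
     * driving the software master controls. Volume and mute are simulated in software,
     * so only observer notifications are fired here.
     *
     *   ApolloPlaybackMixer mixer = ApolloPlaybackMixer.getInstance();
     *   mixer.setMasterVolume(1.4f);             // clamped to 1.0f
     *   assert mixer.getMasterVolume() == 1.0f;
     *   mixer.setMasterMute(true);               // observers receive ApolloSubjectChangedEvent.MUTE
     */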

    /**
     * @return
     *     True if master mute is on.
     */
    public boolean isMasterMuteOn() {
        return isMasterMuteOn;
    }

    /**
     * @return
     *     The master volume. Always between 0 and 1.
     */
    public float getMasterVolume() {
        return masterVolume;
    }

    /**
     * @return
     *     True if the mixer is in solo mode.
     */
    public boolean isSoloEnable() {
        return isSoloEnable;
    }

    /**
     * In solo mode, the only track sequences that are played are those with
     * the solo flag set.
     *
     * @param isSoloEnable
     *     True to set into solo mode.
     */
    public void setSoloEnable(boolean isSoloEnable) {
        this.isSoloEnable = isSoloEnable;
    }

    /**
     * Sets the solo flag to false for all track sequences that are playing / queued to play.
     */
    public void unsetAllSoloFlags() {
        synchronized(sequenceGraph) {
            for (TrackSequence ts : sequenceGraph) ts.isSolo = false;
        }
    }


    /**
     * Plays a track at the given relative initiation time to the current
     * (global) playback position.
     *
     * If the track is already playing, or is about to play, then invoking this
     * call has no effect.
     *
     * @param track
     *     The track to play.
     *
     * @return
     *     True if queued for playing. False if the track is already in the track graph.
     *
     * @throws NullPointerException
     *     If track is null.
     *
     * @throws IllegalArgumentException
     *     If track has already been played before.
     *
     * @throws LineUnavailableException
     *     If failed to get a data line to the output device.
     */
    public boolean play(TrackSequence track) throws LineUnavailableException {
        if (track == null) throw new NullPointerException("track");
        if (track.hasFinished()) throw new IllegalArgumentException("track is stale, must create new instance");

        // Add to graph
        synchronized(sequenceGraph) {

            if (sequenceGraph.contains(track)) {
                return false; // if already playing / queued to play then ignore
            }

            // Set initiation to commence relative to the current timeline.
            track.initiationFrame = timelineFrame + track.getRelativeInitiationFrame();

            sequenceGraph.add(track);

        }

        // Ensure that the added track will play
        commencePlayback();

        return true;

    }
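
    /*
     * Hedged usage sketch (editor's illustration only): playing a single track sequence.
     * How the TrackSequence is constructed is assumed here - in Apollo it is created
     * elsewhere from a SampledTrackModel's audio bytes.
     *
     *   TrackSequence seq = ...; // freshly created, never played before
     *   try {
     *       boolean queued = ApolloPlaybackMixer.getInstance().play(seq);
     *       // queued == false means it was already playing / queued to play
     *   } catch (LineUnavailableException e) {
     *       // no output line could be acquired; playback did not start
     *   }
     */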

    /**
     * Plays a group of tracks exactly at the scheduled frame-time with respect to each other.
     *
     * Tracks in the given set that are already playing, or are queued for playing,
     * will be ignored.
     *
     * @param tracks
     *     A set of sequence tracks to play together exactly at their relative initiation points.
     *
     * @throws NullPointerException
     *     If tracks is null.
     *
     * @throws LineUnavailableException
     *     If failed to get a data line to the output device.
     */
    public void playSynchronized(Set<TrackSequence> tracks) throws LineUnavailableException {
        if (tracks == null) throw new NullPointerException("tracks");

        // Add to graph
        synchronized(sequenceGraph) {

            long initiationTimeOffset = timelineFrame;

            // If the playback thread is running... and the new tracks will begin playback automatically, then
            // schedule the group of tracks to play in the next pass so they all begin together.
            if (playbackThread != null && !playbackThread.isStopping() && playbackThread.isAlive()) {
                initiationTimeOffset += playbackThread.bufferFrameLength;
            }

            for (TrackSequence ts : tracks) {

                if (ts == null) continue;

                if (sequenceGraph.contains(ts)) {
                    continue; // if already playing / queued to play then ignore
                }

                ts.initiationFrame = ts.getRelativeInitiationFrame() + initiationTimeOffset;
                sequenceGraph.add(ts);
            }
        }

        // Ensure that the added tracks will play
        commencePlayback();

    }
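
    /*
     * Hedged usage sketch (editor's illustration only): starting several pre-arranged track
     * sequences so their relative initiation frames are preserved against one another.
     * The sequence references below are hypothetical.
     *
     *   Set<TrackSequence> group = new HashSet<TrackSequence>();
     *   group.add(drums); // relative initiation frames already set on each sequence
     *   group.add(bass);
     *   ApolloPlaybackMixer.getInstance().playSynchronized(group);
     */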

    /**
     * Ensures that the playback thread is playing / will keep playing.
     * If a new track is added to the sequence graph, then calling this
     * ensures that it will be played.
     *
     * @throws LineUnavailableException
     *     If failed to get a data line to the output device.
     */
    private void commencePlayback() throws LineUnavailableException {

        if (playbackThread != null && playbackThread.isAlive() && playbackThread.isStopping()) {

            try {

                playbackThread.join(); // wait for old play process to finish

            } catch (InterruptedException e) {

                e.printStackTrace();
                return;

            }
            playbackThread = null;
        }

        if (playbackThread != null && !playbackThread.isAlive()) {
            playbackThread = null;
        }

        // If playbackThread is not null at this point, then it is assumed that it is still playing
        // and therefore does not need to be started/restarted.

        // If the playback thread is dead, create a new one to initiate playback
        if (playbackThread == null) {

            playbackThread = new PlaybackThread();
            playbackThread.start();

        }

    }

    /**
     * Stops many track sequences. This is better than calling
     * {@link #stop(TrackSequence)} repeatedly because it ensures that all tracks
     * are stopped at the same time.
     *
     * @param tracks
     *     The tracks to stop.
     */
    public void stop(Collection<TrackSequence> tracks) {
        if (tracks == null || playbackThread == null) return;


        synchronized(sequenceGraph) {
            for (TrackSequence track : tracks) {
                stop(track);
            }
        }

    }

    /**
     * Stops a track sequence. Non-blocking; the actual stop will occur when the mixer
     * has a chance to respond.
     *
     * @param track
     *     The track to stop. If null then this returns with no effect.
     */
    public void stop(TrackSequence track) {
        if (track == null || playbackThread == null) return;

        track.stopPending = true;
    }

    /**
     * Stops all track sequences from playback.
     */
    public void stopAll() {

        synchronized(sequenceGraph) {
            for (TrackSequence track : sequenceGraph)
                track.stopPending = true;
        }

    }
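
    /*
     * Hedged usage sketch (editor's illustration only): requesting stops. All stops are
     * non-blocking - the pipeline removes the sequences on its next pass. The sequence
     * references below are hypothetical.
     *
     *   ApolloPlaybackMixer mixer = ApolloPlaybackMixer.getInstance();
     *   mixer.stop(seq);                          // a single sequence
     *   mixer.stop(Arrays.asList(drums, bass));   // several, stopped on the same pass
     *   mixer.stopAll();                          // everything currently in the graph
     */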

    /**
     * {@inheritDoc}
     */
    public SampledTrackModel getSharedSampledTrackModel(String localfilename) {
        if (localfilename == null) return null;

        // Get a snapshot of the graph
        ArrayList<TrackSequence> snapshot = null;
        synchronized(sequenceGraph) {
            snapshot = new ArrayList<TrackSequence>(sequenceGraph);
        }

        // Look for SampledTrackModel-invoked sequences
        for (TrackSequence ts : snapshot) {
            Object invoker = ts.getInvoker();

            if (invoker != null && invoker instanceof SampledTrackModel) {

                // Match?
                if (localfilename.equals(((SampledTrackModel)invoker).getLocalFilename())) {
                    return (SampledTrackModel)invoker; // found match
                }
            }

        }

        // Nothing matched
        return null;
    }


    /**
     * @return
     *     The actual frame position in the playback stream - that is, the number of
     *     frames that have been rendered since playback commenced.
     *     Negative if there is no playback.
     */
    public long getLiveFramePosition() {

        if (playbackThread != null) {
            if (playbackThread.srcDataLine.isOpen()) {

                // The timelineFrame should always be larger than or equal to the live frame position
                // assert(timelineFrame >= playbackThread.srcDataLine.getLongFramePosition());

                return playbackThread.srcDataLine.getLongFramePosition();
            }
        }

        return -1;
    }
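
    /*
     * Hedged sketch (editor's illustration only): converting the live frame position into
     * seconds using the live audio format. Assumes playback is active, so the position is
     * non-negative and the format is non-null.
     *
     *   long frames = ApolloPlaybackMixer.getInstance().getLiveFramePosition();
     *   AudioFormat fmt = ApolloPlaybackMixer.getInstance().getLiveAudioFormat();
     *   if (frames >= 0 && fmt != null) {
     *       double seconds = frames / (double) fmt.getFrameRate();
     *   }
     */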

    /**
     * @return
     *     The audio format of the current playback data line. Null if not available - never
     *     null if in a playing state.
     */
    public AudioFormat getLiveAudioFormat() {
        if (playbackThread != null) {
            return playbackThread.srcDataLine.getFormat();
        }
        return null;
    }

    /**
     * The audio mixing pipeline.
     *
     * All audio mixing math/logic is done within this thread.
     * Keeps running until the sequenceGraph is empty.
     * Removes tracks from the sequenceGraph automatically when they are finished.
     *
     *
     * @author Brook Novak
     *
     */
    private class PlaybackThread extends Thread {

        private SourceDataLine srcDataLine; // never null

        private int bufferFrameLength;
        private boolean isOutputBigEndian;

        /**
         * Instantly prepares for audio playback: opens the source data line for output.
         *
         * @throws LineUnavailableException
         */
        PlaybackThread() throws LineUnavailableException {
            super("Apollo Playback Mixer Thread");
            super.setPriority(Thread.MAX_PRIORITY);

            assert(playbackThread == null); // there should only ever be one instance of this.

            // Upon creation, open a source data line
            aquireSourceDataLine();


            // Reset the global timeline frame to match the live frame position, i.e. wrap back to zero.
            synchronized(sequenceGraph) { // probably will be empty, but just for safety...

                for (TrackSequence ts : sequenceGraph) {
                    ts.initiationFrame -= timelineFrame;
                    if (ts.initiationFrame < 0)
                        ts.initiationFrame = 0;
                }

                timelineFrame = 0;
            }
        }

        /**
         * Opens the source data line for output.
         *
         * @throws LineUnavailableException
         *     If failed to acquire the source data line.
         */
        private void aquireSourceDataLine() throws LineUnavailableException {

            // Select an audio output format
            DataLine.Info info = new DataLine.Info(
                    SourceDataLine.class,
                    getAudioFormat());

            // Get the source data line to output.
            srcDataLine = (SourceDataLine)
                SampledAudioManager.getInstance().getOutputMixure().getLine(info); // LineUnavailableException

            srcDataLine.open(); // LineUnavailableException

            // Cache useful data
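            // Note: getBufferSize() is in bytes; assuming the mono, 16-bit playback format
            // (only the 16-bit part is asserted below), each frame occupies 2 bytes, so
            // dividing by 2 gives the buffer length in frames.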
            bufferFrameLength = srcDataLine.getBufferSize() / 2;
            isOutputBigEndian = srcDataLine.getFormat().isBigEndian();

            assert(bufferFrameLength > 0);
            assert(srcDataLine.getFormat().getSampleSizeInBits() == 16);

        }

        /**
         * Closes the data line so that if bytes are currently being written, or the next time
         * bytes are written, the thread will end.
         * Does not block the calling thread. Thus it may take a little while for the thread to
         * end after the call has returned.
         */
        public void stopPlayback() {
            srcDataLine.close();
        }

        /**
         * Note: even if all tracks have been processed in the audio pipeline, it will
         * commence another pass to check for new tracks added to the graph before finishing.
         *
         * @return True if stopping and will not play any tracks added to the queue.
         */
        public boolean isStopping() {
            return !srcDataLine.isOpen();
        }

        /**
         * @return the best audio format for playback...
         */
        private AudioFormat getAudioFormat() {
            return SampledAudioManager.getInstance().getDefaultPlaybackFormat();
        }

        /**
         * The audio mixing pipeline
         */
        public void run() {

            // Notify observers that some audio has started playing
            AudioSystemLog.println("AUDIO PIPELINE THREAD STARTED");

            ApolloPlaybackMixer.this.fireSubjectChangedLaterOnSwingThread(
                    new SubjectChangedEvent(ApolloSubjectChangedEvent.PLAYBACK_STARTED));

            // All tracks to play per pass
            List<TrackSequence> tracksToPlay = new LinkedList<TrackSequence>();

            // Keeps track of tracks to remove
            List<TrackSequence> completedTracks = new LinkedList<TrackSequence>();

            // The buffer written directly to the source data line
            byte[] sampleBuffer = new byte[2 * bufferFrameLength];

            // The mixed frames, where each element refers to a frame
            int[] mixedFrameBuffer = new int[bufferFrameLength];

            // Helpers declared outside the loop for efficiency
            int msb, lsb;
            int sample;
            int totalFramesMixed;
            int trackCount; // tracks to play at a given pass
            boolean isMoreQueued; // True if there are more tracks queued.
            int frameIndex;
            int i;

            // Begin writing to the source data line
            if (srcDataLine.isOpen())
                srcDataLine.start();
            else return;

            // keep playing as long as line is open (and there is something to play)
            try
            {
                while (srcDataLine.isOpen()) { // The audio mixing pipeline

                    // First decide on which tracks to play ... and remove any finished tracks.
                    synchronized(sequenceGraph) {

                        // If there are no more tracks queued for playing, then exit the
                        // playback thread.
                        if (sequenceGraph.isEmpty())
                            return;

                        isMoreQueued = false;
                        completedTracks.clear();
                        tracksToPlay.clear();

                        for (TrackSequence ts : sequenceGraph) {

                            // Has this track sequence finished?
                            if (ts.currentFrame > ts.endFrame || ts.stopPending)
                                completedTracks.add(ts);

                            // Is this track playing / meant to start playing in this pass?
                            else if (ts.initiationFrame <= (timelineFrame + bufferFrameLength))
                                tracksToPlay.add(ts);

                            // If it is not time to play the track yet, then
                            // neither is it time for any of the following tracks,
                            // since they are ordered by their initiation time.
                            else break;

                        }

                        // Get rid of tracks that have finished playing. Notify models that they have stopped
                        for (TrackSequence staleTS : completedTracks) {

                            sequenceGraph.remove(staleTS);

                            staleTS.onStopped((staleTS.currentFrame > staleTS.endFrame)
                                    ? staleTS.endFrame : staleTS.currentFrame);

                            //removeTrackFromGraph(staleTS, staleTS.endFrame);
                        }

                        trackCount = tracksToPlay.size();
                        isMoreQueued = sequenceGraph.size() > trackCount;

                        // If there is nothing queued and there are no tracks to play,
                        // then playback is finished.
                        if (!isMoreQueued && trackCount == 0)
                            return;

                    } // release lock

                    totalFramesMixed = 0; // this will be set to the maximum number of frames that were mixed across all tracks

                    // Clear audio buffer
                    for (i = 0; i < bufferFrameLength; i++) // TODO: Efficient way of clearing buffer?
                        mixedFrameBuffer[i] = 0;

                    // Perform Mixing :
                    // Convert the sample size to 16-bit always for best precision while
                    // processing audio in the mix pipeline....
                    for (TrackSequence ts : tracksToPlay) {

                        // Notify model that it has initiated
                        if (!ts.isPlaying()) ts.onInitiated();

                        // Skip muted / un-soloed tracks - they add nothing to the sample mix
                        if (ts.isMuted || (isSoloEnable && !ts.isSolo)) {

                            // Make sure it starts where initiated, if not already initiated
                            if (ts.initiationFrame >= timelineFrame && ts.initiationFrame < (timelineFrame + bufferFrameLength)) {

                                // Get index in frame buffer where to initiate
                                frameIndex = (int)(ts.initiationFrame - timelineFrame);

                                // Calculate the length of frames to buffer - adjust the silent track's position
                                ts.currentFrame += (bufferFrameLength - frameIndex);

                            } else { // skip full buffer of bytes ... silenced

                                ts.currentFrame += bufferFrameLength; // currentFrame can go outside the endFrame boundary of the track

                            }

                            totalFramesMixed = bufferFrameLength;

                        } else { // Get samples and add to mix

                            // If the track is yet to initiate - part way through the buffer, then start adding bytes
                            // at the initiation point
                            if (ts.initiationFrame >= timelineFrame && ts.initiationFrame < (timelineFrame + bufferFrameLength)) {

                                frameIndex = (int)(ts.initiationFrame - timelineFrame);

                            } else {

                                frameIndex = 0;

                            }

                            // For each frame
                            for (;frameIndex < bufferFrameLength && ts.currentFrame <= ts.endFrame; frameIndex++) {

                                // Get sample according to byte order
                                if (ts.isBigEndian) {

                                    // First byte is MSB (high order)
                                    msb = (int)ts.playbackAudioBytes[ts.currentFrame + ts.currentFrame];

                                    // Second byte is LSB (low order)
                                    lsb = (int)ts.playbackAudioBytes[ts.currentFrame + ts.currentFrame + 1];

                                } else {

                                    // First byte is LSB (low order)
                                    lsb = (int)ts.playbackAudioBytes[ts.currentFrame + ts.currentFrame];

                                    // Second byte is MSB (high order)
                                    msb = (int)ts.playbackAudioBytes[ts.currentFrame + ts.currentFrame + 1];
                                }

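                                // Reassemble the signed 16-bit sample from the two bytes:
                                // e.g. msb = 0x12, lsb = 0x34 gives (0x12 << 8) | 0x34 = 0x1234 (4660).
                                // Masking lsb with 0xFF stops a sign-extended low byte from corrupting
                                // the high byte, while the (possibly negative) msb keeps the sample's sign.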
                                sample = (msb << 0x8) | (0xFF & lsb);

                                // Apply track volume
                                sample = (int)(sample * ts.volume);

                                // Add to current mix
                                mixedFrameBuffer[frameIndex] += sample;

                                // Get next sample
                                ts.currentFrame++;
                            }


                            // Keep track of total frames mixed in buffer
                            if (frameIndex > totalFramesMixed)
                                totalFramesMixed = frameIndex;
                        }

                    } // Mix in next track

                    // totalFramesMixed is the number of frames to play.
                    // If it is zero then it means that there are tracks yet to be initiated, and nothing currently playing
                    assert (totalFramesMixed <= bufferFrameLength);
                    assert (totalFramesMixed > 0 ||
                            (totalFramesMixed == 0 && trackCount == 0 && isMoreQueued));

                    // Post mix with master settings
                    if (isMasterMuteOn) { // Silence sample buffer if master mute is on

                        for (i = 0; i < sampleBuffer.length; i++) {
                            sampleBuffer[i] = 0;
                        }

                        // Let the muted bytes play
                        totalFramesMixed = bufferFrameLength;

                    } else { // otherwise apply master volume

                        for (i = 0; i < totalFramesMixed; i++) {

                            // Average tracks
                            //mixedFrameBuffer[i] /= trackCount; // deprecated

                            // Apply master volume
                            mixedFrameBuffer[i] = (int)(mixedFrameBuffer[i] * masterVolume);

                            // Clip
                            if (mixedFrameBuffer[i] > Short.MAX_VALUE) mixedFrameBuffer[i] = Short.MAX_VALUE;
                            else if (mixedFrameBuffer[i] < Short.MIN_VALUE) mixedFrameBuffer[i] = Short.MIN_VALUE;

                            // Convert to output format
                            lsb = (mixedFrameBuffer[i] & 0xFF);
                            msb = ((mixedFrameBuffer[i] >> 8) & 0xFF);

                            if (isOutputBigEndian) {
                                sampleBuffer[i+i] = (byte)msb;
                                sampleBuffer[i+i+1] = (byte)lsb;
                            } else {
                                sampleBuffer[i+i] = (byte)lsb;
                                sampleBuffer[i+i+1] = (byte)msb;
                            }

                        }

                    }

                    // Generate silence only if there are more tracks to be played.
                    // Note that this could be false, but a track might have been queued after
                    // setting the isMoreQueued flag. In such cases... silence is not wanted anyway!
                    if (isMoreQueued) {
                        for (i = totalFramesMixed; i < bufferFrameLength; i++) { // will skip if no need to generate silence
                            sampleBuffer[i+i] = 0;
                            sampleBuffer[i+i+1] = 0;
                        }
                        // Ensure that the full buffer is played ... including the silence
                        totalFramesMixed = bufferFrameLength;
                    }

                    // Write processed bytes to the line-out stream and update the timeline frame
                    srcDataLine.write(
                            sampleBuffer,
                            0,
                            totalFramesMixed * 2);

                    // Update timeline counter for sequencing management
                    timelineFrame += totalFramesMixed;

                    // The timelineFrame should always be larger than or equal to the live frame position
                    assert(timelineFrame >= srcDataLine.getLongFramePosition());

                } // Next pass

            } finally {

                // Ensure line freed
                if (srcDataLine.isOpen()) {
                    srcDataLine.close();
                }

                // Clear sequence graph.
                synchronized(sequenceGraph) {

                    for (TrackSequence track : sequenceGraph) {

                        track.onStopped((track.currentFrame > track.endFrame)
                                ? track.endFrame : track.currentFrame);
                    }

                    sequenceGraph.clear();

                }

                AudioSystemLog.println("AUDIO PIPELINE THREAD KILLED");

                // Notify observers that playback has finished.
                ApolloPlaybackMixer.this.fireSubjectChangedLaterOnSwingThread(
                        new SubjectChangedEvent(ApolloSubjectChangedEvent.PLAYBACK_STOPPED));

            }

        }



    }


}