source: trunk/src/org/apollo/audio/ApolloPlaybackMixer.java@ 1007

Last change on this file since 1007 was 1007, checked in by davidb, 8 years ago

Generalization of audio support to allow playback/mixer to be stereo, plus some edits to comments

File size: 35.0 KB
1package org.apollo.audio;
2
3import java.util.ArrayList;
4import java.util.Collection;
5import java.util.LinkedList;
6import java.util.List;
7import java.util.Set;
8import java.util.TreeSet;
9
10import javax.sound.sampled.AudioFormat;
11import javax.sound.sampled.DataLine;
12import javax.sound.sampled.LineUnavailableException;
13import javax.sound.sampled.SourceDataLine;
14
15import org.apollo.mvc.AbstractSubject;
16import org.apollo.mvc.SubjectChangedEvent;
17import org.apollo.util.TrackModelHandler;
18import org.apollo.util.TrackModelLoadManager;
19
20/**
21 * A software playback mixer.
22 *
23 * @author Brook Novak
24 *
25 */
26public class ApolloPlaybackMixer extends AbstractSubject implements TrackModelHandler {
27
28 /** The timeline frame represents the global frame counter which all tracks synchronize to. */
29 private long timelineFrame = 0; // Wrapping ignored.
30
31 private TreeSet<TrackSequence> sequenceGraph = new TreeSet<TrackSequence>();
32
33 private PlaybackThread playbackThread = null;
34
35 private float masterVolume = 1.0f;
36 private boolean isMasterMuteOn = false;
37
38 private boolean isSoloEnable = false;
39
40 private static ApolloPlaybackMixer instance = new ApolloPlaybackMixer();
41 private ApolloPlaybackMixer() {
42 // When a TrackModel is loading - look in here to see if one is in memory
43 TrackModelLoadManager.getInstance().addTrackModelHandler(this);
44 }
45
46 public static ApolloPlaybackMixer getInstance() {
47 return instance;
48 }
49
50 /**
51 * Stops all playback... kills thread(s).
52 */
53 public void releaseResources() {
54
55 // Quickly stop playback thread
56 if (playbackThread != null)
57 playbackThread.stopPlayback(); // will die momentarily
58
59 // Release references .. dispose of track memory
60 stopAll();
61
62 }
63
64
65 /**
66 * Sets the master volume of the output mixer.
67 *
68 * The master volume is not adjusted directly in the hardware due to Java Sound's
69 * sketchy API... the master volume is simulated in software.
70 *
71 * An AudioControlValueChangedEvent is fired with a FloatControl.Type.VOLUME
72 * type, and the new volume (a float clamped to the range 0-1) is passed as the
73 * event value.
74 *
75 * The volumes of all audio created with the SampledAudioManager are updated
76 * to the new mix.
77 *
78 * @param volume The new volume to set it to. Ranges from 0-1.
79 */
80 public void setMasterVolume(float volume) {
81
82 // Clamp volume argument
83 if (volume < 0.0f)
84 volume = 0.0f;
85 else if (volume > 1.0f)
86 volume = 1.0f;
87
88 if (volume == masterVolume)
89 return;
90
91 masterVolume = volume;
92
93 // Notify observers
94 fireSubjectChanged(new SubjectChangedEvent(ApolloSubjectChangedEvent.VOLUME));
95 }
96
97
98 /**
99 * Sets the master mute of the output mixer.
100 *
101 * The master mute is not adjusted directly in the hardware due to Java Sound's
102 * sketchy API... the master mute is simulated in software.
103 *
104 * An AudioSubjectChangedEvent.MUTE event is fired with a BooleanControl.Type.MUTE
105 * type, and the new mute state is passed as the value.
106 *
107 * The mute states of all audio created with the SampledAudioManager are updated
108 * to the new mix.
109 *
110 * @param muteOn True to turn master mute on, false to turn it off.
111 */
112 public void setMasterMute(boolean muteOn) {
113 if (muteOn == isMasterMuteOn)
114 return;
115
116 isMasterMuteOn = muteOn;
117
118 // Notify observers
119 fireSubjectChanged(new SubjectChangedEvent(ApolloSubjectChangedEvent.MUTE));
120
121 }
122
123 /**
124 * @return
125 * True if master mute is on.
126 */
127 public boolean isMasterMuteOn() {
128 return isMasterMuteOn;
129 }
130
131 /**
132 * @return
133 * The master volume. Always between 0 and 1.
134 */
135 public float getMasterVolume() {
136 return masterVolume;
137 }
138
139 /**
140 * @return
141 * True if the mixer is in solo mode.
142 */
143 public boolean isSoloEnable() {
144 return isSoloEnable;
145 }
146
147 /**
148 * In solo mode, the only track sequences that are played are those with
149 * the solo flag set.
150 *
151 * @param isSoloEnable
152 * True to set into solo mode.
153 */
154 public void setSoloEnable(boolean isSoloEnable) {
155 this.isSoloEnable = isSoloEnable;
156 }
157
158 /**
159 * Clears the solo flag on all track sequences that are playing / queued to play.
160 */
161 public void unsetAllSoloFlags() {
162 synchronized(sequenceGraph) {
163 for (TrackSequence ts : sequenceGraph) ts.isSolo = false;
164 }
165 }
166
167
168 /**
169 * Plays a track at the given relative initiation time to the current
170 * (global) playback position.
171 *
172 * If the track is already playing, or is about to play, then invoking this call
173 * has no effect.
174 *
175 * @param track
176 * The track to play.
177 *
178 * @return
179 * True if queued for playing. False if the track is already in the track graph.
180 *
181 * @throws NullPointerException
182 * If track is null.
183 *
184 * @throws IllegalArgumentException
185 * If track has already been played before.
186 *
187 * @throws LineUnavailableException
188 * If a data line to the output device could not be acquired.
189 */
190 public boolean play(TrackSequence track) throws LineUnavailableException {
191 if (track == null) throw new NullPointerException("track");
192 if (track.hasFinished()) throw new IllegalArgumentException("track is stale, must create new instance");
193
194 while (true) {
195 // Add to graph
196 synchronized(sequenceGraph) {
197
198 if (sequenceGraph.contains(track)) {
199 return false; // if already playing / queued to play then ignore
200 }
201
202 if (playbackThread == null || !playbackThread.isAlive() || !playbackThread.isStopping()) {
203 // Set initiation to commence relative to the current timeline.
204 track.initiationFrame = timelineFrame + track.getRelativeInitiationFrame();
205
206 sequenceGraph.add(track);
207
208 break;
209 }
210
211 }
212
213 // Cannot play if mixer is in a stopping state since it will stop all tracks when thread terminates
214 try {
215 playbackThread.join();
216 } catch (InterruptedException e) {
217 e.printStackTrace();
218 return false;
219 }
220 }
221
222 // Ensure that the added track will play
223 commencePlayback();
224
225 return true;
226
227 }
228
229 /**
230 * Plays a group of tracks exactly at the scheduled frame-time with respect to each other.
231 *
232 * Tracks in the given set that are already playing, or are queued for playing,
233 * will be ignored.
234 *
235 * @param tracks
236 * A set of sequence tracks to play together exactly at their relative initiation points.
237 *
238 * @throws NullPointerException
239 * If tracks is null.
240 *
241 * @throws LineUnavailableException
242 * If a data line to the output device could not be acquired.
243 *
244 * @throws IllegalArgumentException
245 * If tracks is empty
246 */
247 public void playSynchronized(Set<TrackSequence> tracks) throws LineUnavailableException {
248 if (tracks == null) throw new NullPointerException("tracks");
249 if (tracks.isEmpty())
250 throw new IllegalArgumentException("tracks is empty");
251
252 while (true) {
253
254 // Add to graph
255 synchronized(sequenceGraph) {
256
257 if (playbackThread == null || !playbackThread.isAlive() || !playbackThread.isStopping()) {
258
259 long initiationTimeOffset = timelineFrame;
260
261 // If the playback thread is running... and the new tracks will begin playback automatically, then
262 // schedule the group of tracks to play in the next pass so they all begin together.
263 if (playbackThread != null && !playbackThread.isStopping() && playbackThread.isAlive()) {
264 initiationTimeOffset += playbackThread.bufferFrameLength;
265 }
266
267 for (TrackSequence ts : tracks) {
268
269 if (ts == null) continue;
270
271 if (sequenceGraph.contains(ts))
272 continue; // if already playing / queued to play then ignore
273
274 ts.initiationFrame = ts.getRelativeInitiationFrame() + initiationTimeOffset;
275 sequenceGraph.add(ts);
276
277 }
278
279 // Playback can commence
280 break;
281
282 }
283
284 }
285
286 // Cannot play if mixer is in a stopping state since it will stop all tracks when thread terminates
287 try {
288 playbackThread.join();
289 } catch (InterruptedException e) {
290 e.printStackTrace();
291 return;
292 }
293
294 }
295
296 // Ensure that the added tracks will play
297 commencePlayback();
298
299 }
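// Usage sketch (illustrative only, not part of the original source): given
// already-prepared TrackSequence instances ts1 and ts2 (hypothetical names),
// a caller could schedule them to start in the same pass like this:
//
//   Set<TrackSequence> group = new java.util.HashSet<TrackSequence>();
//   group.add(ts1);
//   group.add(ts2);
//   try {
//       ApolloPlaybackMixer.getInstance().playSynchronized(group);
//   } catch (LineUnavailableException e) {
//       // no output line could be acquired
//   }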
300
301 /**
302 * Ensures that the playback thread is playing / will keep playing.
303 * If a new track is added to the sequence graph, calling this ensures
304 * that it will be played.
305 *
306 * @throws LineUnavailableException
307 * If a data line to the output device could not be acquired.
308 */
309 private void commencePlayback() throws LineUnavailableException {
310
311 // Should not be in a stopping state...
312 assert (!(
313 playbackThread != null && playbackThread.isAlive() && playbackThread.isStopping()));
314
315 if (playbackThread != null && !playbackThread.isAlive()) playbackThread = null;
316
317 // If playbackThread is not null at this point, then it is assumed that it is still playing
318 // and therefore does not need to be started/restarted.
319
320 // Before playback is commenced, ensure that the MIDI device is released
321 Metronome.getInstance().release();
322
323 // If the playback thread is dead, create a new one to initiate playback
324 if (playbackThread == null) {
325
326 AudioFormat audio_format = SampledAudioManager.getInstance().getDefaultPlaybackFormat();
327 if (audio_format.getChannels()==2) {
328 playbackThread = new StereoPlaybackThread();
329 }
330 else {
331 playbackThread = new MonoPlaybackThread();
332 }
333 playbackThread.start();
334
335 }
336
337 }
338
339 /**
340 * Stops many track sequences. This is better than calling
341 * {@link #stop(TrackSequence)} because it ensures that all tracks are stopped
342 * at the same time.
343 *
344 * @param tracks
345 * The tracks to stop.
346 */
347 public void stop(Collection<TrackSequence> tracks) {
348 if (tracks == null || playbackThread == null) return;
349
350
351 synchronized(sequenceGraph) {
352 for (TrackSequence track : tracks) {
353 stop(track);
354 }
355 }
356
357 }
358
359 /**
360 * Stops a track sequence. Non-blocking; the actual stop will occur when the mixer
361 * has a chance to respond.
362 *
363 * @param track
364 * The track to stop. If null then will return with no effect.
365 */
366 public void stop(TrackSequence track) {
367 if (track == null || playbackThread == null) return;
368
369 track.stopPending = true;
370 }
371
372 /**
373 * Stops all track sequences from playback.
374 */
375 public void stopAll() {
376
377 synchronized(sequenceGraph) {
378 for (TrackSequence track : sequenceGraph)
379 track.stopPending = true;
380 }
381
382 }
383
384 /**
385 * {@inheritDoc}
386 */
387 public SampledTrackModel getSharedSampledTrackModel(String localfilename) {
388 if (localfilename == null) return null;
389
390 // Get a snapshot of the graph
391 ArrayList<TrackSequence> snapshot = null;
392 synchronized(sequenceGraph) {
393 snapshot = new ArrayList<TrackSequence>(sequenceGraph);
394 }
395
396 // Look for SampledTrackModel-invoked sequences
397 for (TrackSequence ts : snapshot) {
398 Object invoker = ts.getInvoker();
399
400 if (invoker != null && invoker instanceof SampledTrackModel) {
401
402 // Match?
403 if (localfilename.equals(((SampledTrackModel)invoker).getLocalFilename())) {
404 return (SampledTrackModel)invoker; // found match
405 }
406 }
407
408 }
409
410 // Nothing matched
411 return null;
412 }
413
414
415 /**
416 * @return
417 * The actual frame position in the playback stream - that is, the number of
418 * frames that have been rendered since playback commenced.
419 * Negative if there is no playback.
420 */
421 public long getLiveFramePosition() {
422
423 if (playbackThread != null) {
424 if (playbackThread.srcDataLine.isOpen()) {
425
426 // The timelineFrame should always be larger or equal to the live frame position
427 // assert(timelineFrame >= playbackThread.srcDataLine.getLongFramePosition());
428
429 return playbackThread.srcDataLine.getLongFramePosition();
430 }
431 }
432
433 return -1;
434 }
435
436 /**
437 * @return
438 * The audio format of the current playback data line. Null if not available - never
439 * null when in a playing state.
440 */
441 public AudioFormat getLiveAudioFormat() {
442 if (playbackThread != null) {
443 return playbackThread.srcDataLine.getFormat();
444 }
445 return null;
446 }
447
448 /**
449 * The Audio Mixing pipeline.
450 *
451 * All audio mixing math/logic is done within this thread.
452 * Keeps running until the sequenceGraph is empty.
453 * Removes tracks from the sequenceGraph automatically when they are finished.
454 *
455 *
456 * @author Brook Novak
457 *
458 */
459 private abstract class PlaybackThread extends Thread {
460
461 protected SourceDataLine srcDataLine; // never null
462
463 protected boolean isStopping = false;
464
465 protected int bufferFrameLength;
466 protected boolean isOutputBigEndian;
467
468 /**
469 * Immediately prepares for audio playback: opens the source data line for output.
470 *
471 * @throws LineUnavailableException
472 */
473 protected PlaybackThread(String threadName) throws LineUnavailableException {
474 super(threadName);
475 super.setPriority(Thread.MAX_PRIORITY);
476
477 assert(playbackThread == null); // there should be only one instance of this ever.
478
479 // Upon creation, open a source data line
480 aquireSourceDataLine();
481
482
483 // Reset the global timeline frame to match the live frame position. i.e. wrap back at zero.
484 synchronized(sequenceGraph) { // probably will be empty, but just for safety...
485
486 for (TrackSequence ts : sequenceGraph) {
487 ts.initiationFrame -= timelineFrame;
488 if (ts.initiationFrame < 0)
489 ts.initiationFrame = 0;
490 }
491
492 timelineFrame = 0;
493 }
494 }
495
496 /**
497 * Opens the source data line for output.
498 *
499 * @throws LineUnavailableException
500 * If failed to acquire the source data line.
501 */
502 protected void aquireSourceDataLine() throws LineUnavailableException {
503
504 // Select an audio output format
505 DataLine.Info info = new DataLine.Info(
506 SourceDataLine.class,
507 getAudioFormat());
508
509 // Get the source data line to output.
510 srcDataLine = (SourceDataLine)
511 SampledAudioManager.getInstance().getOutputMixure().getLine(info); // LineUnavailableException
512
513 srcDataLine.open(); // LineUnavailableException
514
515 // Cache useful data
516 bufferFrameLength = srcDataLine.getBufferSize() / (2*2); // 2=stereo, 2=16-bit
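// (A "frame" here is one sample per channel, so the frame count is the buffer
// size in bytes divided by the frame size in bytes: channels * bytes per sample.)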
517 isOutputBigEndian = srcDataLine.getFormat().isBigEndian();
518
519 assert(bufferFrameLength > 0);
520 assert(srcDataLine.getFormat().getSampleSizeInBits() == 16);
521
522 }
523
524 /**
525 * Closes the data line so that, whether bytes are currently being written or the
526 * next time bytes are written, the thread will end.
527 * Does not block the calling thread, so it may take a little while for the thread
528 * to end after the call returns.
529 */
530 public void stopPlayback() {
531 srcDataLine.close();
532 }
533
534 /**
535 * Note: even if all tracks have been processed in the audio pipeline, it will
536 * commence another pass to check for new tracks added to the graph before finishing.
537 *
538 * @return True if stopping and will not play any tracks added to the queue.
539 */
540 public boolean isStopping() {
541 return isStopping || !srcDataLine.isOpen();
542 }
543
544
545 /**
546 * @return the best audio format for playback...
547 */
548 protected AudioFormat getAudioFormat() {
549 return SampledAudioManager.getInstance().getDefaultPlaybackFormat();
550 }
551
552 /**
553 * The audio mixing pipeline
554 */
555 public abstract void run();
556
557
558
559 }
560
561 private class StereoPlaybackThread extends PlaybackThread {
562
563
564
565 /**
566 * Immediately prepares for audio playback: opens the (stereo) source data line for output.
567 *
568 * @throws LineUnavailableException
569 */
570 StereoPlaybackThread() throws LineUnavailableException {
571 super("Apollo Stereo Playback Mixer Thread");
572
573 }
574
575
576 /**
577 * The audio mixing pipeline
578 */
579 public void run() {
580
581 // Notify observers that some audio has started playing
582 ApolloPlaybackMixer.this.fireSubjectChangedLaterOnSwingThread(
583 new SubjectChangedEvent(ApolloSubjectChangedEvent.PLAYBACK_STARTED));
584
585 // All tracks to play per pass
586 List<TrackSequence> tracksToPlay = new LinkedList<TrackSequence>();
587
588 // Keeps track of tracks to remove
589 List<TrackSequence> completedTracks = new LinkedList<TrackSequence>();
590
591 // The buffer written directly to the source data line
592 byte[] sampleBuffer = new byte[2 * 2 * bufferFrameLength]; // 2=stereo, 2=16-bit samples
593
594 // The mixed frames, where each element refers to a frame
595 int[] mixedFrameBufferL = new int[bufferFrameLength];
596 int[] mixedFrameBufferR = new int[bufferFrameLength];
597
598 // Helpers declared outside the loop for efficiency
599 int msbL, lsbL;
600 int msbR, lsbR;
601 int sampleL;
602 int sampleR;
603 int totalFramesMixed;
604 int trackCount; // tracks to play at a given pass
605 boolean isMoreQueued; // True if there are more tracks queued.
606 int frameIndex;
607 int i;
608
609 // Begin writing to the source data line
610 if (srcDataLine.isOpen())
611 srcDataLine.start();
612 else return;
613
614 // keep playing as long as line is open (and there is something to play)
615 try
616 {
617 while (srcDataLine.isOpen()) { // The audio mixing pipeline
618
619 // First decide on which tracks to play ... and remove any finished tracks.
620 synchronized(sequenceGraph) {
621
622 // If there are no more tracks queued for playing, then exit the
623 // playback thread.
624 if (sequenceGraph.isEmpty())
625 return;
626
627 isMoreQueued = false;
628 completedTracks.clear();
629 tracksToPlay.clear();
630
631 for (TrackSequence ts : sequenceGraph) {
632
633 // Has this track sequence finished?
634 if (ts.currentFrame > ts.endFrame || ts.stopPending)
635 completedTracks.add(ts);
636
637 // Is this track playing / is it meant to start playing in this pass?
638 else if (ts.initiationFrame <= (timelineFrame + bufferFrameLength))
639 tracksToPlay.add(ts);
640
641 // If it is not time to play this track yet, then
642 // the same holds for all following tracks,
643 // since they are ordered by their initiation time.
644 else break;
645
646 }
647
648 // Get rid of tracks that have finished playing. Notify models that they have stopped
649 for (TrackSequence staleTS : completedTracks) {
650
651 sequenceGraph.remove(staleTS);
652
653 staleTS.onStopped((staleTS.currentFrame > staleTS.endFrame)
654 ? staleTS.endFrame : staleTS.currentFrame);
655
656 //removeTrackFromGraph(staleTS, staleTS.endFrame);
657 }
658
659 trackCount = tracksToPlay.size();
660 isMoreQueued = sequenceGraph.size() > trackCount;
661
662 // If there is nothing queued and there are no tracks to play,
663 // then playback is finished.
664 if (!isMoreQueued && trackCount == 0)
665 return;
666
667 } // release lock
668
669 totalFramesMixed = 0; // this will be set to the maximum number of frames that were mixed across all tracks
670
671 // Clear audio buffer
672 for (i = 0; i < bufferFrameLength; i++) {
673 // TODO: Efficient way of clearing buffer?
674 mixedFrameBufferL[i] = 0;
675 mixedFrameBufferR[i] = 0;
676 }
677
678 // Perform Mixing :
679 // Convert the sample size to 16-bit always for best precision while
680 // processing audio in the mix pipeline....
681 for (TrackSequence ts : tracksToPlay) {
682
683 // Notify model that initiated
684 if (!ts.isPlaying()) ts.onInitiated(timelineFrame);
685
686 // Skip muted / unsoloed tracks - they add nothing to the sample mix
687 if (ts.isMuted || (isSoloEnable && !ts.isSolo)) {
688
689 // Make sure playback starts where initiated, if not already initiated
690 if (ts.initiationFrame >= timelineFrame && ts.initiationFrame < (timelineFrame + bufferFrameLength)) {
691
692 // Get index in frame buffer where to initiate
693 frameIndex = (int)(ts.initiationFrame - timelineFrame);
694
695 // Calculate the number of frames to buffer - adjust the silent track's position
696 ts.currentFrame += (bufferFrameLength - frameIndex);
697
698 } else { // skip full buffer of bytes ... silenced
699
700 ts.currentFrame += bufferFrameLength; // currentFrame can go outside the endFrame boundary of the track
701
702 }
703
704 totalFramesMixed = bufferFrameLength;
705
706 } else { // Get samples and add to mix
707
708 // If the track is yet to initiate - part way through the buffer, then start adding bytes
709 // at initiation point
710 if (ts.initiationFrame >= timelineFrame && ts.initiationFrame < (timelineFrame + bufferFrameLength)) {
711
712 frameIndex = (int)(ts.initiationFrame - timelineFrame);
713
714 } else {
715
716 frameIndex = 0;
717
718 }
719
720 // For each frame
721 for (;frameIndex < bufferFrameLength && ts.currentFrame <= ts.endFrame; frameIndex++) {
722
723 // Get sample according to byte order
724 int base_posL = ts.currentFrame * (2*2); // 2=stereo, 2=16-bit
725 int base_posR = base_posL+2;
726
727 if (ts.isBigEndian) {
728
729 // First byte is MSB (high order)
730 msbL = (int)ts.playbackAudioBytes[base_posL];
731
732 // Second byte is LSB (low order)
733 lsbL = (int)ts.playbackAudioBytes[base_posL + 1];
734
735 // And again for the right channel
736 msbR= (int)ts.playbackAudioBytes[base_posR];
737 lsbR = (int)ts.playbackAudioBytes[base_posR + 1];
738
739 } else {
740
741 // First byte is LSB (low order)
742 lsbL = (int)ts.playbackAudioBytes[base_posL];
743
744 // Second byte is MSB (high order)
745 msbL = (int)ts.playbackAudioBytes[base_posL+1];
746
747 // And again for the right channel
748 lsbR = (int)ts.playbackAudioBytes[base_posR];
749 msbR = (int)ts.playbackAudioBytes[base_posR+1];
750 }
751
752 sampleL = (msbL << 0x8) | (0xFF & lsbL);
753 sampleR = (msbR << 0x8) | (0xFF & lsbR);
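// (The MSB was cast from a signed byte, so the left shift preserves the sample's
// sign; the 0xFF mask strips sign extension from the LSB before combining.)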
754
755 // Apply track volume
756 sampleL = (int)(sampleL * ts.volume);
757 sampleR = (int)(sampleR * ts.volume);
758
759 // Add to current mix
760 mixedFrameBufferL[frameIndex] += sampleL;
761 mixedFrameBufferR[frameIndex] += sampleR;
762
763 // Get next sample
764 ts.currentFrame++;
765 }
766
767
768 // Keep track of total frames mixed in buffer
769 if (frameIndex > totalFramesMixed)
770 totalFramesMixed = frameIndex;
771 }
772
773 } // Mix in next track
774
775 // totalFramesMixed is the number of frames to play.
776 // If it is zero then it means that there are tracks yet to be initiated, and nothing currently playing
777 assert (totalFramesMixed <= bufferFrameLength);
778 assert (totalFramesMixed > 0 ||
779 (totalFramesMixed == 0 && trackCount == 0 && isMoreQueued));
780
781 // Post mix with master settings
782 if (isMasterMuteOn) { // Silence sample buffer if master mute is on
783
784 for (i = 0; i < sampleBuffer.length; i++) {
785 sampleBuffer[i] = 0;
786 }
787
788 // Let the muted bytes play
789 totalFramesMixed = bufferFrameLength;
790
791 } else { // otherwise apply master volume
792
793 for (i = 0; i < totalFramesMixed; i++) {
794
795 // Average tracks
796 //mixedFrameBuffer[i] /= trackCount; // depreciated
797
798 // Apply master volume
799 mixedFrameBufferL[i] = (int)(mixedFrameBufferL[i] * masterVolume);
800 mixedFrameBufferR[i] = (int)(mixedFrameBufferR[i] * masterVolume);
801
802 // Clip
803 if (mixedFrameBufferL[i] > Short.MAX_VALUE) mixedFrameBufferL[i] = Short.MAX_VALUE;
804 else if (mixedFrameBufferL[i] < Short.MIN_VALUE) mixedFrameBufferL[i] = Short.MIN_VALUE;
805
806 if (mixedFrameBufferR[i] > Short.MAX_VALUE) mixedFrameBufferR[i] = Short.MAX_VALUE;
807 else if (mixedFrameBufferR[i] < Short.MIN_VALUE) mixedFrameBufferR[i] = Short.MIN_VALUE;
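// (Summing several 16-bit tracks can overflow the signed 16-bit range, so the
// mix is hard-clipped to the Short range before being packed back into bytes.)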
808
809 // Convert to output format
810 lsbL = (mixedFrameBufferL[i] & 0xFF);
811 msbL = ((mixedFrameBufferL[i] >> 8) & 0xFF);
812 lsbR = (mixedFrameBufferR[i] & 0xFF);
813 msbR = ((mixedFrameBufferR[i] >> 8) & 0xFF);
814
815 int base_posL = i * (2 * 2); // 2=stereo, 2=16-bits
816 int base_posR = base_posL + 2;
817 if (isOutputBigEndian) {
818 sampleBuffer[base_posL] = (byte)msbL;
819 sampleBuffer[base_posL+1] = (byte)lsbL;
820 sampleBuffer[base_posR] = (byte)msbR;
821 sampleBuffer[base_posR+1] = (byte)lsbR;
822 } else {
823 sampleBuffer[base_posL] = (byte)lsbL;
824 sampleBuffer[base_posL+1] = (byte)msbL;
825 sampleBuffer[base_posR] = (byte)lsbR;
826 sampleBuffer[base_posR+1] = (byte)msbR;
827 }
828
829 }
830
831 }
832
833 // Generate silence only if there are more tracks to be played.
834 // Note that this could be false, but a track might have been queued after
835 // setting the isMoreQueued flag. In such cases... silence is not wanted anyway!
836 if (isMoreQueued) {
837 for (i = totalFramesMixed; i < bufferFrameLength; i++) { // will skip if no need to generate silence
838 int base_posL = i * (2 * 2); // 2=stereo, 2=16-bits
839 int base_posR = base_posL + 2;
840
841 sampleBuffer[base_posL] = 0;
842 sampleBuffer[base_posL+1] = 0;
843 sampleBuffer[base_posR] = 0;
844 sampleBuffer[base_posR+1] = 0;
845 }
846 // Ensure that full buffer is played ... including the silence
847 totalFramesMixed = bufferFrameLength;
848 }
849
850 // Write processed bytes to line out stream and update the timeline frame
851 srcDataLine.write(
852 sampleBuffer,
853 0,
854 totalFramesMixed * (2 * 2)); // 2=stereo, 2=16-bits
855
856 // Update timeline counter for sequencing management
857 timelineFrame += totalFramesMixed;
858
859 // The timelineFrame should always be larger or equal to the live frame position
860 assert(timelineFrame >= srcDataLine.getLongFramePosition());
861
862 } // Next pass
863
864 } finally {
865
866 isStopping = true;
867
868 // Ensure line freed
869 if (srcDataLine.isOpen()) {
870 srcDataLine.drain(); // avoids chopping off last buffered chunk
871 srcDataLine.close();
872 }
873
874 // Clear sequence graph.
875 synchronized(sequenceGraph) {
876
877 for (TrackSequence track : sequenceGraph) {
878
879 track.onStopped((track.currentFrame > track.endFrame)
880 ? track.endFrame : track.currentFrame);
881 }
882
883 sequenceGraph.clear();
884
885 }
886
887 // Notify observers that playback has finished.
888 ApolloPlaybackMixer.this.fireSubjectChangedLaterOnSwingThread(
889 new SubjectChangedEvent(ApolloSubjectChangedEvent.PLAYBACK_STOPPED));
890
891 }
892
893 }
894 }
895
896 private class MonoPlaybackThread extends PlaybackThread {
897
898
899
900 /**
901 * Immediately prepares for audio playback: opens the (mono) source data line for output.
902 *
903 * @throws LineUnavailableException
904 */
905 MonoPlaybackThread() throws LineUnavailableException {
906 super("Apollo Mono Playback Mixer Thread");
907
908 }
909
910 /**
911 * The audio mixing pipeline
912 */
913 public void run() {
914
915 // Notify observers that some audio has started playing
916 ApolloPlaybackMixer.this.fireSubjectChangedLaterOnSwingThread(
917 new SubjectChangedEvent(ApolloSubjectChangedEvent.PLAYBACK_STARTED));
918
919 // All tracks to play per pass
920 List<TrackSequence> tracksToPlay = new LinkedList<TrackSequence>();
921
922 // Keeps track of tracks to remove
923 List<TrackSequence> completedTracks = new LinkedList<TrackSequence>();
924
925 // The buffer written directly to the source data line
926 byte[] sampleBuffer = new byte[2 * bufferFrameLength];
927
928 // The mixed frames, where each element refers to a frame
929 int[] mixedFrameBuffer = new int[bufferFrameLength];
930
931 // Helpers declared outside the loop for efficiency
932 int msb, lsb;
933 int sample;
934 int totalFramesMixed;
935 int trackCount; // tracks to play at a given pass
936 boolean isMoreQueued; // True if there are more tracks queued.
937 int frameIndex;
938 int i;
939
940 // Begin writing to the source data line
941 if (srcDataLine.isOpen())
942 srcDataLine.start();
943 else return;
944
945 // keep playing as long as line is open (and there is something to play)
946 try
947 {
948 while (srcDataLine.isOpen()) { // The audio mixing pipeline
949
950 // First decide on which tracks to play ... and remove any finished tracks.
951 synchronized(sequenceGraph) {
952
953 // If there are no more tracks queued for playing, then exit the
954 // playback thread.
955 if (sequenceGraph.isEmpty())
956 return;
957
958 isMoreQueued = false;
959 completedTracks.clear();
960 tracksToPlay.clear();
961
962 for (TrackSequence ts : sequenceGraph) {
963
964 // Has this track sequence finished?
965 if (ts.currentFrame > ts.endFrame || ts.stopPending)
966 completedTracks.add(ts);
967
968 // Is this track playing / is it meant to start playing in this pass?
969 else if (ts.initiationFrame <= (timelineFrame + bufferFrameLength))
970 tracksToPlay.add(ts);
971
972 // If it is not time to play this track yet, then
973 // the same holds for all following tracks,
974 // since they are ordered by their initiation time.
975 else break;
976
977 }
978
979 // Get rid of tracks that have finished playing. Notify models that they have stopped
980 for (TrackSequence staleTS : completedTracks) {
981
982 sequenceGraph.remove(staleTS);
983
984 staleTS.onStopped((staleTS.currentFrame > staleTS.endFrame)
985 ? staleTS.endFrame : staleTS.currentFrame);
986
987 //removeTrackFromGraph(staleTS, staleTS.endFrame);
988 }
989
990 trackCount = tracksToPlay.size();
991 isMoreQueued = sequenceGraph.size() > trackCount;
992
993 // If there is nothing queued and there are no tracks to play,
994 // then playback is finished.
995 if (!isMoreQueued && trackCount == 0)
996 return;
997
998 } // release lock
999
1000 totalFramesMixed = 0; // this will be set to the maximum number of frames that were mixed across all tracks
1001
1002 // Clear audio buffer
1003 for (i = 0; i < bufferFrameLength; i++) // TODO: Efficient way of clearing buffer?
1004 mixedFrameBuffer[i] = 0;
1005
1006 // Perform Mixing :
1007 // Convert the sample size to 16-bit always for best precision while
1008 // processing audio in the mix pipeline....
1009 for (TrackSequence ts : tracksToPlay) {
1010
1011 // Notify model that initiated
1012 if (!ts.isPlaying()) ts.onInitiated(timelineFrame);
1013
1014 // Skip muted / unsoloed tracks - they add nothing to the sample mix
1015 if (ts.isMuted || (isSoloEnable && !ts.isSolo)) {
1016
1017 // Make sure playback starts where initiated, if not already initiated
1018 if (ts.initiationFrame >= timelineFrame && ts.initiationFrame < (timelineFrame + bufferFrameLength)) {
1019
1020 // Get index in frame buffer where to initiate
1021 frameIndex = (int)(ts.initiationFrame - timelineFrame);
1022
1023 // Calculate the number of frames to buffer - adjust the silent track's position
1024 ts.currentFrame += (bufferFrameLength - frameIndex);
1025
1026 } else { // skip full buffer of bytes ... silenced
1027
1028 ts.currentFrame += bufferFrameLength; // currentFrame can go outside the endFrame boundary of the track
1029
1030 }
1031
1032 totalFramesMixed = bufferFrameLength;
1033
1034 } else { // Get samples and add to mix
1035
1036 // If the track is yet to initiate - part way through the buffer, then start adding bytes
1037 // at initiation point
1038 if (ts.initiationFrame >= timelineFrame && ts.initiationFrame < (timelineFrame + bufferFrameLength)) {
1039
1040 frameIndex = (int)(ts.initiationFrame - timelineFrame);
1041
1042 } else {
1043
1044 frameIndex = 0;
1045
1046 }
1047
1048 // For each frame
1049 for (;frameIndex < bufferFrameLength && ts.currentFrame <= ts.endFrame; frameIndex++) {
1050
1051 // Get sample according to byte order
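// (The byte offset is ts.currentFrame * 2, written as currentFrame + currentFrame,
// since each 16-bit mono frame occupies two bytes.)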
1052 if (ts.isBigEndian) {
1053
1054 // First byte is MSB (high order)
1055 msb = (int)ts.playbackAudioBytes[ts.currentFrame + ts.currentFrame];
1056
1057 // Second byte is LSB (low order)
1058 lsb = (int)ts.playbackAudioBytes[ts.currentFrame + ts.currentFrame + 1];
1059
1060 } else {
1061
1062 // First byte is LSB (low order)
1063 lsb = (int)ts.playbackAudioBytes[ts.currentFrame + ts.currentFrame];
1064
1065 // Second byte is MSB (high order)
1066 msb = (int)ts.playbackAudioBytes[ts.currentFrame + ts.currentFrame + 1];
1067 }
1068
1069 sample = (msb << 0x8) | (0xFF & lsb);
1070
1071 // Apply track volume
1072 sample = (int)(sample * ts.volume);
1073
1074 // Add to current mix
1075 mixedFrameBuffer[frameIndex] += sample;
1076
1077 // Get next sample
1078 ts.currentFrame++;
1079 }
1080
1081
1082 // Keep track of total frames mixed in buffer
1083 if (frameIndex > totalFramesMixed)
1084 totalFramesMixed = frameIndex;
1085 }
1086
1087 } // Mix in next track
1088
1089 // totalFramesMixed is the number of frames to play.
1090 // If it is zero then it means that there are tracks yet to be initiated, and nothing currently playing
1091 assert (totalFramesMixed <= bufferFrameLength);
1092 assert (totalFramesMixed > 0 ||
1093 (totalFramesMixed == 0 && trackCount == 0 && isMoreQueued));
1094
1095 // Post mix with master settings
1096 if (isMasterMuteOn) { // Silence sample buffer if master mute is on
1097
1098 for (i = 0; i < sampleBuffer.length; i++) {
1099 sampleBuffer[i] = 0;
1100 }
1101
1102 // Let the muted bytes play
1103 totalFramesMixed = bufferFrameLength;
1104
1105 } else { // otherwise apply master volume
1106
1107 for (i = 0; i < totalFramesMixed; i++) {
1108
1109 // Average tracks
1110 //mixedFrameBuffer[i] /= trackCount; // depreciated
1111
1112 // Apply master volume
1113 mixedFrameBuffer[i] = (int)(mixedFrameBuffer[i] * masterVolume);
1114
1115 // Clip
1116 if (mixedFrameBuffer[i] > Short.MAX_VALUE) mixedFrameBuffer[i] = Short.MAX_VALUE;
1117 else if (mixedFrameBuffer[i] < Short.MIN_VALUE) mixedFrameBuffer[i] = Short.MIN_VALUE;
1118
1119 // Convert to output format
1120 lsb = (mixedFrameBuffer[i] & 0xFF);
1121 msb = ((mixedFrameBuffer[i] >> 8) & 0xFF);
1122
1123 if (isOutputBigEndian) {
1124 sampleBuffer[i+i] = (byte)msb;
1125 sampleBuffer[i+i+1] = (byte)lsb;
1126 } else {
1127 sampleBuffer[i+i] = (byte)lsb;
1128 sampleBuffer[i+i+1] = (byte)msb;
1129 }
1130
1131 }
1132
1133 }
1134
1135 // Generate silence only if there are more tracks to be played.
1136 // Note that this could be false, but a track might have been queued after
1137 // setting the isMoreQueued flag. In such cases... silence is not wanted anyway!
1138 if (isMoreQueued) {
1139 for (i = totalFramesMixed; i < bufferFrameLength; i++) { // will skip if no need to generate silence
1140 sampleBuffer[i+i] = 0;
1141 sampleBuffer[i+i+1] = 0;
1142 }
1143 // Ensure that full buffer is played ... including the silence
1144 totalFramesMixed = bufferFrameLength;
1145 }
1146
1147 // Write processed bytes to line out stream and update the timeline frame
1148 srcDataLine.write(
1149 sampleBuffer,
1150 0,
1151 totalFramesMixed * 2);
1152
1153 // Update timeline counter for sequencing management
1154 timelineFrame += totalFramesMixed;
1155
1156 // The timelineFrame should always be larger or equal to the live frame position
1157 assert(timelineFrame >= srcDataLine.getLongFramePosition());
1158
1159 } // Next pass
1160
1161 } finally {
1162
1163 isStopping = true;
1164
1165 // Ensure line freed
1166 if (srcDataLine.isOpen()) {
1167 srcDataLine.drain(); // avoids chopping off last buffered chunk
1168 srcDataLine.close();
1169 }
1170
1171 // Clear sequence graph.
1172 synchronized(sequenceGraph) {
1173
1174 for (TrackSequence track : sequenceGraph) {
1175
1176 track.onStopped((track.currentFrame > track.endFrame)
1177 ? track.endFrame : track.currentFrame);
1178 }
1179
1180 sequenceGraph.clear();
1181
1182 }
1183
1184 // Notify observers that playback has finished.
1185 ApolloPlaybackMixer.this.fireSubjectChangedLaterOnSwingThread(
1186 new SubjectChangedEvent(ApolloSubjectChangedEvent.PLAYBACK_STOPPED));
1187
1188 }
1189
1190 }
1191
1192 }
1193
1194
1195}