/Users/lyon/j4p/src/sound/player/SoundPlayback.java

package sound.player;

/**
 * Created by IntelliJ IDEA.
 * User: Douglas Lyon
 * Date: Dec 13, 2004
 * Time: 9:07:43 PM
 * Copyright DocJava, Inc.
 */

import javax.sound.sampled.*;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.StringTokenizer;

/////////////////////////////////////////////////////////////
//// SoundPlayback

/**
 * <h2>Overview</h2>
 * A buffer supporting the real-time playback of audio and the writing
 * of audio data to a sound file. Single-channel
 * (mono) and multichannel (e.g., stereo) audio are supported. This class,
 * along with SoundCapture, is intended to provide an easy-to-use interface
 * to Java Sound, Java's audio API. Java Sound supports the writing
 * of audio data to a sound file or to the computer's audio output port,
 * but only at the byte level, which is audio-format specific. This class,
 * however, provides higher-level support for writing double-valued
 * or integer-valued samples to the computer's audio output port or
 * to any supported sound file type. It is therefore useful when
 * one wishes to play back audio samples in an audio-format-independent
 * way.
 * <p/>
 * Depending on available system resources, it may be possible to
 * run an instance of this class and an instance of SoundCapture
 * concurrently. This allows for the concurrent capture, signal
 * processing, and playback of audio data.
 * <p/>
 * <h2>Usage</h2>
 * Two constructors are provided. One constructor creates a sound playback
 * object that sends audio data to the speaker. If this constructor is
 * used, there will be a small
 * delay between the time that the audio data is delivered to this
 * object and the time that the corresponding audio is actually
 * heard. This latency can be adjusted by setting the <i>bufferSize</i>
 * constructor parameter. The other constructor
 * creates a sound playback object that sends audio data to a sound
 * file.
 * <p/>
 * After calling the appropriate constructor, startPlayback()
 * must be called to initialize the audio system.
 * The putSamples() or putSamplesInt() method should then be repeatedly
 * called to deliver the audio data to the audio output device
 * (speaker or file). The audio samples delivered to putSamples()
 * should be in the proper range, or clipping will occur.
 * putSamples() expects the samples to be in the range (-1, 1).
 * putSamplesInt() expects the samples to be in the range
 * (-2^(bits_per_sample - 1), 2^(bits_per_sample - 1)), where
 * bits_per_sample is the number of bits per sample.
 * Note that it is possible (but probably
 * not useful) to interleave calls to putSamples() and
 * putSamplesInt().
 * Finally, after no more audio playback is desired, stopPlayback()
 * should be called to free up audio system resources.
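 * <p/>
 * A minimal usage sketch for speaker playback (the parameter values shown
 * are illustrative choices, not requirements of this class):
 * <pre>
 *   SoundPlayback playback = new SoundPlayback(8000.0f, 16, 1, 4000, 400);
 *   playback.startPlayback();
 *   double[][] samples = new double[1][400]; // values in the range (-1, 1)
 *   playback.putSamples(samples);            // repeat as needed
 *   playback.stopPlayback();
 * </pre>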
 * <p/>
 * <h2>Security issues</h2>Applications have no restrictions on the
 * capturing or playback of audio. Applet code is not allowed to
 * write native files by default. The .java.policy file must be
 * modified to grant applets more privileges.
 * <p/>
 * Note: Requires Java 2 v1.3.0 or later.
 *
 * @author Brian K. Vogel
 * @version $Id: SoundPlayback.java,v 1.39 2003/04/11 16:15:19 cxh Exp $
 * @since Ptolemy II 1.0
 */

public class SoundPlayback {

    /**
     * Construct a sound playback object that plays audio through the
     * computer's speaker. Note
     * that when this constructor is used, putSamples() should be
     * called often enough to prevent underflow of the internal audio
     * buffer.
     *
     * @param sampleRate       Sample rate in Hz. Must be in the range 8000
     *                         to 48000.
     * @param sampleSizeInBits Number of bits per sample (valid choices are
     *                         8 or 16).
     * @param channels         Number of audio channels. 1 for mono, 2 for
     *                         stereo, etc.
     * @param bufferSize       Requested size of the internal audio output
     *                         buffer, in samples. This controls the latency (the delay from
     *                         the time putSamples() is called until the audio is
     *                         actually heard). A lower bound on the latency is given by
     *                         (<i>bufferSize</i> / <i>sampleRate</i>) seconds.
     *                         Ideally, the
     *                         smallest value that gives acceptable performance (no underflow)
     *                         should be used. Typical values are about 1/10th of the sample
     *                         rate. For example, at a 44100 Hz sample rate, a typical buffer
     *                         size value might be 4410.
     * @param putSamplesSize   Size of the array parameter of
     *                         putSamples(). For performance reasons, this size should
     *                         be chosen smaller than <i>bufferSize</i>. Typical values
     *                         are 1/2 to 1/16th of <i>bufferSize</i>.
     */
    public SoundPlayback(float sampleRate, int sampleSizeInBits,
                         int channels, int bufferSize,
                         int putSamplesSize) {
        _isAudioPlaybackActive = false;
        // Set mode to real-time playback through the speaker.
        this._playbackMode = "speaker";
        this._sampleSizeInBits = sampleSizeInBits;
        this._sampleRate = sampleRate;
        this._channels = channels;
        this._bufferSize = bufferSize;
        this._putSamplesSize = putSamplesSize;
    }

    /**
     * Construct a sound playback object that writes audio to
     * a sound file with the specified name.  Valid sound file
     * formats are WAVE (.wav), AIFF (.aif, .aiff), and AU (.au). The file
     * format is automatically determined from the file extension.
     * The sound file will be initialized when startPlayback() is
     * called. If there is a problem creating the sound file, an
     * IOException will be thrown in startPlayback().
     * Thereafter, each call to putSamples() will add
     * <i>putSamplesSize</i> samples to the sound file. To
     * close and save the sound file, call stopPlayback().
     * <p/>
     * Note that the audio data will not actually be saved to the file,
     * <i>fileName</i>, until stopPlayback() is called. If an
     * unknown audio format is used, an exception will be thrown
     * in stopPlayback().
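     * <p/>
     * A minimal sketch of file-mode use (the file name "tone.au" and the
     * parameter values are illustrative only):
     * <pre>
     *   SoundPlayback recorder =
     *           new SoundPlayback("tone.au", 8000.0f, 16, 1, 4000, 400);
     *   recorder.startPlayback();
     *   recorder.putSamples(samples);   // samples: double[1][400], values in (-1, 1)
     *   recorder.stopPlayback();        // writes and closes the file
     * </pre>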
     *
     * @param fileName         The name of the file to create. If the file already
     *                         exists, it is overwritten. Valid sound file formats are WAVE (.wav),
     *                         AIFF (.aif, .aiff), and AU (.au). The file format to write is
     *                         determined automatically from the file extension.
     * @param sampleRate       Sample rate in Hz. Must be in the range 8000
     *                         to 48000.
     * @param sampleSizeInBits Number of bits per sample (valid choices are
     *                         8 or 16).
     * @param channels         Number of audio channels. 1 for mono, 2 for
     *                         stereo.
     * @param bufferSize       Ignored in this mode; the audio data is buffered
     *                         in memory until stopPlayback() is called.
     * @param putSamplesSize   Size of the array parameter of
     *                         putSamples(). There is no restriction on the value of
     *                         this parameter, but typical values are 64-2024.
     */
    public SoundPlayback(String fileName,
                         float sampleRate, int sampleSizeInBits,
                         int channels, int bufferSize,
                         int putSamplesSize) {
        _isAudioPlaybackActive = false;
        this._playbackMode = "file";
        this._fileName = fileName;
        this._sampleSizeInBits = sampleSizeInBits;
        this._sampleRate = sampleRate;
        this._channels = channels;
        this._productionRate = putSamplesSize;
    }

    ///////////////////////////////////////////////////////////////////
    ////                         public methods                    ////

    /**
     * Play an array of audio samples.
     * If the "play audio to speaker" constructor was called,
     * then the array of audio samples in
     * <i>putSamplesArray</i> is queued for playback. There will be a
     * latency before the audio data is actually heard, since the
     * audio data in <i>putSamplesArray</i> is queued to an
     * internal audio buffer. The size of the internal buffer
     * is set by the constructor. A lower bound on the latency
     * is given by (<i>bufferSize</i> / <i>sampleRate</i>)
     * seconds. If the "play audio to speaker" mode is
     * used, then this method should be invoked often
     * enough to prevent underflow of the internal audio buffer.
     * Underflow is undesirable since it will cause audible gaps
     * in audio playback, but no exception or error condition will
     * occur. If the caller attempts to write more data than can
     * be written, this method blocks until the data can be
     * written to the internal audio buffer.
     * <p/>
     * If the "write audio to file" constructor was used,
     * then the audio data contained in <i>putSamplesArray</i>
     * is appended to the sound file specified in the constructor. Note that
     * underflow cannot occur in this case.
     * <p/>
     * The samples should be in the range (-1, 1). Samples that are
     * outside this range will be hard-clipped so that they fall
     * within this range.
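     * <p/>
     * For example, a mono buffer of <i>putSamplesSize</i> = 400 samples
     * might be filled and played as follows (the values shown are
     * illustrative only):
     * <pre>
     *   double[][] samples = new double[1][400];
     *   for (int i = 0; i &lt; 400; i++) {
     *       samples[0][i] = 0.5 * Math.sin(2 * Math.PI * 440 * i / 8000.0);
     *   }
     *   playback.putSamples(samples);
     * </pre>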
     *
     * @param putSamplesArray A two-dimensional array containing
     *                        the samples to play or write to a file. The first index
     *                        represents the channel number (0 for the first channel, 1 for the
     *                        second channel, etc.). The second index represents the
     *                        sample index within a channel. For example,
     *                        putSamplesArray[n][m] contains the (m+1)th sample
     *                        of the (n+1)th channel. putSamplesArray should be a
     *                        rectangular array such that putSamplesArray.length gives
     *                        the number of channels and putSamplesArray[n].length is
     *                        equal to <i>putSamplesSize</i>, for all channels n. This
     *                        is not actually checked, however.
     * @throws IOException           If there is a problem playing audio.
     * @throws IllegalStateException If audio playback is currently
     *                               inactive. That is, if startPlayback() has not yet been called
     *                               or if stopPlayback() has already been called.
     */
    public void putSamples(double[][] putSamplesArray) throws IOException,
            IllegalStateException {
        if (_isAudioPlaybackActive) {
            if (_playbackMode.equals("speaker")) {

                // Convert the array of double valued samples into
                // the proper byte array format.
                _data = _doubleArrayToByteArray(putSamplesArray,
                        _bytesPerSample,
                        _channels);

                // Note: _data is a byte array containing the data to
                // be written to the output device.

                // Now write the array to the output device.
                _sourceLine.write(_data, 0, _putSamplesSize * _frameSizeInBytes);
            } else if (_playbackMode.equals("file")) {
                // Convert the array of double valued samples into
                // the proper byte array format.
                _data = _doubleArrayToByteArray(putSamplesArray,
                        _bytesPerSample,
                        _channels);
                // Add the new audio data to the file buffer array.
                for (int i = 0; i < _data.length; i++) {
                    _toFileBuffer.add(new Byte(_data[i]));
                }
            } else {
                // Should not happen, since the mode is set by the constructor.
            }
        } else {
            throw new IllegalStateException("SoundPlayback: " +
                    "putSamples() was called while audio playback was" +
                    " inactive (startPlayback() was never called or " +
                    "stopPlayback() has already been called).");
        }
    }

    /**
     * Play an array of audio samples.
     * If the "play audio to speaker" constructor was called,
     * then the array of audio samples in
     * <i>putSamplesArray</i> is queued for playback. The samples should be
     * in the range (-2^(bits_per_sample - 1), 2^(bits_per_sample - 1)).
     * There will be a latency before
     * the audio data is actually heard, since the
     * audio data in <i>putSamplesArray</i> is queued to an
     * internal audio buffer. The size of the internal buffer
     * is set by the constructor. A lower bound on the latency
     * is given by (<i>bufferSize</i> / <i>sampleRate</i>)
     * seconds. If the "play audio to speaker" mode is
     * used, then this method should be invoked often
     * enough to prevent underflow of the internal audio buffer.
     * <p/>
     * If the "write audio to file" constructor was used,
     * then the audio data contained in <i>putSamplesArray</i>
     * is appended to the sound file specified in the constructor.
     * <p/>
     * The samples should be in the range
     * (-2^(bits_per_sample - 1), 2^(bits_per_sample - 1)). Samples
     * that are outside this range will be hard-clipped.
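     * <p/>
     * For example, with 16 bits per sample the valid range is
     * (-32768, 32768), so a near-full-scale sample could be queued as
     * follows (the values shown are illustrative only):
     * <pre>
     *   int[][] samples = new int[1][400];
     *   samples[0][0] = 32767;   // near positive full scale for 16 bits
     *   playback.putSamplesInt(samples);
     * </pre>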
     *
     * @param putSamplesArray A two-dimensional array containing
     *                        the samples to play or write to a file. The first index
     *                        represents the channel number (0 for the first channel, 1 for the
     *                        second channel, etc.). The second index represents the
     *                        sample index within a channel. For example,
     *                        putSamplesArray[n][m] contains the (m+1)th sample
     *                        of the (n+1)th channel. putSamplesArray should be a
     *                        rectangular array such that putSamplesArray.length gives
     *                        the number of channels and putSamplesArray[n].length is
     *                        equal to <i>putSamplesSize</i>, for all channels n. This
     *                        is not actually checked, however.
     * @throws IOException           If there is a problem playing audio.
     * @throws IllegalStateException If audio playback is currently
     *                               inactive. That is, if startPlayback() has not yet been called
     *                               or if stopPlayback() has already been called.
     */
    public void putSamplesInt(int[][] putSamplesArray) throws IOException,
            IllegalStateException {
        if (_isAudioPlaybackActive) {
            if (_playbackMode.equals("speaker")) {

                // Convert the array of integer valued samples into
                // the proper byte array format.
                _data = _intArrayToByteArray(putSamplesArray,
                        _bytesPerSample,
                        _channels);

                // Note: _data is a byte array containing the data to
                // be written to the output device.

                // Now write the array to the output device.
                _sourceLine.write(_data, 0, _putSamplesSize * _frameSizeInBytes);
            } else if (_playbackMode.equals("file")) {
                // Convert the array of integer valued samples into
                // the proper byte array format.
                _data = _intArrayToByteArray(putSamplesArray,
                        _bytesPerSample,
                        _channels);
                // Add the new audio data to the file buffer array.
                for (int i = 0; i < _data.length; i++) {
                    _toFileBuffer.add(new Byte(_data[i]));
                }
            } else {
                // Should not happen, since the mode is set by the constructor.
            }
        } else {
            throw new IllegalStateException("SoundPlayback: " +
                    "putSamplesInt() was called while audio playback was" +
                    " inactive (startPlayback() was never called or " +
                    "stopPlayback() has already been called).");
        }
    }

    /**
     * Perform initialization for the playback of audio data.
     * This method must be invoked prior
     * to the first invocation of putSamples(). This method
     * must not be called more than once between invocations of
     * stopPlayback(), or an exception will be thrown.
     *
     * @throws IOException           If there is a problem setting up
     *                               the system for audio playback. This will occur if
     *                               a file cannot be opened or if the audio output port cannot
     *                               be accessed.
     * @throws IllegalStateException If this method is called
     *                               more than once between invocations of stopPlayback().
     */
    public void startPlayback() throws IOException,
            IllegalStateException {
        if (!_isAudioPlaybackActive) {
            if (_playbackMode.equals("speaker")) {
                // Real-time playback to the speaker.
                _startPlaybackRealTime();
            } else if (_playbackMode.equals("file")) {
                // Record data to a sound file.
                _startPlaybackToFile();
            } else {
                throw new IOException("SoundPlayback: " +
                        "startPlayback(): unknown playback mode: " +
                        _playbackMode);
            }
            _bytesPerSample = _sampleSizeInBits / 8;
            _isAudioPlaybackActive = true;
        } else {
            throw new IllegalStateException("SoundPlayback: " +
                    "startPlayback() was called while audio playback was" +
                    " already active (startPlayback() was called " +
                    "more than once between invocations of stopPlayback()).");
        }
    }

    /**
     * Stop playing/writing audio. This method should be called when
     * no more calls to putSamples() are required, so
     * that the system resources involved in the audio playback
     * may be freed.
     * <p/>
     * If the "write audio data to file" constructor was used, then
     * the sound file specified by the constructor is saved and
     * closed.
     *
     * @throws IOException If there is a problem closing the
     *                     audio resources, or if the "write audio data
     *                     to file" constructor was used and the sound file has an
     *                     unsupported format.
     */
    public void stopPlayback() throws IOException {
        if (_isAudioPlaybackActive) {
            if (_playbackMode.equals("speaker")) {
                // Stop real-time playback to the speaker.
                if (_sourceLine != null) {
                    _sourceLine.drain();
                    _sourceLine.stop();
                    _sourceLine.close();
                }
                _sourceLine = null;
            } else if (_playbackMode.equals("file")) {
                // Save the buffered data to the sound file.
                _stopPlaybackToFile();
            } else {
                // Should not happen.
            }
        }
        _isAudioPlaybackActive = false;
    }

    ///////////////////////////////////////////////////////////////////
    ////                         private methods                   ////

    private void _startPlaybackRealTime() throws IOException {
        boolean signed = true;
        boolean bigEndian = true;

        AudioFormat format = new AudioFormat((float) _sampleRate,
                _sampleSizeInBits,
                _channels, signed, bigEndian);

        _frameSizeInBytes = format.getFrameSize();

        DataLine.Info sourceInfo = new DataLine.Info(SourceDataLine.class,
                format,
                AudioSystem.NOT_SPECIFIED);

        // Get and open the source data line for playback.
        try {
            // A SourceDataLine is really a target for
            // audio data, not a source.
            _sourceLine = (SourceDataLine) AudioSystem.getLine(sourceInfo);
            // Open the line and suggest a buffer size (in bytes) to use for
            // the internal audio buffer.
            _sourceLine.open(format, _bufferSize * _frameSizeInBytes);

        } catch (LineUnavailableException ex) {
            throw new IOException("Unable to open the line for " +
                    "real-time audio playback: " + ex);
        }

        // Pre-allocate a byte buffer large enough to hold one
        // putSamples() call's worth of audio frames.
        _data = new byte[_putSamplesSize * _frameSizeInBytes];

        // Start the source data line.
        _sourceLine.start();
    }

    private void _startPlaybackToFile() {
        // FIXME: Performance is not great when the incoming audio
        // samples are being captured in real-time, possibly
        // due to resizing of the ArrayList.
        //
        // Array to hold all data to be saved to the file. It grows
        // as new data are added (via putSamples()).
        // Each element is a byte of audio data.
        _toFileBuffer = new ArrayList();

        boolean signed = true;
        boolean bigEndian = true;

        _playToFileFormat = new AudioFormat((float) _sampleRate,
                _sampleSizeInBits,
                _channels, signed, bigEndian);

        _frameSizeInBytes = _playToFileFormat.getFrameSize();
    }

    private void _stopPlaybackToFile() throws IOException {
        int size = _toFileBuffer.size();
        byte[] audioBytes = new byte[size];
        for (int i = 0; i < size; i++) {
            Byte j = (Byte) _toFileBuffer.get(i);
            audioBytes[i] = j.byteValue();
        }
        ByteArrayInputStream byteInputArrayStream =
                new ByteArrayInputStream(audioBytes);

        AudioInputStream audioInputStream =
                new AudioInputStream(byteInputArrayStream,
                        _playToFileFormat,
                        audioBytes.length / _frameSizeInBytes);

        File outFile = new File(_fileName);

        try {
            StringTokenizer st = new StringTokenizer(_fileName, ".");
            // Do error checking:
            if (st.countTokens() != 2) {
                throw new IOException("Error: Incorrect " +
                        "file name format. " +
                        "Format: filename.extension");
            }
            st.nextToken(); // Advance to the file extension.

            String fileExtension = st.nextToken();

            if (fileExtension.equalsIgnoreCase("au")) {
                // Save the file.
                AudioSystem.write(audioInputStream,
                        AudioFileFormat.Type.AU, outFile);
            } else if (fileExtension.equalsIgnoreCase("aiff")) {
                // Save the file.
                AudioSystem.write(audioInputStream,
                        AudioFileFormat.Type.AIFF, outFile);
            } else if (fileExtension.equalsIgnoreCase("wave")) {
                // Save the file.
                AudioSystem.write(audioInputStream,
                        AudioFileFormat.Type.WAVE, outFile);
            } else if (fileExtension.equalsIgnoreCase("wav")) {
                // Save the file.
                AudioSystem.write(audioInputStream,
                        AudioFileFormat.Type.WAVE, outFile);
            } else if (fileExtension.equalsIgnoreCase("aifc")) {
                // Save the file.
                AudioSystem.write(audioInputStream,
                        AudioFileFormat.Type.AIFC, outFile);
            } else {
                throw new IOException("Error saving " +
                        "file: Unknown file format: " +
                        fileExtension);
            }
        } catch (IOException e) {
            throw new IOException("SoundPlayback: error saving" +
                    " file: " + e);
        }
    }

    /* Convert a double array of audio samples into a byte array of
     * audio samples in linear signed PCM big-endian format. The
     * samples contained in <i>doubleArray</i> should be in the
     * range (-1, 1). Samples outside this range will be hard-clipped
     * to the range (-1, 1).
     * @param doubleArray Two-dimensional array holding audio samples.
     * For each channel, m, doubleArray[m] is a single-dimensional
     * array containing samples for channel m.
     * @param bytesPerSample Number of bytes per sample. The values
     * supported by this method are 1, 2, 3, and 4 bytes per sample
     * (i.e., 8, 16, 24, or 32 bits per sample).
     * @param channels Number of audio channels.
     * @return The linear signed PCM big-endian byte array
     * representation of <i>doubleArray</i>. The length of
     * the returned array is (doubleArray[0].length*bytesPerSample*channels).
     */
    private byte[] _doubleArrayToByteArray(double[][] doubleArray,
                                           int bytesPerSample, int channels) {
        // All channels had better have the same number
        // of samples! This is not checked!
        int lengthInSamples = doubleArray[0].length;
        //double maxSample = Math.pow(2, 8 * bytesPerSample - 1);
        // Could use the above line, but hopefully the code below will
        // be faster.
        double maxSample;
        double maxDoubleValuedSample;
        if (bytesPerSample == 2) {
            maxSample = 32768;          // 2^15
        } else if (bytesPerSample == 1) {
            maxSample = 128;            // 2^7
        } else if (bytesPerSample == 3) {
            maxSample = 8388608;        // 2^23
        } else if (bytesPerSample == 4) {
            maxSample = 2147483648.0;   // 2^31
        } else {
            // Should not happen.
            maxSample = 0;
        }
        maxDoubleValuedSample = (maxSample - 2) / maxSample;
        byte[] byteArray =
                new byte[lengthInSamples * bytesPerSample * channels];
        byte[] b = new byte[bytesPerSample];
        for (int currSamp = 0; currSamp < lengthInSamples; currSamp++) {

            int l;
            // For each channel,
            for (int currChannel = 0; currChannel < channels; currChannel++) {
                // Perform clipping, if necessary.
                if (doubleArray[currChannel][currSamp] >=
                        maxDoubleValuedSample) {
                    l = (int) maxSample - 2;
                } else if (doubleArray[currChannel][currSamp] <=
                        -maxDoubleValuedSample) {
                    l = (int) (-maxSample) + 2;
                } else {
                    // Signed integer representation of the current sample of
                    // the current channel.
                    l = (int) (doubleArray[currChannel][currSamp] * maxSample);
                }
                // Create the byte representation of the current sample.
                for (int i = 0; i < bytesPerSample; i += 1, l >>= 8) {
                    b[bytesPerSample - i - 1] = (byte) l;
                }
                // Copy the byte representation of the current sample to
                // the linear signed PCM big-endian formatted byte array.
                for (int i = 0; i < bytesPerSample; i += 1) {
                    byteArray[currSamp * bytesPerSample * channels +
                            bytesPerSample * currChannel + i] = b[i];
                }
            }
        }
        return byteArray;
    }

    /* Convert an integer array of audio samples into a byte array of
     * audio samples in linear signed PCM big-endian format.
     * The samples contained in <i>intArray</i> should be in the range
     * (-2^(bits_per_sample - 1), 2^(bits_per_sample - 1)). Samples that
     * are outside this range will be hard-clipped to fall within this
     * range.
     * @param intArray Two-dimensional array holding audio samples.
     * For each channel, m, intArray[m] is a single-dimensional
     * array containing samples for channel m.
     * @param bytesPerSample Number of bytes per sample. The values
     * supported by this method are 1, 2, 3, and 4 bytes per sample
     * (i.e., 8, 16, 24, or 32 bits per sample).
     * @param channels Number of audio channels.
     * @return The linear signed PCM big-endian byte array
     * representation of <i>intArray</i>. The length of
     * the returned array is (intArray[0].length*bytesPerSample*channels).
     */
    private byte[] _intArrayToByteArray(int[][] intArray,
                                        int bytesPerSample, int channels) {
        // All channels had better have the same number
        // of samples! This is not checked!
        int lengthInSamples = intArray[0].length;

        byte[] byteArray =
                new byte[lengthInSamples * bytesPerSample * channels];
        byte[] b = new byte[bytesPerSample];
        for (int currSamp = 0; currSamp < lengthInSamples; currSamp++) {

            // For each channel,
            for (int currChannel = 0; currChannel < channels; currChannel++) {
                // Signed integer representation of the current sample of the
                // current channel.
                int l = intArray[currChannel][currSamp];
                // Perform clipping, if necessary.
                int maxSample;
                if (bytesPerSample == 2) {
                    maxSample = 32768;          // 2^15
                } else if (bytesPerSample == 1) {
                    maxSample = 128;            // 2^7
                } else if (bytesPerSample == 3) {
                    maxSample = 8388608;        // 2^23
                } else if (bytesPerSample == 4) {
                    // 2^31 does not fit in an int, so use 2^31 - 1.
                    maxSample = 2147483647;
                } else {
                    // Should not happen.
                    maxSample = 0;
                }
                if (l > (maxSample - 1)) {
                    l = maxSample - 1;
                } else if (l < (-maxSample + 1)) {
                    l = -maxSample + 1;
                }
                // Create the byte representation of the current sample.
                for (int i = 0; i < bytesPerSample; i += 1, l >>= 8) {
                    b[bytesPerSample - i - 1] = (byte) l;
                }
                // Copy the byte representation of the current sample to
                // the linear signed PCM big-endian formatted byte array.
                for (int i = 0; i < bytesPerSample; i += 1) {
                    byteArray[currSamp * bytesPerSample * channels +
                            bytesPerSample * currChannel + i] = b[i];
                }
            }
        }
        return byteArray;
    }

    ///////////////////////////////////////////////////////////////////
    ////                         private variables                 ////

    private int _productionRate;
    private String _fileName;
    private String _playbackMode;
    private int _sampleSizeInBits;
    private int _putSamplesSize;
    private float _sampleRate;
    private int _channels;
    private int _bufferSize;
    private SourceDataLine _sourceLine;
    // Array of audio samples in byte format.
    private byte[] _data;
    private int _frameSizeInBytes;
    private ArrayList _toFileBuffer;
    // This is the format of _toFileBuffer.
    private AudioFormat _playToFileFormat;
    private int _bytesPerSample;
    private boolean _isAudioPlaybackActive;
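
    /**
     * A minimal, self-contained demonstration of speaker playback.
     * The parameter values used here (8 kHz sample rate, 16 bits, mono,
     * a 440 Hz tone) are illustrative choices only, not requirements
     * of this class.
     */
    public static void main(String[] args) {
        int putSamplesSize = 400;
        SoundPlayback playback =
                new SoundPlayback(8000.0f, 16, 1, 4000, putSamplesSize);
        try {
            playback.startPlayback();
            double[][] samples = new double[1][putSamplesSize];
            int n = 0;
            // Play roughly two seconds of a 440 Hz sine tone.
            for (int block = 0; block < 40; block++) {
                for (int i = 0; i < putSamplesSize; i++, n++) {
                    samples[0][i] =
                            0.5 * Math.sin(2.0 * Math.PI * 440.0 * n / 8000.0);
                }
                playback.putSamples(samples);
            }
            playback.stopPlayback();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }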
}