List of usage examples for javax.sound.sampled.AudioFormat.toString()
@Override
public String toString()
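AudioFormat.toString() is mostly used for logging, as in the examples below. The following is a minimal, self-contained sketch (not taken from the listed sources; the class name is made up) showing what the method reports for a hand-built format. The exact wording of the returned string is implementation-dependent:

import javax.sound.sampled.AudioFormat;

public class AudioFormatToStringDemo {
    public static void main(String[] args) {
        // 44.1 kHz, 16-bit, stereo, signed PCM, little-endian
        AudioFormat format = new AudioFormat(44100.0f, 16, 2, true, false);

        // toString() summarizes encoding, sample rate, sample size, channels,
        // frame size/rate and endianness, typically something like
        // "PCM_SIGNED 44100.0 Hz, 16 bit, stereo, 4 bytes/frame, little-endian"
        // (the exact text varies between JDK implementations).
        System.out.println(format.toString());
    }
}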
From source file:com.player.BasicMP3Player.java
/**
 * Inits a DataLine.<br>
 * We check if the line supports Gain and Pan controls. From the AudioInputStream, i.e. from the
 * sound file, we fetch information about the format of the audio data. This information includes
 * the sampling frequency, the number of channels and the size of the samples, and is needed to
 * ask JavaSound for a suitable output line for this audio file. Furthermore, we have to give
 * JavaSound a hint about how big the internal buffer for the line should be. Here, we say
 * AudioSystem.NOT_SPECIFIED, signaling that we don't care about the exact size. JavaSound will
 * use some default value for the buffer size.
 */
private void createLine() throws LineUnavailableException {
    log.info("Create Line");
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        log.info("Create Line : Source format : " + sourceFormat.toString());
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                sourceFormat.getSampleRate(), 16, sourceFormat.getChannels(),
                sourceFormat.getChannels() * 2, sourceFormat.getSampleRate(), false);
        log.info("Create Line : Target format: " + targetFormat);
        // Keep a reference on the encoded stream for progress notification.
        m_encodedaudioInputStream = m_audioInputStream;
        try {
            // Get total length in bytes of the encoded stream.
            encodedLength = m_encodedaudioInputStream.available();
        } catch (IOException e) {
            log.error("Cannot get m_encodedaudioInputStream.available()", e);
        }
        // Create decoded stream.
        m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);
        m_line = (SourceDataLine) AudioSystem.getLine(info);

        /*-- Display supported controls --*/
        Control[] c = m_line.getControls();
        for (int p = 0; p < c.length; p++) {
            log.debug("Controls : " + c[p].toString());
        }
        /*-- Is Gain Control supported? --*/
        if (m_line.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
            m_gainControl = (FloatControl) m_line.getControl(FloatControl.Type.MASTER_GAIN);
            log.info("Master Gain Control : [" + m_gainControl.getMinimum() + ","
                    + m_gainControl.getMaximum() + "] " + m_gainControl.getPrecision());
        }
        /*-- Is Pan control supported? --*/
        if (m_line.isControlSupported(FloatControl.Type.PAN)) {
            m_panControl = (FloatControl) m_line.getControl(FloatControl.Type.PAN);
            log.info("Pan Control : [" + m_panControl.getMinimum() + ","
                    + m_panControl.getMaximum() + "] " + m_panControl.getPrecision());
        }
    }
}
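The method above only creates and inspects the line. As a hedged sketch of the step that usually follows in such a player (this method is not part of BasicMP3Player; it assumes the m_line and m_encodedaudioInputStream-decoded m_audioInputStream fields initialized by createLine(), plus the usual javax.sound.sampled imports):

// Sketch only: open the line with the decoded format, pump decoded bytes
// into it, then let the buffered audio play out before closing.
private void playbackLoop() throws IOException, LineUnavailableException {
    m_line.open(m_audioInputStream.getFormat(), AudioSystem.NOT_SPECIFIED);
    m_line.start();
    byte[] buffer = new byte[4096];
    int read;
    while ((read = m_audioInputStream.read(buffer, 0, buffer.length)) != -1) {
        m_line.write(buffer, 0, read); // blocks until the data is queued on the line
    }
    m_line.drain(); // let the last buffered frames play out
    m_line.stop();
    m_line.close();
}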
From source file:BasicPlayer.java
/**
 * Inits a DataLine.<br>
 *
 * We check if the line supports Gain and Pan controls.
 *
 * From the AudioInputStream, i.e. from the sound file, we fetch information about the format
 * of the audio data. This information includes the sampling frequency, the number of channels
 * and the size of the samples, and is needed to ask JavaSound for a suitable output line for
 * this audio file.
 * Furthermore, we have to give JavaSound a hint about how big the internal buffer for the line
 * should be. Here, we say AudioSystem.NOT_SPECIFIED, signaling that we don't care about the
 * exact size. JavaSound will use some default value for the buffer size.
 */
protected void createLine() throws LineUnavailableException {
    log.info("Create Line");
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        log.info("Create Line : Source format : " + sourceFormat.toString());
        int nSampleSizeInBits = sourceFormat.getSampleSizeInBits();
        if (nSampleSizeInBits <= 0)
            nSampleSizeInBits = 16;
        if ((sourceFormat.getEncoding() == AudioFormat.Encoding.ULAW)
                || (sourceFormat.getEncoding() == AudioFormat.Encoding.ALAW))
            nSampleSizeInBits = 16;
        if (nSampleSizeInBits != 8)
            nSampleSizeInBits = 16;
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                sourceFormat.getSampleRate(), nSampleSizeInBits, sourceFormat.getChannels(),
                sourceFormat.getChannels() * (nSampleSizeInBits / 8), sourceFormat.getSampleRate(), false);
        log.info("Create Line : Target format: " + targetFormat);
        // Keep a reference on the encoded stream for progress notification.
        m_encodedaudioInputStream = m_audioInputStream;
        try {
            // Get total length in bytes of the encoded stream.
            encodedLength = m_encodedaudioInputStream.available();
        } catch (IOException e) {
            log.error("Cannot get m_encodedaudioInputStream.available()", e);
        }
        // Create decoded stream.
        m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);
        Mixer mixer = getMixer(m_mixerName);
        if (mixer != null) {
            log.info("Mixer : " + mixer.getMixerInfo().toString());
            m_line = (SourceDataLine) mixer.getLine(info);
        } else {
            m_line = (SourceDataLine) AudioSystem.getLine(info);
            m_mixerName = null;
        }
        log.info("Line : " + m_line.toString());
        log.debug("Line Info : " + m_line.getLineInfo().toString());
        log.debug("Line AudioFormat: " + m_line.getFormat().toString());
    }
}
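This variant routes the line through a named mixer via a getMixer(m_mixerName) helper whose body is not shown in the listing. Below is a plausible sketch of such a helper, assuming only the standard AudioSystem/Mixer API; the real BasicPlayer implementation may differ:

// Sketch only: look up an installed mixer by its display name. Returning null
// when the name is null or unmatched makes the caller above fall back to
// AudioSystem.getLine(info).
protected Mixer getMixer(String name) {
    if (name == null) {
        return null;
    }
    for (Mixer.Info mixerInfo : AudioSystem.getMixerInfo()) {
        if (name.equals(mixerInfo.getName())) {
            return AudioSystem.getMixer(mixerInfo);
        }
    }
    return null;
}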