List of usage examples for javax.sound.sampled AudioSystem getAudioInputStream
public static AudioInputStream getAudioInputStream(AudioFormat targetFormat, AudioInputStream sourceStream)
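This overload returns a stream that delivers its audio data in targetFormat while pulling from sourceStream; it only works when an installed format conversion provider supports the source-to-target conversion, which AudioSystem.isConversionSupported can verify first. A minimal, self-contained sketch (the file name "input.wav" is only illustrative):

import java.io.File;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class ConvertToPcmExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical input file; any format with an installed reader works.
        AudioInputStream source = AudioSystem.getAudioInputStream(new File("input.wav"));
        AudioFormat sourceFormat = source.getFormat();

        // Target: 16-bit signed PCM, same sample rate and channel count, little endian.
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                sourceFormat.getSampleRate(), 16, sourceFormat.getChannels(),
                sourceFormat.getChannels() * 2, sourceFormat.getSampleRate(), false);

        // Only attempt the conversion if a provider supports it.
        if (AudioSystem.isConversionSupported(targetFormat, sourceFormat)) {
            AudioInputStream converted = AudioSystem.getAudioInputStream(targetFormat, source);
            System.out.println("Converted format: " + converted.getFormat());
            converted.close();
        } else {
            System.out.println("Conversion not supported: " + sourceFormat + " -> " + targetFormat);
        }
        source.close();
    }
}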
From source file:com.limegroup.gnutella.gui.mp3.BasicPlayer.java
/**
 * Inits a DataLine.
 *
 * We check if the line supports Volume and Pan controls.
 *
 * From the AudioInputStream, i.e. from the sound file, we fetch
 * information about the format of the audio data. This information
 * includes the sampling frequency, the number of channels and the
 * size of the samples, and is needed to ask JavaSound for a suitable
 * output line for this audio file.
 * Furthermore, we have to give JavaSound a hint about how big the
 * internal buffer for the line should be. Here we pass
 * AudioSystem.NOT_SPECIFIED, signaling that we don't care about the
 * exact size; JavaSound will use a default value for the buffer size.
 */
private void createLine() throws LineUnavailableException {
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        if (LOG.isDebugEnabled())
            LOG.debug("Source format : " + sourceFormat);
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                sourceFormat.getSampleRate(), 16, sourceFormat.getChannels(),
                sourceFormat.getChannels() * 2, sourceFormat.getSampleRate(), false);
        if (LOG.isDebugEnabled())
            LOG.debug("Target format: " + targetFormat);
        m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        if (LOG.isDebugEnabled())
            LOG.debug("Create Line : " + audioFormat);
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat,
                AudioSystem.NOT_SPECIFIED);
        m_line = (SourceDataLine) AudioSystem.getLine(info);

        /*-- Display supported controls --*/
        Control[] c = m_line.getControls();
        for (int p = 0; p < c.length; p++) {
            if (LOG.isDebugEnabled())
                LOG.debug("Controls : " + c[p].toString());
        }
        /*-- Is Gain Control supported ? --*/
        if (m_line.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
            m_gainControl = (FloatControl) m_line.getControl(FloatControl.Type.MASTER_GAIN);
            if (LOG.isDebugEnabled())
                LOG.debug("Master Gain Control : [" + m_gainControl.getMinimum() + ","
                        + m_gainControl.getMaximum() + "]," + m_gainControl.getPrecision());
        }
        /*-- Is Pan control supported ? --*/
        if (m_line.isControlSupported(FloatControl.Type.PAN)) {
            m_panControl = (FloatControl) m_line.getControl(FloatControl.Type.PAN);
            if (LOG.isDebugEnabled())
                LOG.debug("Pan Control : [" + m_panControl.getMinimum() + ","
                        + m_panControl.getMaximum() + "]," + m_panControl.getPrecision());
        }
    }
}
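createLine only prepares the decoded stream and the output line; the playback loop lives elsewhere in BasicPlayer. A minimal sketch of such a loop, not taken from the original source and assuming the m_line and m_audioInputStream fields above:

private void playLoop() throws IOException, LineUnavailableException {
    AudioFormat format = m_audioInputStream.getFormat();
    m_line.open(format);
    m_line.start();
    byte[] buffer = new byte[4096];
    int read;
    // Pull decoded PCM from the converted stream and push it to the line.
    while ((read = m_audioInputStream.read(buffer, 0, buffer.length)) != -1) {
        m_line.write(buffer, 0, read);
    }
    m_line.drain();   // let the buffered audio finish playing
    m_line.stop();
    m_line.close();
}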
From source file:com.player.BasicMP3Player.java
/**
 * Inits a DataLine.
 *
 * We check if the line supports Gain and Pan controls. From the
 * AudioInputStream, i.e. from the sound file, we fetch information about
 * the format of the audio data. This information includes the sampling
 * frequency, the number of channels and the size of the samples, and is
 * needed to ask JavaSound for a suitable output line for this audio file.
 * Furthermore, we have to give JavaSound a hint about how big the internal
 * buffer for the line should be. Here we pass AudioSystem.NOT_SPECIFIED,
 * signaling that we don't care about the exact size; JavaSound will use a
 * default value for the buffer size.
 */
private void createLine() throws LineUnavailableException {
    log.info("Create Line");
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        log.info("Create Line : Source format : " + sourceFormat.toString());
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                sourceFormat.getSampleRate(), 16, sourceFormat.getChannels(),
                sourceFormat.getChannels() * 2, sourceFormat.getSampleRate(), false);
        log.info("Create Line : Target format: " + targetFormat);
        // Keep a reference to the encoded stream for progress notification.
        m_encodedaudioInputStream = m_audioInputStream;
        try {
            // Get total length in bytes of the encoded stream.
            encodedLength = m_encodedaudioInputStream.available();
        } catch (IOException e) {
            log.error("Cannot get m_encodedaudioInputStream.available()", e);
        }
        // Create decoded stream.
        m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat,
                AudioSystem.NOT_SPECIFIED);
        m_line = (SourceDataLine) AudioSystem.getLine(info);

        /*-- Display supported controls --*/
        Control[] c = m_line.getControls();
        for (int p = 0; p < c.length; p++) {
            log.debug("Controls : " + c[p].toString());
        }
        /*-- Is Gain Control supported ? --*/
        if (m_line.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
            m_gainControl = (FloatControl) m_line.getControl(FloatControl.Type.MASTER_GAIN);
            log.info("Master Gain Control : [" + m_gainControl.getMinimum() + ","
                    + m_gainControl.getMaximum() + "] " + m_gainControl.getPrecision());
        }
        /*-- Is Pan control supported ? --*/
        if (m_line.isControlSupported(FloatControl.Type.PAN)) {
            m_panControl = (FloatControl) m_line.getControl(FloatControl.Type.PAN);
            log.info("Pan Control : [" + m_panControl.getMinimum() + ","
                    + m_panControl.getMaximum() + "] " + m_panControl.getPrecision());
        }
    }
}
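The encoded stream reference and encodedLength captured above exist so that playback progress can be estimated while the decoded stream is consumed. The estimate itself is not part of this excerpt; a rough sketch, assuming the fields above:

// Returns an approximate playback position in the encoded stream, in bytes.
// available() only reports the bytes still readable, so this is an estimate.
private long getEncodedStreamPosition() {
    try {
        return encodedLength - m_encodedaudioInputStream.available();
    } catch (IOException e) {
        log.error("Cannot read encoded stream position", e);
        return -1;
    }
}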
From source file:BasicPlayer.java
/**
 * Inits a DataLine.
 *
 * We check if the line supports Gain and Pan controls.
 *
 * From the AudioInputStream, i.e. from the sound file, we fetch information
 * about the format of the audio data. This information includes the sampling
 * frequency, the number of channels and the size of the samples, and is
 * needed to ask JavaSound for a suitable output line for this audio file.
 * Furthermore, we have to give JavaSound a hint about how big the internal
 * buffer for the line should be. Here we pass AudioSystem.NOT_SPECIFIED,
 * signaling that we don't care about the exact size; JavaSound will use a
 * default value for the buffer size.
 */
protected void createLine() throws LineUnavailableException {
    log.info("Create Line");
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        log.info("Create Line : Source format : " + sourceFormat.toString());
        int nSampleSizeInBits = sourceFormat.getSampleSizeInBits();
        if (nSampleSizeInBits <= 0)
            nSampleSizeInBits = 16;
        if ((sourceFormat.getEncoding() == AudioFormat.Encoding.ULAW)
                || (sourceFormat.getEncoding() == AudioFormat.Encoding.ALAW))
            nSampleSizeInBits = 16;
        if (nSampleSizeInBits != 8)
            nSampleSizeInBits = 16;
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                sourceFormat.getSampleRate(), nSampleSizeInBits, sourceFormat.getChannels(),
                sourceFormat.getChannels() * (nSampleSizeInBits / 8),
                sourceFormat.getSampleRate(), false);
        log.info("Create Line : Target format: " + targetFormat);
        // Keep a reference to the encoded stream for progress notification.
        m_encodedaudioInputStream = m_audioInputStream;
        try {
            // Get total length in bytes of the encoded stream.
            encodedLength = m_encodedaudioInputStream.available();
        } catch (IOException e) {
            log.error("Cannot get m_encodedaudioInputStream.available()", e);
        }
        // Create decoded stream.
        m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat,
                AudioSystem.NOT_SPECIFIED);
        Mixer mixer = getMixer(m_mixerName);
        if (mixer != null) {
            log.info("Mixer : " + mixer.getMixerInfo().toString());
            m_line = (SourceDataLine) mixer.getLine(info);
        } else {
            m_line = (SourceDataLine) AudioSystem.getLine(info);
            m_mixerName = null;
        }
        log.info("Line : " + m_line.toString());
        log.debug("Line Info : " + m_line.getLineInfo().toString());
        log.debug("Line AudioFormat: " + m_line.getFormat().toString());
    }
}
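The getMixer(m_mixerName) helper used above is not included in this excerpt. A possible lookup by mixer name via AudioSystem.getMixerInfo(), shown as a sketch rather than the original implementation:

// Looks up an installed mixer by name; returns null when no name is given or
// no installed mixer matches, so createLine falls back to AudioSystem.getLine.
private Mixer getMixer(String name) {
    if (name == null) {
        return null;
    }
    for (Mixer.Info info : AudioSystem.getMixerInfo()) {
        if (name.equals(info.getName())) {
            return AudioSystem.getMixer(info);
        }
    }
    return null;
}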
From source file:sx.blah.discord.api.internal.DiscordUtils.java
/**
 * Converts an {@link AudioInputStream} to 48000Hz 16 bit stereo signed Big Endian PCM format.
 *
 * @param stream The original stream.
 * @return The PCM encoded stream.
 */
public static AudioInputStream getPCMStream(AudioInputStream stream) {
    AudioFormat baseFormat = stream.getFormat();
    // Convert to PCM data first. If the data is already PCM, this does not change anything.
    AudioFormat toPCM = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
            baseFormat.getSampleRate(), //AudioConnection.OPUS_SAMPLE_RATE,
            baseFormat.getSampleSizeInBits() != -1 ? baseFormat.getSampleSizeInBits() : 16,
            baseFormat.getChannels(),
            // If we are given a frame size, use it. Otherwise, assume 16 bits (2 bytes) per channel.
            baseFormat.getFrameSize() != -1 ? baseFormat.getFrameSize() : 2 * baseFormat.getChannels(),
            baseFormat.getFrameRate() != -1 ? baseFormat.getFrameRate() : baseFormat.getSampleRate(),
            baseFormat.isBigEndian());
    AudioInputStream pcmStream = AudioSystem.getAudioInputStream(toPCM, stream);

    // Then resample to 48000 Hz and ensure the data is big endian.
    AudioFormat audioFormat = new AudioFormat(toPCM.getEncoding(), OpusUtil.OPUS_SAMPLE_RATE,
            toPCM.getSampleSizeInBits(), toPCM.getChannels(), toPCM.getFrameSize(),
            toPCM.getFrameRate(), true);
    return AudioSystem.getAudioInputStream(audioFormat, pcmStream);
}
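A possible call site for getPCMStream, shown as a sketch; the file name is only illustrative, and decoding MP3 input requires a matching service provider (for example mp3spi) on the classpath:

import java.io.File;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import sx.blah.discord.api.internal.DiscordUtils;

public class PcmStreamExample {
    public static void main(String[] args) throws Exception {
        // Open the encoded source, then convert it with the helper above.
        AudioInputStream source = AudioSystem.getAudioInputStream(new File("clip.mp3"));
        AudioInputStream pcm = DiscordUtils.getPCMStream(source);
        System.out.println("PCM format: " + pcm.getFormat());
        pcm.close();
        source.close();
    }
}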