Example usage for javax.sound.sampled AudioFormat getSampleRate

List of usage examples for javax.sound.sampled AudioFormat getSampleRate

Introduction

On this page you can find example usages of javax.sound.sampled AudioFormat getSampleRate, drawn from the source files listed below.

Prototype

public float getSampleRate() 

Document

Obtains the sample rate.
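
Before the project examples, a minimal sketch of the call itself; the file path "sample.wav" is a placeholder:

import java.io.File;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class SampleRateDemo {
    public static void main(String[] args) throws Exception {
        // Open the audio file and inspect its format.
        try (AudioInputStream ais = AudioSystem.getAudioInputStream(new File("sample.wav"))) {
            AudioFormat format = ais.getFormat();
            // getSampleRate() returns samples per second (per channel) as a float;
            // AudioSystem.NOT_SPECIFIED (-1) means the rate is unknown.
            System.out.println("Sample rate: " + format.getSampleRate() + " Hz");
        }
    }
}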

Usage

From source file:io.github.dsheirer.record.wave.WaveWriter.java

/**
 * Creates an audio format chunk
 */
public static ByteBuffer getFormatChunk(AudioFormat format) {
    ByteBuffer header = ByteBuffer.allocate(24).order(ByteOrder.LITTLE_ENDIAN);

    //Format descriptor
    header.put(FORMAT_CHUNK_ID.getBytes());
    header.putInt(FORMAT_CHUNK_LENGTH);
    header.putShort(FORMAT_UNCOMPRESSED_PCM);
    header.putShort((short) format.getChannels());
    header.putInt((int) format.getSampleRate());

    //Byte Rate = sample rate * channels * bits per sample / 8
    int frameByteRate = format.getChannels() * format.getSampleSizeInBits() / 8;
    int byteRate = (int) (format.getSampleRate() * frameByteRate);
    header.putInt(byteRate);

    //Block Align
    header.putShort((short) frameByteRate);

    //Bits per Sample
    header.putShort((short) format.getSampleSizeInBits());

    //Reset the buffer pointer to 0
    header.position(0);

    return header;
}
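
As a concrete check of the arithmetic: for CD audio (44100 Hz, 16-bit, stereo) the chunk above carries byte rate 44100 * 2 * 2 = 176400 and block align 4. A hypothetical caller, assumed to live in the same class as getFormatChunk, might append the chunk to a WAV file under construction:

// Hypothetical helper: write the format chunk for the given format to an open channel.
public static void writeFormatChunk(AudioFormat format, FileChannel channel) throws IOException {
    ByteBuffer chunk = getFormatChunk(format);
    while (chunk.hasRemaining()) {
        channel.write(chunk);
    }
}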

From source file:it.univpm.deit.semedia.musicuri.core.Toolset.java

/**
 * Extracts/encodes the AudioSignatureDS for a given audio file
 * @param file the audio file to encode 
 * @return a string containing the whole XML-formatted MPEG-7 description document
 */
public static String createMPEG7Description(File file) throws IOException {
    if (isSupportedAudioFile(file)) {
        System.out.println("Extracting Query Audio Signature");
        String xmlString = null;
        Config configuration = new ConfigDefault();
        configuration.enableAll(false);
        configuration.setValue("AudioSignature", "enable", true);
        configuration.setValue("AudioSignature", "decimation", 32);
        //System.out.println("File: " + file.getName());

        AudioInputStream ais = null;
        try {
            ais = AudioSystem.getAudioInputStream(file);
            AudioFormat f = ais.getFormat();
            if (f.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
                System.out.println("Converting Audio stream format");
                ais = AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, ais);
                f = ais.getFormat();
            }

            String workingDir = getCWD();
            String tempFilename = workingDir + "/temp.wav";
            AudioSystem.write(ais, AudioFileFormat.Type.WAVE, new File(tempFilename));

            File tmpFile = new File(tempFilename);
            AudioInFloatSampled audioin = new AudioInFloatSampled(tmpFile);

            String str = tmpFile.getCanonicalPath();
            String[] ar = { str };
            //xmlString = Encoder.fromWAVtoXML(ar);

            // gather information about audio file
            MP7MediaInformation media_info = new MP7MediaInformation();
            media_info.setFileSize(tmpFile.length());

            AudioFormat format = audioin.getSourceFormat();
            media_info.setSample(format.getSampleRate(), format.getSampleSizeInBits());
            media_info.setNumberOfChannels(audioin.isMono() ? 1 : 2);

            // create mpeg-7 writer
            MP7Writer mp7writer = new MP7Writer();
            mp7writer.setMediaInformation(media_info);

            // create encoder
            Encoder encoder = null;

            Config config = new ConfigDefault();
            config.enableAll(false);
            config.setValue("AudioSignature", "enable", true);
            config.setValue("AudioSignature", "decimation", 32);
            encoder = new Encoder(audioin.getSampleRate(), mp7writer, config);
            //encoder.addTimeElapsedListener(new Ticker(System.err));

            // copy audio signal from source to encoder
            long oldtime = System.currentTimeMillis();
            float[] audio;
            while ((audio = audioin.get()) != null) {
                if (!audioin.isMono())
                    audio = AudioInFloat.getMono(audio);
                encoder.put(audio);
            }
            encoder.flush();
            System.out.println("Extraction Time     : " + (System.currentTimeMillis() - oldtime) + " ms");

            // whole MPEG-7 description into a string
            xmlString = mp7writer.toString();
            //System.out.println( xmlString )

        } catch (Exception e) {
            e.printStackTrace(System.err);
        } finally {
            if (ais != null) {
                ais.close();
            }
        }

        return xmlString;
    } else {
        System.out.println("Unsupported audio file format");
        return null;
    }
}

From source file:edu.tsinghua.lumaqq.Sounder.java

/**
 * Loads a sound file and prepares a Clip (sampled audio) or a MIDI sequence for playback.
 * @param filename path of the sound file to load
 * @return true if the sound was loaded successfully
 */
private boolean loadSound(String filename) {
    // load the sound file
    File file = new File(filename);
    try {
        currentSound = AudioSystem.getAudioInputStream(file);
    } catch (Exception e) {
        try {
            FileInputStream is = new FileInputStream(file);
            currentSound = new BufferedInputStream(is, 1024);
        } catch (Exception ex) {
            log.error(ex.getMessage());
            currentSound = null;
            return false;
        }
    }

    // sampled audio: convert the format if necessary and open a Clip
    if (currentSound instanceof AudioInputStream) {
        try {
            AudioInputStream stream = (AudioInputStream) currentSound;
            AudioFormat format = stream.getFormat();

            // if the stream is ALAW/ULAW encoded, convert it to PCM
            if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
                    || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
                AudioFormat tmp = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                        format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                        format.getFrameRate(), true);
                stream = AudioSystem.getAudioInputStream(tmp, stream);
                format = tmp;
            }
            DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                    ((int) stream.getFrameLength() * format.getFrameSize()));

            Clip clip = (Clip) AudioSystem.getLine(info);
            clip.open(stream);
            currentSound = clip;
        } catch (Exception ex) {
            log.error(ex.getMessage());
            currentSound = null;
            return false;
        }
    } else if (currentSound instanceof Sequence || currentSound instanceof BufferedInputStream) {
        try {
            sequencer.open();
            if (currentSound instanceof Sequence) {
                sequencer.setSequence((Sequence) currentSound);
            } else {
                sequencer.setSequence((BufferedInputStream) currentSound);
            }
            log.trace("Sequence Created");
        } catch (InvalidMidiDataException imde) {
            log.error("???");
            currentSound = null;
            return false;
        } catch (Exception ex) {
            log.error(ex.getMessage());
            currentSound = null;
            return false;
        }
    }

    return true;
}
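
This loader and SimpleSoundPlayer below share the same conversion trick: a Clip cannot be opened directly on ULAW/ALAW data, so the stream is widened to 16-bit signed PCM at the original sample rate. The pattern in isolation, as a standalone helper:

// Widen a ULAW/ALAW stream to signed 16-bit PCM so that a Clip can play it;
// the sample rate and channel count are preserved.
public static AudioInputStream toSignedPcm(AudioInputStream stream) {
    AudioFormat format = stream.getFormat();
    if (format.getEncoding() == AudioFormat.Encoding.ULAW
            || format.getEncoding() == AudioFormat.Encoding.ALAW) {
        AudioFormat pcm = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                format.getSampleRate(),           // keep the source sample rate
                format.getSampleSizeInBits() * 2, // 8-bit companded -> 16-bit linear
                format.getChannels(),
                format.getFrameSize() * 2,        // frame size doubles with sample size
                format.getFrameRate(),
                true);                            // big-endian, as in the loaders above
        return AudioSystem.getAudioInputStream(pcm, stream);
    }
    return stream;
}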

From source file:SimpleSoundPlayer.java

public boolean loadSound(Object object) {
    duration = 0.0;

    currentName = ((File) object).getName();
    try {
        currentSound = AudioSystem.getAudioInputStream((File) object);
    } catch (Exception e1) {
        try {
            FileInputStream is = new FileInputStream((File) object);
            currentSound = new BufferedInputStream(is, 1024);
        } catch (Exception e3) {
            e3.printStackTrace();
            currentSound = null;
            return false;
        }
    }

    // user pressed stop or changed tabs while loading
    if (sequencer == null) {
        currentSound = null;
        return false;
    }

    if (currentSound instanceof AudioInputStream) {
        try {
            AudioInputStream stream = (AudioInputStream) currentSound;
            AudioFormat format = stream.getFormat();

            /**
             * we can't yet open the device for ALAW/ULAW playback, convert
             * ALAW/ULAW to PCM
             */

            if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
                    || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
                AudioFormat tmp = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                        format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                        format.getFrameRate(), true);
                stream = AudioSystem.getAudioInputStream(tmp, stream);
                format = tmp;
            }
            DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                    ((int) stream.getFrameLength() * format.getFrameSize()));

            Clip clip = (Clip) AudioSystem.getLine(info);
            clip.addLineListener(this);
            clip.open(stream);
            currentSound = clip;
            // seekSlider.setMaximum((int) stream.getFrameLength());
        } catch (Exception ex) {
            ex.printStackTrace();
            currentSound = null;
            return false;
        }
    } else if (currentSound instanceof Sequence || currentSound instanceof BufferedInputStream) {
        try {
            sequencer.open();
            if (currentSound instanceof Sequence) {
                sequencer.setSequence((Sequence) currentSound);
            } else {
                sequencer.setSequence((BufferedInputStream) currentSound);
            }

        } catch (InvalidMidiDataException imde) {
            System.out.println("Unsupported audio file.");
            currentSound = null;
            return false;
        } catch (Exception ex) {
            ex.printStackTrace();
            currentSound = null;
            return false;
        }
    }

    duration = getDuration();

    return true;
}

From source file:com.limegroup.gnutella.gui.mp3.BasicPlayer.java

/**
 * Inits a DataLine.<br>
 *
 * We check if the line supports Volume and Pan controls.
 *
 * From the AudioInputStream, i.e. from the sound file, we
 * fetch information about the format of the audio data. This
 * information includes the sampling frequency, the number of
 * channels and the size of the samples, and is needed to ask
 * JavaSound for a suitable output line for this audio file.
 * Furthermore, we have to give JavaSound a hint about how
 * big the internal buffer for the line should be. Here,
 * we say AudioSystem.NOT_SPECIFIED, signaling that we don't
 * care about the exact size. JavaSound will use some default
 * value for the buffer size.
 */
private void createLine() throws LineUnavailableException {
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        if (LOG.isDebugEnabled())
            LOG.debug("Source format : " + sourceFormat);
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                sourceFormat.getSampleRate(), 16, sourceFormat.getChannels(), sourceFormat.getChannels() * 2,
                sourceFormat.getSampleRate(), false);

        if (LOG.isDebugEnabled())
            LOG.debug("Target format: " + targetFormat);
        m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        if (LOG.isDebugEnabled())
            LOG.debug("Create Line : " + audioFormat);
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);
        m_line = (SourceDataLine) AudioSystem.getLine(info);

        /*-- Display supported controls --*/
        Control[] c = m_line.getControls();
        for (int p = 0; p < c.length; p++) {
            if (LOG.isDebugEnabled())
                LOG.debug("Controls : " + c[p].toString());
        }
        /*-- Is Gain Control supported ? --*/
        if (m_line.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
            m_gainControl = (FloatControl) m_line.getControl(FloatControl.Type.MASTER_GAIN);
            if (LOG.isDebugEnabled())
                LOG.debug("Master Gain Control : [" + m_gainControl.getMinimum() + ","
                        + m_gainControl.getMaximum() + "]," + m_gainControl.getPrecision());
        }

        /*-- Is Pan control supported ? --*/
        if (m_line.isControlSupported(FloatControl.Type.PAN)) {
            m_panControl = (FloatControl) m_line.getControl(FloatControl.Type.PAN);
            if (LOG.isDebugEnabled())
                LOG.debug("Pan Control : [" + m_panControl.getMinimum() + "," + m_panControl.getMaximum() + "],"
                        + m_panControl.getPrecision());
        }
    }
}
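
Once the line exists and MASTER_GAIN is reported as supported, m_gainControl can actually be driven. A small sketch (the method name is this page's invention), clamping to the range the control advertises:

// Set playback gain in decibels, clamped to the range reported by the control.
private void setGainDb(float gainDb) {
    if (m_gainControl != null) {
        float clamped = Math.max(m_gainControl.getMinimum(),
                Math.min(m_gainControl.getMaximum(), gainDb));
        m_gainControl.setValue(clamped);
    }
}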

From source file:com.player.BasicMP3Player.java

/**
 * Inits a DataLine.<br>
 * We check if the line supports Gain and Pan controls. From the AudioInputStream, i.e. from the
 * sound file, we fetch information about the format of the audio data. This information includes
 * the sampling frequency, the number of channels and the size of the samples, and is needed to
 * ask JavaSound for a suitable output line for this audio file. Furthermore, we
 * have to give JavaSound a hint about how big the internal buffer for the line should be. Here,
 * we say AudioSystem.NOT_SPECIFIED, signaling that we don't care about the exact size. JavaSound
 * will use some default value for the buffer size.
 */
private void createLine() throws LineUnavailableException {
    log.info("Create Line");
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        log.info("Create Line : Source format : " + sourceFormat.toString());
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                sourceFormat.getSampleRate(), 16, sourceFormat.getChannels(), sourceFormat.getChannels() * 2,
                sourceFormat.getSampleRate(), false);
        log.info("Create Line : Target format: " + targetFormat);
        // Keep a reference on encoded stream to progress notification.
        m_encodedaudioInputStream = m_audioInputStream;
        try {
            // Get total length in bytes of the encoded stream.
            encodedLength = m_encodedaudioInputStream.available();
        } catch (IOException e) {
            log.error("Cannot get m_encodedaudioInputStream.available()", e);
        }
        // Create decoded stream.
        m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);
        m_line = (SourceDataLine) AudioSystem.getLine(info);

        /*-- Display supported controls --*/
        Control[] c = m_line.getControls();
        for (int p = 0; p < c.length; p++) {
            log.debug("Controls : " + c[p].toString());
        }

        /*-- Is Gain Control supported ? --*/
        if (m_line.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
            m_gainControl = (FloatControl) m_line.getControl(FloatControl.Type.MASTER_GAIN);
            log.info("Master Gain Control : [" + m_gainControl.getMinimum() + "," + m_gainControl.getMaximum()
                    + "] " + m_gainControl.getPrecision());
        }

        /*-- Is Pan control supported ? --*/
        if (m_line.isControlSupported(FloatControl.Type.PAN)) {
            m_panControl = (FloatControl) m_line.getControl(FloatControl.Type.PAN);
            log.info("Pan Control : [" + m_panControl.getMinimum() + "," + m_panControl.getMaximum() + "] "
                    + m_panControl.getPrecision());
        }
    }
}

From source file:com.player.BasicMP3Player.java

/**
 * Inits AudioInputStream and AudioFileFormat from the data source.
 *
 * @throws BasicPlayerException
 */
private void initAudioInputStream() throws BasicPlayerException {
    try {
        reset();
        notifyEvent(BasicPlayerEvent.OPENING, getEncodedStreamPosition(), -1, m_dataSource);
        if (m_dataSource instanceof URL) {
            initAudioInputStream((URL) m_dataSource);
        } else if (m_dataSource instanceof File) {
            initAudioInputStream((File) m_dataSource);
        } else if (m_dataSource instanceof InputStream) {
            initAudioInputStream((InputStream) m_dataSource);
        }
        createLine();
        // Notify listeners with AudioFileFormat properties.
        Map properties = null;
        if (m_audioFileFormat instanceof TAudioFileFormat) {
            // Tritonus SPI compliant audio file format.
            properties = ((TAudioFileFormat) m_audioFileFormat).properties();
            // Clone the Map because it is not mutable.
            properties = deepCopy(properties);
        } else
            properties = new HashMap();
        // Add JavaSound properties.
        if (m_audioFileFormat.getByteLength() > 0)
            properties.put("audio.length.bytes", new Integer(m_audioFileFormat.getByteLength()));
        if (m_audioFileFormat.getFrameLength() > 0)
            properties.put("audio.length.frames", new Integer(m_audioFileFormat.getFrameLength()));
        if (m_audioFileFormat.getType() != null)
            properties.put("audio.type", (m_audioFileFormat.getType().toString()));
        // Audio format.
        AudioFormat audioFormat = m_audioFileFormat.getFormat();
        if (audioFormat.getFrameRate() > 0)
            properties.put("audio.framerate.fps", new Float(audioFormat.getFrameRate()));
        if (audioFormat.getFrameSize() > 0)
            properties.put("audio.framesize.bytes", new Integer(audioFormat.getFrameSize()));
        if (audioFormat.getSampleRate() > 0)
            properties.put("audio.samplerate.hz", new Float(audioFormat.getSampleRate()));
        if (audioFormat.getSampleSizeInBits() > 0)
            properties.put("audio.samplesize.bits", new Integer(audioFormat.getSampleSizeInBits()));
        if (audioFormat.getChannels() > 0)
            properties.put("audio.channels", new Integer(audioFormat.getChannels()));
        if (audioFormat instanceof TAudioFormat) {
            // Tritonus SPI compliant audio format.
            Map addproperties = ((TAudioFormat) audioFormat).properties();
            properties.putAll(addproperties);
        }
        Iterator it = m_listeners.iterator();
        while (it.hasNext()) {
            BasicPlayerListener bpl = (BasicPlayerListener) it.next();
            bpl.opened(m_dataSource, properties);
        }
        m_status = OPENED;
        notifyEvent(BasicPlayerEvent.OPENED, getEncodedStreamPosition(), -1, null);
    } catch (LineUnavailableException e) {
        throw new BasicPlayerException(e);
    } catch (UnsupportedAudioFileException e) {
        throw new BasicPlayerException(e);
    } catch (IOException e) {
        throw new BasicPlayerException(e);
    }
}

From source file:BasicPlayer.java

/**
 * Inits AudioInputStream and AudioFileFormat from the data source.
 * @throws BasicPlayerException
 */
protected void initAudioInputStream() throws BasicPlayerException {
    try {
        reset();
        notifyEvent(BasicPlayerEvent.OPENING, getEncodedStreamPosition(), -1, m_dataSource);
        if (m_dataSource instanceof URL) {
            initAudioInputStream((URL) m_dataSource);
        } else if (m_dataSource instanceof File) {
            initAudioInputStream((File) m_dataSource);
        } else if (m_dataSource instanceof InputStream) {
            initAudioInputStream((InputStream) m_dataSource);
        }
        createLine();
        // Notify listeners with AudioFileFormat properties.
        Map properties = null;
        if (m_audioFileFormat instanceof TAudioFileFormat) {
            // Tritonus SPI compliant audio file format.
            properties = ((TAudioFileFormat) m_audioFileFormat).properties();
            // Clone the Map because it is not mutable.
            properties = deepCopy(properties);
        } else
            properties = new HashMap();
        // Add JavaSound properties.
        if (m_audioFileFormat.getByteLength() > 0)
            properties.put("audio.length.bytes", new Integer(m_audioFileFormat.getByteLength()));
        if (m_audioFileFormat.getFrameLength() > 0)
            properties.put("audio.length.frames", new Integer(m_audioFileFormat.getFrameLength()));
        if (m_audioFileFormat.getType() != null)
            properties.put("audio.type", (m_audioFileFormat.getType().toString()));
        // Audio format.
        AudioFormat audioFormat = m_audioFileFormat.getFormat();
        if (audioFormat.getFrameRate() > 0)
            properties.put("audio.framerate.fps", new Float(audioFormat.getFrameRate()));
        if (audioFormat.getFrameSize() > 0)
            properties.put("audio.framesize.bytes", new Integer(audioFormat.getFrameSize()));
        if (audioFormat.getSampleRate() > 0)
            properties.put("audio.samplerate.hz", new Float(audioFormat.getSampleRate()));
        if (audioFormat.getSampleSizeInBits() > 0)
            properties.put("audio.samplesize.bits", new Integer(audioFormat.getSampleSizeInBits()));
        if (audioFormat.getChannels() > 0)
            properties.put("audio.channels", new Integer(audioFormat.getChannels()));
        if (audioFormat instanceof TAudioFormat) {
            // Tritonus SPI compliant audio format.
            Map addproperties = ((TAudioFormat) audioFormat).properties();
            properties.putAll(addproperties);
        }
        // Add SourceDataLine
        properties.put("basicplayer.sourcedataline", m_line);
        Iterator it = m_listeners.iterator();
        while (it.hasNext()) {
            BasicPlayerListener bpl = (BasicPlayerListener) it.next();
            bpl.opened(m_dataSource, properties);
        }
        m_status = OPENED;
        notifyEvent(BasicPlayerEvent.OPENED, getEncodedStreamPosition(), -1, null);
    } catch (LineUnavailableException e) {
        throw new BasicPlayerException(e);
    } catch (UnsupportedAudioFileException e) {
        throw new BasicPlayerException(e);
    } catch (IOException e) {
        throw new BasicPlayerException(e);
    }
}
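
On the listener side, the sample rate can be read back out of that properties map. A sketch of an opened callback, matching the bpl.opened(m_dataSource, properties) call above; the keys are exactly those populated from getSampleRate() and friends:

// Inside a BasicPlayerListener implementation.
public void opened(Object source, Map properties) {
    Float rate = (Float) properties.get("audio.samplerate.hz");   // set from getSampleRate()
    Integer channels = (Integer) properties.get("audio.channels");
    if (rate != null && channels != null) {
        System.out.println("Opened: " + rate + " Hz, " + channels + " channel(s)");
    }
}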

From source file:BasicPlayer.java

/**
 * Inits a DataLine.<br>
 *
 * We check if the line supports Gain and Pan controls.
 *
 * From the AudioInputStream, i.e. from the sound file, we
 * fetch information about the format of the audio data. This
 * information includes the sampling frequency, the number of
 * channels and the size of the samples, and is needed to ask
 * JavaSound for a suitable output line for this audio file.
 * Furthermore, we have to give JavaSound a hint about how
 * big the internal buffer for the line should be. Here,
 * we say AudioSystem.NOT_SPECIFIED, signaling that we don't
 * care about the exact size. JavaSound will use some default
 * value for the buffer size.
 */
protected void createLine() throws LineUnavailableException {
    log.info("Create Line");
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        log.info("Create Line : Source format : " + sourceFormat.toString());
        int nSampleSizeInBits = sourceFormat.getSampleSizeInBits();
        if (nSampleSizeInBits <= 0)
            nSampleSizeInBits = 16;
        if ((sourceFormat.getEncoding() == AudioFormat.Encoding.ULAW)
                || (sourceFormat.getEncoding() == AudioFormat.Encoding.ALAW))
            nSampleSizeInBits = 16;
        if (nSampleSizeInBits != 8)
            nSampleSizeInBits = 16;
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                sourceFormat.getSampleRate(), nSampleSizeInBits, sourceFormat.getChannels(),
                sourceFormat.getChannels() * (nSampleSizeInBits / 8), sourceFormat.getSampleRate(), false);
        log.info("Create Line : Target format: " + targetFormat);
        // Keep a reference on encoded stream to progress notification.
        m_encodedaudioInputStream = m_audioInputStream;
        try {
            // Get total length in bytes of the encoded stream.
            encodedLength = m_encodedaudioInputStream.available();
        } catch (IOException e) {
            log.error("Cannot get m_encodedaudioInputStream.available()", e);
        }
        // Create decoded stream.
        m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);
        Mixer mixer = getMixer(m_mixerName);
        if (mixer != null) {
            log.info("Mixer : " + mixer.getMixerInfo().toString());
            m_line = (SourceDataLine) mixer.getLine(info);
        } else {
            m_line = (SourceDataLine) AudioSystem.getLine(info);
            m_mixerName = null;
        }
        log.info("Line : " + m_line.toString());
        log.debug("Line Info : " + m_line.getLineInfo().toString());
        log.debug("Line AudioFormat: " + m_line.getFormat().toString());
    }
}
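
getMixer(m_mixerName) implies a lookup of a Mixer by name. BasicPlayer's own helper is not shown here, but a minimal version using the standard AudioSystem enumeration could look like this (the method name is assumed):

// Resolve a mixer by its info name; returning null falls back to AudioSystem.getLine(info).
private static Mixer findMixerByName(String name) {
    if (name == null) {
        return null;
    }
    for (Mixer.Info mixerInfo : AudioSystem.getMixerInfo()) {
        if (name.equals(mixerInfo.getName())) {
            return AudioSystem.getMixer(mixerInfo);
        }
    }
    return null;
}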