Example usage for javax.sound.sampled AudioFormat AudioFormat

List of usage examples for javax.sound.sampled AudioFormat AudioFormat

Introduction

On this page you can find example usage for the javax.sound.sampled AudioFormat constructor.

Prototype

public AudioFormat(Encoding encoding, float sampleRate, int sampleSizeInBits, int channels, int frameSize,
        float frameRate, boolean bigEndian) 

Document

Constructs an AudioFormat with the given parameters.
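
For quick reference, here is a minimal, self-contained sketch of calling this constructor directly. The concrete values (44.1 kHz, 16-bit, stereo, little-endian PCM) are illustrative assumptions rather than values taken from the examples below; for PCM the frame size is channels * bytes per sample and the frame rate equals the sample rate:

import javax.sound.sampled.AudioFormat;

public class AudioFormatConstructorExample {
    public static void main(String[] args) {
        float sampleRate = 44100f;
        int sampleSizeInBits = 16;
        int channels = 2;
        int frameSize = channels * (sampleSizeInBits / 8); // bytes per frame for PCM
        float frameRate = sampleRate;                       // one frame per sample for PCM

        AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, sampleRate,
                sampleSizeInBits, channels, frameSize, frameRate, false); // little endian

        // Prints something like: PCM_SIGNED 44100.0 Hz, 16 bit, stereo, 4 bytes/frame, little-endian
        System.out.println(format);
    }
}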

Usage

From source file:Main.java

public static void main(String[] argv) throws Exception {
    AudioInputStream stream = AudioSystem.getAudioInputStream(new File("audiofile"));

    // From URL:
    // stream = AudioSystem.getAudioInputStream(new URL(
    // "http://hostname/audiofile"));

    AudioFormat format = stream.getFormat();
    if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                format.getFrameRate(), true); // big endian
        stream = AudioSystem.getAudioInputStream(format, stream);
    }

    DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
            ((int) stream.getFrameLength() * format.getFrameSize()));
    Clip clip = (Clip) AudioSystem.getLine(info);

    clip.open(stream);

    clip.start();
}

From source file:Main.java

public static void main(String[] argv) throws Exception {
    AudioInputStream stream = AudioSystem.getAudioInputStream(new File("audiofile"));
    //    stream = AudioSystem.getAudioInputStream(new URL(
    //      "http://hostname/audiofile"));

    AudioFormat format = stream.getFormat();
    if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                format.getFrameRate(), true); // big endian
        stream = AudioSystem.getAudioInputStream(format, stream);
    }

    DataLine.Info info = new DataLine.Info(SourceDataLine.class, stream.getFormat(),
            ((int) stream.getFrameLength() * format.getFrameSize()));
    SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
    line.open(stream.getFormat());
    line.start();

    int numRead = 0;
    byte[] buf = new byte[line.getBufferSize()];
    while ((numRead = stream.read(buf, 0, buf.length)) >= 0) {
        int offset = 0;
        while (offset < numRead) {
            offset += line.write(buf, offset, numRead - offset);
        }
    }
    line.drain();
    line.stop();
}

From source file:marytts.tests.junit4.EnvironmentTest.java

@Test
public void testMP3Available() throws Exception {
    AudioFormat mp3af = new AudioFormat(new AudioFormat.Encoding("MPEG1L3"), AudioSystem.NOT_SPECIFIED,
            AudioSystem.NOT_SPECIFIED, 1, AudioSystem.NOT_SPECIFIED, AudioSystem.NOT_SPECIFIED, false);
    AudioInputStream waveStream = AudioSystem
            .getAudioInputStream(this.getClass().getResourceAsStream("test.wav"));
    // Now attempt conversion:
    if (MaryRuntimeUtils.canCreateMP3()) {
        assertTrue(AudioSystem.isConversionSupported(mp3af, waveStream.getFormat()));
        AudioInputStream mp3Stream = AudioSystem.getAudioInputStream(mp3af, waveStream);
    } else {
        assertFalse(AudioSystem.isConversionSupported(mp3af, waveStream.getFormat()));
    }
}

From source file:org.snitko.app.playback.PlaySound.java

private AudioFormat getOutFormat(AudioFormat inFormat) {
    final int ch = inFormat.getChannels();
    final float rate = inFormat.getSampleRate();
    return new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, rate, 16, ch, ch * 2, rate, false);
}

From source file:sec_algo.aud_sec.java

public BufferedWriter getAudioStream() {
    FileInputStream fin = null;
    BufferedWriter audstream = null;

    try {
        fin = new FileInputStream(this.file);
        //           audstream = new BufferedWriter(new FileWriter(returnFileName()+"_ex."+returnFileExt()));
        //           byte contents[] = new byte[100];
        //           while(fin.read(contents)!= -1){
        //               System.out.println("reading & writing from file");
        //               for(byte b : contents)
        //                   for(int x = 0; x < 8; x++)
        //                       audstream.write(b>>x & 1);
        //           }
        //           System.out.println("Finished!");
        //           System.out.println("audstream contents: " + audstream.toString());
        byte[] header = new byte[8];
        fin.read(header);
        fin.close();
        //           System.out.println("header bytes: " + Arrays.toString(header));
        ArrayList<String> bitstring = new ArrayList<String>();
        for (int i = 0; i < header.length; i++)
            bitstring.add(String.format("%8s", Integer.toBinaryString(header[i] & 0xFF)).replace(' ', '0'));
        System.out.print("bit input: [/");
        for (int i = 0; i < bitstring.size(); i++) {
            System.out.print(bitstring.get(i) + " ");
        }
        System.out.println("]/");

        System.out.println(bitstring.get(0) + " " + bitstring.get(1) + " " + bitstring.get(2));
        System.out.println("Bitrate index: " + bitstring.get(2).substring(0, 4));

        AudioInputStream in = AudioSystem.getAudioInputStream(this.file);
        AudioInputStream din = null;
        AudioFormat baseFormat = in.getFormat();
        AudioFormat decodedFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, baseFormat.getSampleRate(),
                getBitrate(bitstring.get(2).substring(0, 4)), baseFormat.getChannels(),
                baseFormat.getChannels() * 2, baseFormat.getSampleRate(), false);
        din = AudioSystem.getAudioInputStream(decodedFormat, in);
        int size = din.available();
        byte[] bytaud = new byte[size];
        din.read(bytaud);
        bitstring = new ArrayList<String>();
        for (int i = 0; i < header.length; i++)
            bitstring.add(String.format("%8s", Integer.toBinaryString(header[i] & 0xFF)).replace(' ', '0'));
        System.out.print("bit input: [/");
        for (int i = 0; i < bitstring.size(); i++) {
            System.out.print(bitstring.get(i) + " ");
        }
        System.out.println("]/");
        in.close();
        din.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return audstream;
}

From source file:SimpleSoundPlayer.java

public boolean loadSound(Object object) {
    duration = 0.0;

    currentName = ((File) object).getName();
    try {
        currentSound = AudioSystem.getAudioInputStream((File) object);
    } catch (Exception e1) {
        try {
            FileInputStream is = new FileInputStream((File) object);
            currentSound = new BufferedInputStream(is, 1024);
        } catch (Exception e3) {
            e3.printStackTrace();
            currentSound = null;
            return false;
        }
        // }
    }

    // user pressed stop or changed tabs while loading
    if (sequencer == null) {
        currentSound = null;
        return false;
    }

    if (currentSound instanceof AudioInputStream) {
        try {
            AudioInputStream stream = (AudioInputStream) currentSound;
            AudioFormat format = stream.getFormat();

            /**
             * we can't yet open the device for ALAW/ULAW playback, convert
             * ALAW/ULAW to PCM
             */

            if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
                    || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
                AudioFormat tmp = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                        format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                        format.getFrameRate(), true);
                stream = AudioSystem.getAudioInputStream(tmp, stream);
                format = tmp;
            }
            DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                    ((int) stream.getFrameLength() * format.getFrameSize()));

            Clip clip = (Clip) AudioSystem.getLine(info);
            clip.addLineListener(this);
            clip.open(stream);
            currentSound = clip;
            // seekSlider.setMaximum((int) stream.getFrameLength());
        } catch (Exception ex) {
            ex.printStackTrace();
            currentSound = null;
            return false;
        }
    } else if (currentSound instanceof Sequence || currentSound instanceof BufferedInputStream) {
        try {
            sequencer.open();
            if (currentSound instanceof Sequence) {
                sequencer.setSequence((Sequence) currentSound);
            } else {
                sequencer.setSequence((BufferedInputStream) currentSound);
            }

        } catch (InvalidMidiDataException imde) {
            System.out.println("Unsupported audio file.");
            currentSound = null;
            return false;
        } catch (Exception ex) {
            ex.printStackTrace();
            currentSound = null;
            return false;
        }
    }

    duration = getDuration();

    return true;
}

From source file:edu.tsinghua.lumaqq.Sounder.java

/**
 * Loads a sound file and prepares it for playback.
 *
 * @param filename path of the sound file to load
 * @return true if the sound was loaded successfully, false otherwise
 */
private boolean loadSound(String filename) {
    // Load the sound file
    File file = new File(filename);
    try {
        currentSound = AudioSystem.getAudioInputStream(file);
    } catch (Exception e) {
        try {
            FileInputStream is = new FileInputStream(file);
            currentSound = new BufferedInputStream(is, 1024);
        } catch (Exception ex) {
            log.error(ex.getMessage());
            currentSound = null;
            return false;
        }
    }

    // If the sound is sampled audio, open a Clip for it
    if (currentSound instanceof AudioInputStream) {
        try {
            AudioInputStream stream = (AudioInputStream) currentSound;
            AudioFormat format = stream.getFormat();

            // The device cannot yet be opened for ALAW/ULAW playback, so convert ALAW/ULAW to PCM
            if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
                    || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
                AudioFormat tmp = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                        format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                        format.getFrameRate(), true);
                stream = AudioSystem.getAudioInputStream(tmp, stream);
                format = tmp;
            }
            DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                    ((int) stream.getFrameLength() * format.getFrameSize()));

            Clip clip = (Clip) AudioSystem.getLine(info);
            clip.open(stream);
            currentSound = clip;
        } catch (Exception ex) {
            log.error(ex.getMessage());
            currentSound = null;
            return false;
        }
    } else if (currentSound instanceof Sequence || currentSound instanceof BufferedInputStream) {
        try {
            sequencer.open();
            if (currentSound instanceof Sequence) {
                sequencer.setSequence((Sequence) currentSound);
            } else {
                sequencer.setSequence((BufferedInputStream) currentSound);
            }
            log.trace("Sequence Created");
        } catch (InvalidMidiDataException imde) {
            log.error("???");
            currentSound = null;
            return false;
        } catch (Exception ex) {
            log.error(ex.getMessage());
            currentSound = null;
            return false;
        }
    }

    return true;
}

From source file:org.sipfoundry.sipxconfig.site.common.AssetSelector.java

public static boolean isAcceptedAudioFormat(InputStream stream) {
    try {
        InputStream testedStream = stream;
        // HACK: in OpenJDK the passed InputStream may not support mark/reset
        if (!stream.markSupported()) {
            // getAudioInputStream depends on mark/reset, so we wrap a BufferedInputStream
            // around the passed stream
            testedStream = new BufferedInputStream(stream);
        }
        AudioInputStream audio = AudioSystem.getAudioInputStream(testedStream);
        AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 8000, // sample rate
                16, // bits per sample
                1, // channels
                2, // frame size
                8000, // frame rate
                false); // isBigEndian
        return format.matches(audio.getFormat());
    } catch (IOException e) {
        LOG.warn("Uploaded file problems.", e);
    } catch (UnsupportedAudioFileException e) {
        LOG.info("Unsupported format", e);
    }
    return false;
}

From source file:org.yccheok.jstock.chat.Utils.java

public static void playSound(final Sound sound) {
    if (sounds.size() == 0) {
        for (Sound s : Sound.values()) {
            AudioInputStream stream = null;
            Clip clip = null;

            try {
                switch (s) {
                case ALERT:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "alert.wav"));
                    break;
                case LOGIN:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "login.wav"));
                    break;
                case LOGOUT:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "logout.wav"));
                    break;
                case RECEIVE:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "receive.wav"));
                    break;
                case SEND:
                    stream = AudioSystem.getAudioInputStream(new File(Utils.getSoundsDirectory() + "send.wav"));
                    break;
                default:
                    throw new java.lang.IllegalArgumentException("Missing case " + s);
                }

                // At present, ALAW and ULAW encodings must be converted
                // to PCM_SIGNED before they can be played
                AudioFormat format = stream.getFormat();
                if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
                    format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                            format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                            format.getFrameRate(), true); // big endian
                    stream = AudioSystem.getAudioInputStream(format, stream);
                }

                // Create the clip
                DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                        ((int) stream.getFrameLength() * format.getFrameSize()));
                clip = (Clip) AudioSystem.getLine(info);

                // This method does not return until the audio file is completely loaded
                clip.open(stream);
                clip.drain();
                sounds.put(s, clip);
            } catch (MalformedURLException e) {
                log.error(null, e);
            } catch (IOException e) {
                log.error(null, e);
            } catch (LineUnavailableException e) {
                log.error(null, e);
            } catch (UnsupportedAudioFileException e) {
                log.error(null, e);
            } finally {
            }
        }

    }
    soundPool.execute(new Runnable() {
        @Override
        public void run() {
            Clip clip = sounds.get(sound);

            if (clip == null) {
                return;
            }

            clip.stop();
            clip.flush();
            clip.setFramePosition(0);
            clip.loop(0);
            // Wait for the sound to finish.
            //while (clip.isRunning()) {
            //    try {
            //        Thread.sleep(1);
            //    } catch (InterruptedException ex) {
            //        log.error(null, ex);
            //    }
            //}
        }
    });
}

From source file:com.limegroup.gnutella.gui.mp3.BasicPlayer.java

/**
 * Inits a DataLine.<br>
 *
 * We check if the line supports Volume and Pan controls.
 *
 * From the AudioInputStream, i.e. from the sound file, we
 * fetch information about the format of the audio data. This
 * information includes the sampling frequency, the number of
 * channels and the size of the samples, and is needed to ask
 * JavaSound for a suitable output line for this audio file.
 * Furthermore, we have to give JavaSound a hint about how
 * big the internal buffer for the line should be. Here,
 * we say AudioSystem.NOT_SPECIFIED, signaling that we don't
 * care about the exact size. JavaSound will use some default
 * value for the buffer size.
 */
private void createLine() throws LineUnavailableException {
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        if (LOG.isDebugEnabled())
            LOG.debug("Source format : " + sourceFormat);
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                sourceFormat.getSampleRate(), 16, sourceFormat.getChannels(), sourceFormat.getChannels() * 2,
                sourceFormat.getSampleRate(), false);

        if (LOG.isDebugEnabled())
            LOG.debug("Target format: " + targetFormat);
        m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        if (LOG.isDebugEnabled())
            LOG.debug("Create Line : " + audioFormat);
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);
        m_line = (SourceDataLine) AudioSystem.getLine(info);

        /*-- Display supported controls --*/
        Control[] c = m_line.getControls();
        for (int p = 0; p < c.length; p++) {
            if (LOG.isDebugEnabled())
                LOG.debug("Controls : " + c[p].toString());
        }
        /*-- Is Gain Control supported ? --*/
        if (m_line.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
            m_gainControl = (FloatControl) m_line.getControl(FloatControl.Type.MASTER_GAIN);
            if (LOG.isDebugEnabled())
                LOG.debug("Master Gain Control : [" + m_gainControl.getMinimum() + ","
                        + m_gainControl.getMaximum() + "]," + m_gainControl.getPrecision());
        }

        /*-- Is Pan control supported ? --*/
        if (m_line.isControlSupported(FloatControl.Type.PAN)) {
            m_panControl = (FloatControl) m_line.getControl(FloatControl.Type.PAN);
            if (LOG.isDebugEnabled())
                LOG.debug("Pan Control : [" + m_panControl.getMinimum() + "," + m_panControl.getMaximum() + "],"
                        + m_panControl.getPrecision());
        }
    }
}