Example usage for javax.sound.sampled AudioFormat getFrameSize

Introduction

This page presents example usages of javax.sound.sampled.AudioFormat.getFrameSize().

Prototype

public int getFrameSize() 

Document

Obtains the frame size in bytes.
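
For PCM encodings the frame size is simply the sample size in bytes multiplied by the channel count; formats that do not define a frame size return AudioSystem.NOT_SPECIFIED (-1). A minimal sketch illustrating the relationship (the class name and the CD-quality parameters are chosen only for illustration):

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;

public class FrameSizeDemo {
    public static void main(String[] args) {
        // 44.1 kHz, 16-bit, stereo, signed, little-endian PCM
        AudioFormat cdQuality = new AudioFormat(44100f, 16, 2, true, false);

        // For PCM: frame size = (16 bits / 8) * 2 channels = 4 bytes per frame
        System.out.println("frame size = " + cdQuality.getFrameSize());

        // Formats that leave the frame size undefined report AudioSystem.NOT_SPECIFIED
        System.out.println("NOT_SPECIFIED = " + AudioSystem.NOT_SPECIFIED);
    }
}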

Usage

From source file:Main.java

public static void main(String[] argv) throws Exception {
    AudioInputStream stream = AudioSystem.getAudioInputStream(new File("audiofile"));

    // From URL:
    // stream = AudioSystem.getAudioInputStream(new URL(
    // "http://hostname/audiofile"));

    AudioFormat format = stream.getFormat();
    if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                format.getFrameRate(), true); // big endian
        stream = AudioSystem.getAudioInputStream(format, stream);
    }

    DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
            ((int) stream.getFrameLength() * format.getFrameSize()));
    Clip clip = (Clip) AudioSystem.getLine(info);

    clip.open(stream);

    clip.start();
}
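
The buffer size passed to DataLine.Info above is stream.getFrameLength() * format.getFrameSize(), i.e. frames times bytes per frame, which is the total number of bytes in the converted stream; note that the narrowing cast to int limits this approach to streams whose frame count fits in an int.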

From source file:Main.java

public static void main(String[] argv) throws Exception {
    AudioInputStream stream = AudioSystem.getAudioInputStream(new File("audiofile"));
    //    stream = AudioSystem.getAudioInputStream(new URL(
    //      "http://hostname/audiofile"));

    AudioFormat format = stream.getFormat();
    if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                format.getFrameRate(), true); // big endian
        stream = AudioSystem.getAudioInputStream(format, stream);
    }

    SourceDataLine.Info info = new DataLine.Info(SourceDataLine.class, stream.getFormat(),
            ((int) stream.getFrameLength() * format.getFrameSize()));
    SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
    line.open(stream.getFormat());
    line.start();

    int numRead = 0;
    byte[] buf = new byte[line.getBufferSize()];
    while ((numRead = stream.read(buf, 0, buf.length)) >= 0) {
        int offset = 0;
        while (offset < numRead) {
            offset += line.write(buf, offset, numRead - offset);
        }
    }
    line.drain();
    line.stop();
}
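
SourceDataLine.write consumes data in whole frames, so it can help to make the read buffer an exact multiple of getFrameSize(); line.getBufferSize() usually already is. A small sketch of that idea, using a hypothetical helper named frameAlignedBuffer:

import javax.sound.sampled.AudioFormat;

public class FrameAlignedBuffer {

    // Rounds the requested size down to a whole number of frames so that
    // writes to a SourceDataLine always end on a frame boundary.
    static byte[] frameAlignedBuffer(AudioFormat format, int approxBytes) {
        int frameSize = format.getFrameSize();
        int frames = Math.max(1, approxBytes / frameSize);
        return new byte[frames * frameSize];
    }

    public static void main(String[] args) {
        AudioFormat format = new AudioFormat(44100f, 16, 2, true, false);
        byte[] buf = frameAlignedBuffer(format, 10001);
        System.out.println("buffer length = " + buf.length); // 10000, a multiple of the 4-byte frame
    }
}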

From source file:Main.java

public static void main(String args[]) throws Exception {
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    float sampleRate = 8000;
    int sampleSizeInBits = 8;
    int channels = 1;
    boolean signed = true;
    boolean bigEndian = true;
    final AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    final TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
    line.open(format);
    line.start();
    Runnable runner = new Runnable() {
        int bufferSize = (int) format.getSampleRate() * format.getFrameSize();

        byte buffer[] = new byte[bufferSize];

        public void run() {
            try {

                int count = line.read(buffer, 0, buffer.length);
                if (count > 0) {
                    out.write(buffer, 0, count);
                }

                out.close();
            } catch (IOException e) {
                System.err.println("I/O problems: " + e);
                System.exit(-1);
            }
        }
    };
    Thread captureThread = new Thread(runner);
    captureThread.start();

    byte audio[] = out.toByteArray();
    InputStream input = new ByteArrayInputStream(audio);
    final SourceDataLine line1 = (SourceDataLine) AudioSystem.getLine(info);
    final AudioInputStream ais = new AudioInputStream(input, format, audio.length / format.getFrameSize());
    line1.open(format);
    line1.start();

    runner = new Runnable() {
        int bufferSize = (int) format.getSampleRate() * format.getFrameSize();

        byte buffer[] = new byte[bufferSize];

        public void run() {
            try {
                int count;
                while ((count = ais.read(buffer, 0, buffer.length)) != -1) {
                    if (count > 0) {
                        line1.write(buffer, 0, count);
                    }
                }
                line1.drain();
                line1.close();
            } catch (IOException e) {
                System.err.println("I/O problems: " + e);
                System.exit(-3);
            }
        }
    };
    Thread playThread = new Thread(runner);
    playThread.start();

}
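
Both runnables size their buffer as getSampleRate() * getFrameSize(); since the frame rate equals the sample rate for PCM, the buffer holds roughly one second of audio (8000 bytes for the 8 kHz, 8-bit mono format used here). Note that the playback half runs immediately after the capture thread is started, so in a real program the two phases would need to be sequenced (for example by joining the capture thread) before calling out.toByteArray().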

From source file:org.yccheok.jstock.chat.Utils.java

public static void playSound(final Sound sound) {
    if (sounds.size() == 0) {
        for (Sound s : Sound.values()) {
            AudioInputStream stream = null;
            Clip clip = null;

            try {
                switch (s) {
                case ALERT:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "alert.wav"));
                    break;
                case LOGIN:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "login.wav"));
                    break;
                case LOGOUT:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "logout.wav"));
                    break;
                case RECEIVE:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "receive.wav"));
                    break;
                case SEND:
                    stream = AudioSystem.getAudioInputStream(new File(Utils.getSoundsDirectory() + "send.wav"));
                    break;
                default:
                    throw new java.lang.IllegalArgumentException("Missing case " + s);
                }

                // At present, ALAW and ULAW encodings must be converted
                // to PCM_SIGNED before they can be played
                AudioFormat format = stream.getFormat();
                if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
                    format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                            format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                            format.getFrameRate(), true); // big endian
                    stream = AudioSystem.getAudioInputStream(format, stream);
                }

                // Create the clip
                DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                        ((int) stream.getFrameLength() * format.getFrameSize()));
                clip = (Clip) AudioSystem.getLine(info);

                // This method does not return until the audio file is completely loaded
                clip.open(stream);
                clip.drain();
                sounds.put(s, clip);
            } catch (MalformedURLException e) {
                log.error(null, e);
            } catch (IOException e) {
                log.error(null, e);
            } catch (LineUnavailableException e) {
                log.error(null, e);
            } catch (UnsupportedAudioFileException e) {
                log.error(null, e);
            } finally {
            }
        }

    }
    soundPool.execute(new Runnable() {
        @Override
        public void run() {
            Clip clip = sounds.get(sound);

            if (clip == null) {
                return;
            }

            clip.stop();
            clip.flush();
            clip.setFramePosition(0);
            clip.loop(0);
            // Wait for the sound to finish.
            //while (clip.isRunning()) {
            //    try {
            //        Thread.sleep(1);
            //    } catch (InterruptedException ex) {
            //        log.error(null, ex);
            //    }
            //}
        }
    });
}

From source file:com.andrewkroh.cicso.rtp.AudioFileStreamer.java

/**
 * Utility method to convert an {@link AudioFormat} object to a String.
 * {@code AudioFormat} does implement a toString method, but its output
 * varies depending upon the contents. I find it more useful to always print
 * the value of all fields.
 *
 * @param format
 *            {@code AudioFormat} to convert to a String
 * @return {@code AudioFormat} object as a String
 */
private static String audioFormatToString(AudioFormat format) {
    return new ToStringBuilder(format).append("encoding", format.getEncoding())
            .append("sampleRate", format.getSampleRate())
            .append("sampleSizeInBits", format.getSampleSizeInBits()).append("channels", format.getChannels())
            .append("frameSize", format.getFrameSize()).append("frameRate", format.getFrameRate())
            .append("isBigEndian", format.isBigEndian()).toString();
}
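
ToStringBuilder here is the Apache Commons Lang builder, so this relies on that dependency being on the classpath. Calling the method on, say, a 44.1 kHz, 16-bit, stereo PCM format would list frameSize=4 alongside the other fields.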

From source file:org.jcodec.codecs.wav.WavHeader.java

public static WavHeader create(AudioFormat af, int size) {
    WavHeader w = emptyWavHeader();
    w.dataSize = size;
    FmtChunk fmt = new FmtChunk();
    int bitsPerSample = af.getSampleSizeInBits();
    int bytesPerSample = bitsPerSample / 8;
    int sampleRate = (int) af.getSampleRate();
    w.fmt.bitsPerSample = (short) bitsPerSample;
    w.fmt.blockAlign = (short) (af.getFrameSize());
    w.fmt.byteRate = (int) af.getFrameRate() * af.getFrameSize();
    w.fmt.numChannels = (short) af.getChannels();
    w.fmt.sampleRate = (int) af.getSampleRate();
    return w;
}
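
For PCM data the WAV fmt chunk's blockAlign field is by definition the number of bytes per sample frame, so mapping it straight from getFrameSize() is appropriate, and byteRate follows as getFrameRate() * getFrameSize().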

From source file:iristk.speech.nuance9.BaseRecognizer.java

public static String getEncoding(AudioFormat format) throws IllegalArgumentException {
    if (format.getFrameRate() != 8000 && format.getFrameRate() != 16000)
        throw new IllegalArgumentException("Can only process 8khz or 16khz");
    if (format.isBigEndian())
        throw new IllegalArgumentException("Can only process little-endian");
    if (format.getChannels() != 1)
        throw new IllegalArgumentException("Can only process mono sound");
    if (format.getEncoding() == Encoding.ULAW)
        return "audio/basic;rate=8000";
    else if (format.getEncoding() == Encoding.PCM_SIGNED) {
        if (format.getFrameSize() != 2)
            throw new IllegalArgumentException("Can only process 16 bit PCM sound");
        return "audio/L16;rate=8000";
    } else
        throw new IllegalArgumentException("Bad audio encoding: " + format.getEncoding());
}
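
A minimal usage sketch, assuming an 8 kHz, 16-bit, mono, little-endian PCM format so that every check above passes (and that the BaseRecognizer class shown above is on the classpath):

import javax.sound.sampled.AudioFormat;

import iristk.speech.nuance9.BaseRecognizer;

public class EncodingDemo {
    public static void main(String[] args) {
        // 8 kHz, 16-bit, mono, signed, little-endian PCM: 2 bytes per frame
        AudioFormat format = new AudioFormat(8000f, 16, 1, true, false);

        // Expected to print "audio/L16;rate=8000"
        System.out.println(BaseRecognizer.getEncoding(format));
    }
}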

From source file:xtrememp.tag.GenericInfo.java

/**
 * Load info from AudioFileFormat.
 *
 * @param aff
 * @throws javax.sound.sampled.UnsupportedAudioFileException
 */
protected void loadInfo(AudioFileFormat aff) throws UnsupportedAudioFileException {
    encodingType = aff.getType().toString();
    AudioFormat audioFormat = aff.getFormat();
    channelsAsNumber = audioFormat.getChannels();
    sampleRateAsNumber = (int) audioFormat.getSampleRate();
    bitspersample = audioFormat.getSampleSizeInBits();
    framesize = audioFormat.getFrameSize();
    bitRateAsNumber = Math.round(bitspersample * sampleRateAsNumber * channelsAsNumber / 1000);
}
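
Because the frame rate equals the sample rate for uncompressed PCM, the same figure could be derived from the frame size alone as framesize * sampleRateAsNumber * 8 / 1000; the expression above multiplies sample size, sample rate and channel count directly, which is equivalent.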

From source file:it.sardegnaricerche.voiceid.sr.VCluster.java

public void trimSegments(File inputFile) throws IOException {
    String base = Utils.getBasename(inputFile);
    File mydir = new File(base);
    mydir.mkdirs();
    String mywav = mydir.getAbsolutePath() + "/" + this.getLabel() + ".wav";
    AudioFileFormat fileFormat = null;
    AudioInputStream inputStream = null;
    AudioInputStream shortenedStream = null;
    AudioInputStream current = null;
    int bytesPerSecond = 0;
    long framesOfAudioToCopy = 0;
    wavFile = new File(mywav);
    try {
        fileFormat = AudioSystem.getAudioFileFormat(inputFile);
        AudioFormat format = fileFormat.getFormat();
        boolean firstTime = true;

        for (VSegment s : this.getSegments()) {
            bytesPerSecond = format.getFrameSize() * (int) format.getFrameRate();
            inputStream = AudioSystem.getAudioInputStream(inputFile);
            inputStream.skip(0);
            inputStream.skip((int) (s.getStart() * 100) * bytesPerSecond / 100);
            framesOfAudioToCopy = (int) (s.getDuration() * 100) * (int) format.getFrameRate() / 100;

            if (firstTime) {
                shortenedStream = new AudioInputStream(inputStream, format, framesOfAudioToCopy);
            } else {
                current = new AudioInputStream(inputStream, format, framesOfAudioToCopy);
                shortenedStream = new AudioInputStream(new SequenceInputStream(shortenedStream, current),
                        format, shortenedStream.getFrameLength() + framesOfAudioToCopy);
            }
            firstTime = false;
        }
        AudioSystem.write(shortenedStream, fileFormat.getType(), wavFile);
    } catch (Exception e) {
        logger.severe(e.getMessage());
        e.printStackTrace();
    } finally {
        if (inputStream != null)
            try {
                inputStream.close();
            } catch (Exception e) {
                logger.severe(e.getMessage());
            }
        if (shortenedStream != null)
            try {
                shortenedStream.close();
            } catch (Exception e) {
                logger.severe(e.getMessage());
            }
        if (current != null)
            try {
                current.close();
            } catch (Exception e) {
                logger.severe(e.getMessage());
            }
    }
    logger.fine("filename: " + wavFile.getAbsolutePath());
}
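
The skip arithmetic above derives a byte rate as getFrameSize() * getFrameRate() and multiplies it by the segment start time. A small sketch of the same idea, with a hypothetical helper that keeps the resulting offset on a frame boundary:

import javax.sound.sampled.AudioFormat;

public class FrameOffsets {

    // Converts a time offset in seconds into a byte offset that starts on a frame boundary.
    static long byteOffsetForSeconds(AudioFormat format, double seconds) {
        long frames = Math.round(seconds * format.getFrameRate());
        return frames * format.getFrameSize();
    }

    public static void main(String[] args) {
        // 16 kHz, 16-bit mono PCM: 2 bytes per frame
        AudioFormat format = new AudioFormat(16000f, 16, 1, true, false);
        System.out.println(byteOffsetForSeconds(format, 1.5)); // 24000 frames -> 48000 bytes
    }
}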

From source file:sx.blah.discord.api.internal.DiscordUtils.java

/**
 * Converts an {@link AudioInputStream} to 48000Hz 16 bit stereo signed Big Endian PCM format.
 *
 * @param stream The original stream.
 * @return The PCM encoded stream.
 */
public static AudioInputStream getPCMStream(AudioInputStream stream) {
    AudioFormat baseFormat = stream.getFormat();

    //Converts first to PCM data. If the data is already PCM data, this will not change anything.
    AudioFormat toPCM = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, baseFormat.getSampleRate(),
            //AudioConnection.OPUS_SAMPLE_RATE,
            baseFormat.getSampleSizeInBits() != -1 ? baseFormat.getSampleSizeInBits() : 16,
            baseFormat.getChannels(),
            //If we are given a frame size, use it. Otherwise, assume 16 bits (2 8bit shorts) per channel.
            baseFormat.getFrameSize() != -1 ? baseFormat.getFrameSize() : 2 * baseFormat.getChannels(),
            baseFormat.getFrameRate() != -1 ? baseFormat.getFrameRate() : baseFormat.getSampleRate(),
            baseFormat.isBigEndian());
    AudioInputStream pcmStream = AudioSystem.getAudioInputStream(toPCM, stream);

    //Then resamples to a sample rate of 48000hz and ensures that data is Big Endian.
    AudioFormat audioFormat = new AudioFormat(toPCM.getEncoding(), OpusUtil.OPUS_SAMPLE_RATE,
            toPCM.getSampleSizeInBits(), toPCM.getChannels(), toPCM.getFrameSize(), toPCM.getFrameRate(), true);

    return AudioSystem.getAudioInputStream(audioFormat, pcmStream);
}
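
A hypothetical usage sketch (the file name is a placeholder, and decoding compressed input such as MP3 additionally requires a matching service provider on the classpath; getPCMStream itself is an internal Discord4J utility):

import java.io.File;

import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

import sx.blah.discord.api.internal.DiscordUtils;

public class PcmConversionDemo {
    public static void main(String[] args) throws Exception {
        AudioInputStream source = AudioSystem.getAudioInputStream(new File("audiofile.mp3"));
        AudioInputStream pcm = DiscordUtils.getPCMStream(source);

        // The converted stream is 48 kHz, big-endian, signed PCM; the frame size is
        // carried over from the source format or inferred as 2 bytes per channel.
        System.out.println("frame size = " + pcm.getFormat().getFrameSize());
    }
}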