Example usage for javax.sound.sampled AudioFormat getFrameSize

List of usage examples for javax.sound.sampled AudioFormat getFrameSize

Introduction

On this page you can find example usages of javax.sound.sampled AudioFormat.getFrameSize.

Prototype

public int getFrameSize() 

Source Link

Document

Obtains the frame size in bytes.

Usage

From source file:Main.java

/**
 * Reads sampled audio data from the given URL and plays it to completion.
 *
 * <p>If the stream's native encoding cannot be played directly, it is
 * transcoded to 16-bit signed little-endian PCM first. The method blocks
 * until playback has finished and always releases the line and the stream.
 *
 * @param url location of the audio resource to play
 * @throws IOException if the stream cannot be read
 * @throws UnsupportedAudioFileException if the resource is not recognized audio
 * @throws LineUnavailableException if no playback line can be obtained
 */
public static void streamSampledAudio(URL url)
        throws IOException, UnsupportedAudioFileException, LineUnavailableException {
    AudioInputStream audioIn = null; // source of audio data
    SourceDataLine playbackLine = null; // sink the data is written to

    try {
        // Open the stream and inspect its format.
        audioIn = AudioSystem.getAudioInputStream(url);
        AudioFormat format = audioIn.getFormat();
        DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, format);

        // Transcode to 16-bit signed little-endian PCM when the native
        // encoding is not directly supported by any line.
        if (!AudioSystem.isLineSupported(lineInfo)) {
            AudioFormat pcm = new AudioFormat(format.getSampleRate(), 16, format.getChannels(), true, false);
            audioIn = AudioSystem.getAudioInputStream(pcm, audioIn);

            // Refresh format and line info for the transcoded stream.
            format = audioIn.getFormat();
            lineInfo = new DataLine.Info(SourceDataLine.class, format);
        }

        // Open the line through which the audio will be played.
        playbackLine = (SourceDataLine) AudioSystem.getLine(lineInfo);
        playbackLine.open(format);

        // Transfer buffer large enough for 4k audio frames; the
        // SourceDataLine keeps its own internal buffer as well.
        int frameBytes = format.getFrameSize();
        byte[] buf = new byte[4 * 1024 * frameBytes];
        int buffered = 0; // bytes currently held in buf
        boolean playing = false; // line not started yet

        while (true) {
            // Pull more bytes from the stream; -1 signals end of stream.
            int n = audioIn.read(buf, buffered, buf.length - buffered);
            if (n == -1)
                break;
            buffered += n;

            // Start the line once the first data has arrived.
            if (!playing) {
                playbackLine.start();
                playing = true;
            }

            // The line only accepts whole frames, so write the largest
            // multiple of the frame size we currently hold.
            int writable = buffered - (buffered % frameBytes);
            playbackLine.write(buf, 0, writable);

            // Shift any partial frame to the front of the buffer so the
            // next read appends after it.
            int leftover = buffered - writable;
            if (leftover > 0)
                System.arraycopy(buf, writable, buf, 0, leftover);
            buffered = leftover;
        }

        // Block until all buffered sound has finished playing.
        playbackLine.drain();
    } finally { // Always relinquish the resources we use
        if (playbackLine != null)
            playbackLine.close();
        if (audioIn != null)
            audioIn.close();
    }
}

From source file:tvbrowser.extras.reminderplugin.ReminderPlugin.java

/**
 * Plays a sound.
 *
 * <p>MIDI files ({@code *.mid}) are played through the system sequencer;
 * all other files are streamed to a {@code SourceDataLine} on a background
 * thread. If no matching data line is available, or if playback of an
 * existing file fails, the method falls back to {@code Applet.newAudioClip}.
 *
 * @param fileName
 *          The file name of the sound to play.
 * @return The sound Object: the {@code Sequencer} for MIDI files, the
 *         {@code SourceDataLine} for streamed sampled audio, or
 *         {@code null} when the clip fallback was used or an error occurred.
 */
public static Object playSound(final String fileName) {
    try {
        if (StringUtils.endsWithIgnoreCase(fileName, ".mid")) {
            // MIDI: play through the default sequencer.
            final Sequencer sequencer = MidiSystem.getSequencer();
            sequencer.open();

            final InputStream midiFile = new FileInputStream(fileName);
            sequencer.setSequence(MidiSystem.getSequence(midiFile));

            sequencer.start();

            // Low-priority watchdog: polls until playback ends, then
            // releases the sequencer and the file stream.
            new Thread("Reminder MIDI sequencer") {
                @Override
                public void run() {
                    setPriority(Thread.MIN_PRIORITY);
                    while (sequencer.isRunning()) {
                        try {
                            Thread.sleep(100);
                        } catch (Exception ee) {
                            // ignore; keep polling
                        }
                    }

                    try {
                        sequencer.close();
                        midiFile.close();
                    } catch (Exception ee) {
                        // ignore; best-effort cleanup
                    }
                }
            }.start();

            return sequencer;
        } else {
            // Sampled audio: open the stream and look for a playback line.
            final AudioInputStream ais = AudioSystem.getAudioInputStream(new File(fileName));

            final AudioFormat format = ais.getFormat();
            final DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);

            if (AudioSystem.isLineSupported(info)) {
                final SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);

                line.open(format);
                line.start();

                new Thread("Reminder audio playing") {
                    // NOTE(review): set by the line listener and read by the
                    // pump loop, but not volatile — a stop may be observed
                    // late; confirm that is acceptable here.
                    private boolean stopped;

                    @Override
                    public void run() {
                        // Buffer holds exactly 1024 whole frames.
                        byte[] myData = new byte[1024 * format.getFrameSize()];
                        int numBytesToRead = myData.length;
                        int numBytesRead = 0;
                        int total = 0;
                        // Total payload in bytes = frame size * frame count.
                        int totalToRead = (int) (format.getFrameSize() * ais.getFrameLength());
                        stopped = false;

                        // Close line and stream as soon as the line stops
                        // running. NOTE(review): "line != null" is always
                        // true here — the check is redundant.
                        line.addLineListener(new LineListener() {
                            public void update(LineEvent event) {
                                if (line != null && !line.isRunning()) {
                                    stopped = true;
                                    line.close();
                                    try {
                                        ais.close();
                                    } catch (Exception ee) {
                                        // ignore; best-effort cleanup
                                    }
                                }
                            }
                        });

                        try {
                            // Pump bytes from the stream into the line until
                            // everything is written or playback was stopped.
                            while (total < totalToRead && !stopped) {
                                numBytesRead = ais.read(myData, 0, numBytesToRead);

                                if (numBytesRead == -1) {
                                    break;
                                }

                                total += numBytesRead;
                                line.write(myData, 0, numBytesRead);
                            }
                        } catch (Exception e) {
                            // best-effort playback: abort silently on error
                        }

                        line.drain();
                        line.stop();
                    }
                }.start();

                return line;
            } else {
                // No suitable data line: fall back to the applet audio clip.
                URL url = new File(fileName).toURI().toURL();
                AudioClip clip = Applet.newAudioClip(url);
                clip.play();
            }
        }

    } catch (Exception e) {
        e.printStackTrace();
        if ((new File(fileName)).isFile()) {
            // File exists, so the format was likely the problem — retry
            // with the more tolerant applet audio clip.
            URL url;
            try {
                url = new File(fileName).toURI().toURL();
                AudioClip clip = Applet.newAudioClip(url);
                clip.play();
            } catch (MalformedURLException e1) {
                // ignore; no URL can be built for this path
            }
        } else {
            // File does not exist: report the error to the user.
            String msg = mLocalizer.msg("error.1", "Error loading reminder sound file!\n({0})", fileName);
            JOptionPane.showMessageDialog(UiUtilities.getBestDialogParent(MainFrame.getInstance()), msg,
                    Localizer.getLocalization(Localizer.I18N_ERROR), JOptionPane.ERROR_MESSAGE);
        }
    }
    return null;
}

From source file:edu.tsinghua.lumaqq.Sounder.java

/**
 * Loads a sound file and prepares it for playback: sampled audio is opened
 * as a {@code Clip} (converting ALAW/ULAW to PCM when necessary), anything
 * else is treated as MIDI data and handed to the sequencer.
 *
 * @param filename path of the sound file to load
 * @return true if the sound was loaded successfully, false otherwise
 */
private boolean loadSound(String filename) {
    // Try to open the file as sampled audio; if that fails, fall back to
    // reading it as a raw buffered stream (assumed to be MIDI data).
    File file = new File(filename);
    try {
        currentSound = AudioSystem.getAudioInputStream(file);
    } catch (Exception e) {
        try {
            FileInputStream is = new FileInputStream(file);
            currentSound = new BufferedInputStream(is, 1024);
        } catch (Exception ex) {
            log.error(ex.getMessage());
            currentSound = null;
            return false;
        }
    }

    // Sampled audio: open a Clip for it.
    if (currentSound instanceof AudioInputStream) {
        try {
            AudioInputStream stream = (AudioInputStream) currentSound;
            AudioFormat format = stream.getFormat();

            // ALAW/ULAW cannot be played directly; convert to signed PCM.
            // Sample size and frame size double during the conversion.
            if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
                    || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
                AudioFormat tmp = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                        format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                        format.getFrameRate(), true);
                stream = AudioSystem.getAudioInputStream(tmp, stream);
                format = tmp;
            }
            // Buffer size = total frames * bytes per frame.
            DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                    ((int) stream.getFrameLength() * format.getFrameSize()));

            Clip clip = (Clip) AudioSystem.getLine(info);
            clip.open(stream);
            currentSound = clip;
        } catch (Exception ex) {
            log.error(ex.getMessage());
            currentSound = null;
            return false;
        }
    } else if (currentSound instanceof Sequence || currentSound instanceof BufferedInputStream) {
        // MIDI: feed the sequence (or the raw stream) to the sequencer.
        try {
            sequencer.open();
            if (currentSound instanceof Sequence) {
                sequencer.setSequence((Sequence) currentSound);
            } else {
                sequencer.setSequence((BufferedInputStream) currentSound);
            }
            log.trace("Sequence Created");
        } catch (InvalidMidiDataException imde) {
            log.error("???");
            currentSound = null;
            return false;
        } catch (Exception ex) {
            log.error(ex.getMessage());
            currentSound = null;
            return false;
        }
    }

    return true;
}

From source file:SimpleSoundPlayer.java

/**
 * Loads a sound file and prepares it for playback: sampled audio is opened
 * as a {@code Clip} (converting ALAW/ULAW to PCM when necessary), anything
 * else is treated as MIDI data and handed to the sequencer.
 *
 * @param object the {@code File} to load (cast unchecked)
 * @return true if the sound was loaded successfully, false otherwise
 */
public boolean loadSound(Object object) {
    duration = 0.0;

    currentName = ((File) object).getName();
    // Try sampled audio first; fall back to a raw buffered stream
    // (assumed to be MIDI data) if the format is not recognized.
    try {
        currentSound = AudioSystem.getAudioInputStream((File) object);
    } catch (Exception e1) {
        try {
            FileInputStream is = new FileInputStream((File) object);
            currentSound = new BufferedInputStream(is, 1024);
        } catch (Exception e3) {
            e3.printStackTrace();
            currentSound = null;
            return false;
        }
        // }
    }

    // user pressed stop or changed tabs while loading
    if (sequencer == null) {
        currentSound = null;
        return false;
    }

    // Sampled audio: open a Clip for it.
    if (currentSound instanceof AudioInputStream) {
        try {
            AudioInputStream stream = (AudioInputStream) currentSound;
            AudioFormat format = stream.getFormat();

            /*
             * We can't yet open the device for ALAW/ULAW playback, so
             * convert ALAW/ULAW to signed PCM. Sample size and frame size
             * double during the conversion.
             */

            if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
                    || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
                AudioFormat tmp = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                        format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                        format.getFrameRate(), true);
                stream = AudioSystem.getAudioInputStream(tmp, stream);
                format = tmp;
            }
            // Buffer size = total frames * bytes per frame.
            DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                    ((int) stream.getFrameLength() * format.getFrameSize()));

            Clip clip = (Clip) AudioSystem.getLine(info);
            clip.addLineListener(this);
            clip.open(stream);
            currentSound = clip;
            // seekSlider.setMaximum((int) stream.getFrameLength());
        } catch (Exception ex) {
            ex.printStackTrace();
            currentSound = null;
            return false;
        }
    } else if (currentSound instanceof Sequence || currentSound instanceof BufferedInputStream) {
        // MIDI: feed the sequence (or the raw stream) to the sequencer.
        try {
            sequencer.open();
            if (currentSound instanceof Sequence) {
                sequencer.setSequence((Sequence) currentSound);
            } else {
                sequencer.setSequence((BufferedInputStream) currentSound);
            }

        } catch (InvalidMidiDataException imde) {
            System.out.println("Unsupported audio file.");
            currentSound = null;
            return false;
        } catch (Exception ex) {
            ex.printStackTrace();
            currentSound = null;
            return false;
        }
    }

    duration = getDuration();

    return true;
}

From source file:com.arkatay.yada.codec.AudioRecorder.java

/**
 * Creates a new instance of AudioRecorder with an audio format that the
 * input voice data is using.
 *
 * @param  audioFormat the audioFormat used by the recorded voice data
 * @param  injectSilence stored flag controlling silence injection
 *         (semantics defined by the rest of the class — not visible here)
 */
public AudioRecorder(AudioFormat audioFormat, boolean injectSilence) {
    // Create a logger for this class
    log = LogFactory.getLog(getClass());

    this.audioFormat = audioFormat;
    this.injectSilence = injectSilence;

    hashInteger = new HashInteger();
    recorderMap = new HashMap<HashInteger, AudioChannelRecorder>(32);

    // A freshly allocated byte[] is already zero-filled by the JVM
    // (JLS 4.12.5), so the former explicit fill loop was redundant.
    silenceBuffer = new byte[SILENCE_BUFFER_SIZE];

    // Bytes of audio captured per millisecond.
    // NOTE(review): frameRate * frameSize / 1000 yields BYTES per ms, not
    // samples — the field name looks misleading; confirm against its users.
    samplesPerMillisecond = (int) (audioFormat.getFrameRate() * audioFormat.getFrameSize() / 1000);
}

From source file:com.opensmile.maven.EmoRecService.java

/**
 * Computes the duration of an audio file in seconds from its byte length,
 * frame size and frame rate.
 *
 * @param filename path of the audio file
 * @return the duration in seconds, or 0 if the file cannot be read or its
 *         format does not report a frame size/rate
 */
private double getAudioDuration(String filename) {
    File file = new File(filename);
    float durationInSeconds = 0;
    // try-with-resources: the original never closed the stream, leaking a
    // file handle on every call.
    try (AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(file)) {
        AudioFormat format = audioInputStream.getFormat();
        long audioFileLength = file.length();
        int frameSize = format.getFrameSize();
        float frameRate = format.getFrameRate();
        // Guard against AudioSystem.NOT_SPECIFIED (-1), which would
        // otherwise produce a meaningless negative duration.
        if (frameSize != AudioSystem.NOT_SPECIFIED && frameRate != AudioSystem.NOT_SPECIFIED) {
            durationInSeconds = (audioFileLength / (frameSize * frameRate));
        }
    } catch (UnsupportedAudioFileException | IOException e) {
        e.printStackTrace();
    }

    return durationInSeconds;
}

From source file:com.player.BasicMP3Player.java

/**
 * Inits AudioInputStream and AudioFileFormat from the data source.
 *
 * <p>Opens the stream for the configured data source (URL, File or
 * InputStream), creates the playback line, then notifies listeners with a
 * property map describing the audio file format.
 *
 * @throws BasicPlayerException wrapping any {@code LineUnavailableException},
 *         {@code UnsupportedAudioFileException} or {@code IOException}
 */
private void initAudioInputStream() throws BasicPlayerException {
    try {
        reset();
        notifyEvent(BasicPlayerEvent.OPENING, getEncodedStreamPosition(), -1, m_dataSource);
        // Dispatch on the concrete type of the data source.
        if (m_dataSource instanceof URL) {
            initAudioInputStream((URL) m_dataSource);
        } else if (m_dataSource instanceof File) {
            initAudioInputStream((File) m_dataSource);
        } else if (m_dataSource instanceof InputStream) {
            initAudioInputStream((InputStream) m_dataSource);
        }
        createLine();
        // Notify listeners with AudioFileFormat properties.
        Map properties = null;
        if (m_audioFileFormat instanceof TAudioFileFormat) {
            // Tritonus SPI compliant audio file format.
            properties = ((TAudioFileFormat) m_audioFileFormat).properties();
            // Clone the Map because it is not mutable.
            properties = deepCopy(properties);
        } else
            properties = new HashMap();
        // Add JavaSound properties — only values the format actually
        // reports (> 0 / non-null) are published.
        if (m_audioFileFormat.getByteLength() > 0)
            properties.put("audio.length.bytes", new Integer(m_audioFileFormat.getByteLength()));
        if (m_audioFileFormat.getFrameLength() > 0)
            properties.put("audio.length.frames", new Integer(m_audioFileFormat.getFrameLength()));
        if (m_audioFileFormat.getType() != null)
            properties.put("audio.type", (m_audioFileFormat.getType().toString()));
        // Audio format.
        AudioFormat audioFormat = m_audioFileFormat.getFormat();
        if (audioFormat.getFrameRate() > 0)
            properties.put("audio.framerate.fps", new Float(audioFormat.getFrameRate()));
        if (audioFormat.getFrameSize() > 0)
            properties.put("audio.framesize.bytes", new Integer(audioFormat.getFrameSize()));
        if (audioFormat.getSampleRate() > 0)
            properties.put("audio.samplerate.hz", new Float(audioFormat.getSampleRate()));
        if (audioFormat.getSampleSizeInBits() > 0)
            properties.put("audio.samplesize.bits", new Integer(audioFormat.getSampleSizeInBits()));
        if (audioFormat.getChannels() > 0)
            properties.put("audio.channels", new Integer(audioFormat.getChannels()));
        if (audioFormat instanceof TAudioFormat) {
            // Tritonus SPI compliant audio format: merge its extra properties.
            Map addproperties = ((TAudioFormat) audioFormat).properties();
            properties.putAll(addproperties);
        }
        // Tell every registered listener the source has been opened.
        Iterator it = m_listeners.iterator();
        while (it.hasNext()) {
            BasicPlayerListener bpl = (BasicPlayerListener) it.next();
            bpl.opened(m_dataSource, properties);
        }
        m_status = OPENED;
        notifyEvent(BasicPlayerEvent.OPENED, getEncodedStreamPosition(), -1, null);
    } catch (LineUnavailableException e) {
        throw new BasicPlayerException(e);
    } catch (UnsupportedAudioFileException e) {
        throw new BasicPlayerException(e);
    } catch (IOException e) {
        throw new BasicPlayerException(e);
    }
}

From source file:BasicPlayer.java

/**
 * Inits AudioInputStream and AudioFileFormat from the data source.
 *
 * <p>Opens the stream for the configured data source (URL, File or
 * InputStream), creates the playback line, then notifies listeners with a
 * property map describing the audio file format. The opened
 * {@code SourceDataLine} is also published in the map under the key
 * {@code "basicplayer.sourcedataline"}.
 *
 * @throws BasicPlayerException wrapping any {@code LineUnavailableException},
 *         {@code UnsupportedAudioFileException} or {@code IOException}
 */
protected void initAudioInputStream() throws BasicPlayerException {
    try {
        reset();
        notifyEvent(BasicPlayerEvent.OPENING, getEncodedStreamPosition(), -1, m_dataSource);
        // Dispatch on the concrete type of the data source.
        if (m_dataSource instanceof URL) {
            initAudioInputStream((URL) m_dataSource);
        } else if (m_dataSource instanceof File) {
            initAudioInputStream((File) m_dataSource);
        } else if (m_dataSource instanceof InputStream) {
            initAudioInputStream((InputStream) m_dataSource);
        }
        createLine();
        // Notify listeners with AudioFileFormat properties.
        Map properties = null;
        if (m_audioFileFormat instanceof TAudioFileFormat) {
            // Tritonus SPI compliant audio file format.
            properties = ((TAudioFileFormat) m_audioFileFormat).properties();
            // Clone the Map because it is not mutable.
            properties = deepCopy(properties);
        } else
            properties = new HashMap();
        // Add JavaSound properties — only values the format actually
        // reports (> 0 / non-null) are published.
        if (m_audioFileFormat.getByteLength() > 0)
            properties.put("audio.length.bytes", new Integer(m_audioFileFormat.getByteLength()));
        if (m_audioFileFormat.getFrameLength() > 0)
            properties.put("audio.length.frames", new Integer(m_audioFileFormat.getFrameLength()));
        if (m_audioFileFormat.getType() != null)
            properties.put("audio.type", (m_audioFileFormat.getType().toString()));
        // Audio format.
        AudioFormat audioFormat = m_audioFileFormat.getFormat();
        if (audioFormat.getFrameRate() > 0)
            properties.put("audio.framerate.fps", new Float(audioFormat.getFrameRate()));
        if (audioFormat.getFrameSize() > 0)
            properties.put("audio.framesize.bytes", new Integer(audioFormat.getFrameSize()));
        if (audioFormat.getSampleRate() > 0)
            properties.put("audio.samplerate.hz", new Float(audioFormat.getSampleRate()));
        if (audioFormat.getSampleSizeInBits() > 0)
            properties.put("audio.samplesize.bits", new Integer(audioFormat.getSampleSizeInBits()));
        if (audioFormat.getChannels() > 0)
            properties.put("audio.channels", new Integer(audioFormat.getChannels()));
        if (audioFormat instanceof TAudioFormat) {
            // Tritonus SPI compliant audio format: merge its extra properties.
            Map addproperties = ((TAudioFormat) audioFormat).properties();
            properties.putAll(addproperties);
        }
        // Add SourceDataLine
        properties.put("basicplayer.sourcedataline", m_line);
        // Tell every registered listener the source has been opened.
        Iterator it = m_listeners.iterator();
        while (it.hasNext()) {
            BasicPlayerListener bpl = (BasicPlayerListener) it.next();
            bpl.opened(m_dataSource, properties);
        }
        m_status = OPENED;
        notifyEvent(BasicPlayerEvent.OPENED, getEncodedStreamPosition(), -1, null);
    } catch (LineUnavailableException e) {
        throw new BasicPlayerException(e);
    } catch (UnsupportedAudioFileException e) {
        throw new BasicPlayerException(e);
    } catch (IOException e) {
        throw new BasicPlayerException(e);
    }
}

From source file:com.arkatay.yada.codec.AudioEncoder.java

/**
 * Starts the encoder module: validates the requested audio format, opens
 * the capture line and starts the capturing thread.
 *
 * @param inputLine an externally supplied capture line, or null to let the
 *        encoder create one internally for the chosen format
 * @param audioFormatIndex index into {@code getSupportedAudioFormats()}
 * @throws LineUnavailableException if the index is out of bounds, an
 *         external line's format does not match, or the line cannot be
 *         created/opened
 * @throws IllegalStateException if the encoder is not in the OFF state
 */
public void startModule(TargetDataLine inputLine, int audioFormatIndex) throws LineUnavailableException {
    // Capture in fixed 20 ms frames.
    capturedFrameSizeInNanos = 20L * millisToNanos;

    if (state != STATE_OFF)
        throw new IllegalStateException("Trying to re-start the encoder");

    // Check bounds
    AudioFormat[] audioFormats = getSupportedAudioFormats();
    if (audioFormatIndex < 0 || audioFormatIndex >= audioFormats.length)
        throw new LineUnavailableException("Audio format array out of bounds");

    // Get format
    AudioFormat audioFormat = audioFormats[audioFormatIndex];

    // Create line if created internally
    if (inputLine == null) {
        inputLine = createLine(audioFormat);
    }

    // Validate the audio format if external
    else if (!audioFormat.matches(inputLine.getFormat())) {
        throw new LineUnavailableException("Audio format not supported");
    }

    this.inputLine = inputLine;
    this.audioFormatIndex = audioFormatIndex;

    // Call init on the sub-class implementation
    init();

    // Bytes per captured frame = bytes-per-second * frame duration.
    capturedFrameSizeInBytes = (int) (audioFormat.getFrameRate() * audioFormat.getFrameSize()
            * capturedFrameSizeInNanos / (1000 * millisToNanos));
    diffTimeNanosLimit = diffTimeMillisLimit * millisToNanos;

    // Open the input line, the wanted buffer size is N times as big as the frame size
    inputLine.open(audioFormat, 4 * capturedFrameSizeInBytes);
    inputLineBufferSize = inputLine.getBufferSize();
    log.debug("Input line is open with buffer size " + inputLineBufferSize);

    // Create a buffer for the captured frame
    captureBuffer = new byte[capturedFrameSizeInBytes];

    // Go to state idle
    state = STATE_IDLE;

    // Start the capturing thread, it will block until startProcessing is called
    thread.start();
}