Example usage for javax.sound.sampled AudioFormat getSampleSizeInBits

Introduction

On this page you can find example usages of javax.sound.sampled AudioFormat getSampleSizeInBits.

Prototype

public int getSampleSizeInBits() 

Document

Obtains the size of a sample, in bits. If the sample size is not defined for the format, the method returns AudioSystem.NOT_SPECIFIED.
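
A minimal, self-contained sketch of the call before the larger examples below (the class name and the file name clip.wav are illustrative assumptions):

import java.io.File;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;

public class SampleSizeDemo {
    public static void main(String[] args) throws Exception {
        // 44.1 kHz, 16-bit, stereo, signed, little-endian linear PCM
        AudioFormat pcm = new AudioFormat(44100f, 16, 2, true, false);
        System.out.println(pcm.getSampleSizeInBits()); // prints 16

        // For a file, the value may be AudioSystem.NOT_SPECIFIED (-1)
        // when the container does not fix a sample size.
        AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(new File("clip.wav"));
        System.out.println(fileFormat.getFormat().getSampleSizeInBits());
    }
}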

Usage

From source file:org.apache.tika.parser.audio.AudioParser.java

public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context)
        throws IOException, SAXException, TikaException {
    // AudioSystem expects the stream to support the mark feature
    if (!stream.markSupported()) {
        stream = new BufferedInputStream(stream);
    }
    stream = new SkipFullyInputStream(stream);
    try {
        AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(stream);
        Type type = fileFormat.getType();
        if (type == Type.AIFC || type == Type.AIFF) {
            metadata.set(Metadata.CONTENT_TYPE, "audio/x-aiff");
        } else if (type == Type.AU || type == Type.SND) {
            metadata.set(Metadata.CONTENT_TYPE, "audio/basic");
        } else if (type == Type.WAVE) {
            metadata.set(Metadata.CONTENT_TYPE, "audio/vnd.wave");
        }

        AudioFormat audioFormat = fileFormat.getFormat();
        int channels = audioFormat.getChannels();
        if (channels != AudioSystem.NOT_SPECIFIED) {
            metadata.set("channels", String.valueOf(channels));
            // TODO: Use XMPDM.TRACKS? (see also frame rate in AudioFormat)
        }
        float rate = audioFormat.getSampleRate();
        if (rate != AudioSystem.NOT_SPECIFIED) {
            metadata.set("samplerate", String.valueOf(rate));
            metadata.set(XMPDM.AUDIO_SAMPLE_RATE, Integer.toString((int) rate));
        }
        int bits = audioFormat.getSampleSizeInBits();
        if (bits != AudioSystem.NOT_SPECIFIED) {
            metadata.set("bits", String.valueOf(bits));
            if (bits == 8) {
                metadata.set(XMPDM.AUDIO_SAMPLE_TYPE, "8Int");
            } else if (bits == 16) {
                metadata.set(XMPDM.AUDIO_SAMPLE_TYPE, "16Int");
            } else if (bits == 32) {
                metadata.set(XMPDM.AUDIO_SAMPLE_TYPE, "32Int");
            }
        }
        metadata.set("encoding", audioFormat.getEncoding().toString());

        // Javadoc suggests that some of the following properties might
        // be available, but I had no success in finding any:

        // "duration" Long playback duration of the file in microseconds
        // "author" String name of the author of this file
        // "title" String title of this file
        // "copyright" String copyright message
        // "date" Date date of the recording or release
        // "comment" String an arbitrary text

        addMetadata(metadata, fileFormat.properties());
        addMetadata(metadata, audioFormat.properties());
    } catch (UnsupportedAudioFileException e) {
        // There is no way to know whether this exception was
        // caused by the document being corrupted or by the format
        // just being unsupported. So we do nothing.
    }

    XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
    xhtml.startDocument();
    xhtml.endDocument();
}
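
A hypothetical caller of this parser (the class name and the file name clip.wav are assumptions for illustration) can run it and then read back the sample-size metadata it sets:

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.metadata.XMPDM;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.parser.audio.AudioParser;
import org.xml.sax.helpers.DefaultHandler;

public class AudioParserDemo {
    public static void main(String[] args) throws Exception {
        try (InputStream in = Files.newInputStream(Paths.get("clip.wav"))) {
            Metadata metadata = new Metadata();
            new AudioParser().parse(in, new DefaultHandler(), metadata, new ParseContext());
            System.out.println(metadata.get("bits"));                  // e.g. "16"
            System.out.println(metadata.get(XMPDM.AUDIO_SAMPLE_TYPE)); // e.g. "16Int"
        }
    }
}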

From source file:org.jcodec.codecs.wav.WavHeader.java

public static WavHeader create(AudioFormat af, int size) {
    WavHeader w = emptyWavHeader();
    w.dataSize = size;
    FmtChunk fmt = new FmtChunk();
    int bitsPerSample = af.getSampleSizeInBits();
    int bytesPerSample = bitsPerSample / 8;
    int sampleRate = (int) af.getSampleRate();
    w.fmt.bitsPerSample = (short) bitsPerSample;
    w.fmt.blockAlign = (short) (af.getFrameSize());
    w.fmt.byteRate = (int) af.getFrameRate() * af.getFrameSize();
    w.fmt.numChannels = (short) af.getChannels();
    w.fmt.sampleRate = (int) af.getSampleRate();
    return w;
}
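
The factory above copies AudioFormat fields into the WAV fmt chunk. A standalone sketch of the same arithmetic using only javax.sound.sampled (the variable names mirror the chunk fields and are purely illustrative):

import javax.sound.sampled.AudioFormat;

public class FmtChunkFields {
    public static void main(String[] args) {
        // 48 kHz, 16-bit, stereo, signed, little-endian PCM
        AudioFormat af = new AudioFormat(48000f, 16, 2, true, false);

        short bitsPerSample = (short) af.getSampleSizeInBits();     // 16
        short numChannels = (short) af.getChannels();               // 2
        int sampleRate = (int) af.getSampleRate();                  // 48000
        short blockAlign = (short) af.getFrameSize();               // 4 bytes per frame
        int byteRate = (int) af.getFrameRate() * af.getFrameSize(); // 192000 bytes/s

        System.out.printf("%d-bit, %d ch, %d Hz, blockAlign=%d, byteRate=%d%n",
                bitsPerSample, numChannels, sampleRate, blockAlign, byteRate);
    }
}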

From source file:org.yccheok.jstock.chat.Utils.java

public static void playSound(final Sound sound) {
    if (sounds.size() == 0) {
        for (Sound s : Sound.values()) {
            AudioInputStream stream = null;
            Clip clip = null;

            try {
                switch (s) {
                case ALERT:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "alert.wav"));
                    break;
                case LOGIN:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "login.wav"));
                    break;
                case LOGOUT:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "logout.wav"));
                    break;
                case RECEIVE:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "receive.wav"));
                    break;
                case SEND:
                    stream = AudioSystem.getAudioInputStream(new File(Utils.getSoundsDirectory() + "send.wav"));
                    break;
                default:
                    throw new java.lang.IllegalArgumentException("Missing case " + s);
                }

                // At present, ALAW and ULAW encodings must be converted
                // to PCM_SIGNED before they can be played. 8-bit A-law/u-law
                // decodes to 16-bit linear PCM, hence the doubled sample and
                // frame sizes below.
                AudioFormat format = stream.getFormat();
                if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
                    format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                            format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                            format.getFrameRate(), true); // big endian
                    stream = AudioSystem.getAudioInputStream(format, stream);
                }

                // Create the clip
                DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                        ((int) stream.getFrameLength() * format.getFrameSize()));
                clip = (Clip) AudioSystem.getLine(info);

                // This method does not return until the audio file is completely loaded
                clip.open(stream);
                clip.drain();
                sounds.put(s, clip);
            } catch (MalformedURLException e) {
                log.error(null, e);
            } catch (IOException e) {
                log.error(null, e);
            } catch (LineUnavailableException e) {
                log.error(null, e);
            } catch (UnsupportedAudioFileException e) {
                log.error(null, e);
            } finally {
            }
        }

    }
    soundPool.execute(new Runnable() {
        @Override
        public void run() {
            Clip clip = sounds.get(sound);

            if (clip == null) {
                return;
            }

            clip.stop();
            clip.flush();
            clip.setFramePosition(0);
            clip.loop(0);
            // Wait for the sound to finish.
            //while (clip.isRunning()) {
            //    try {
            //        Thread.sleep(1);
            //    } catch (InterruptedException ex) {
            //        log.error(null, ex);
            //    }
            //}
        }
    });
}

From source file:sx.blah.discord.api.internal.DiscordUtils.java

/**
 * Converts an {@link AudioInputStream} to 48000Hz 16 bit stereo signed Big Endian PCM format.
 *
 * @param stream The original stream.
 * @return The PCM encoded stream.
 */
public static AudioInputStream getPCMStream(AudioInputStream stream) {
    AudioFormat baseFormat = stream.getFormat();

    //Converts first to PCM data. If the data is already PCM data, this will not change anything.
    AudioFormat toPCM = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, baseFormat.getSampleRate(),
            //AudioConnection.OPUS_SAMPLE_RATE,
            baseFormat.getSampleSizeInBits() != -1 ? baseFormat.getSampleSizeInBits() : 16,
            baseFormat.getChannels(),
            //If we are given a frame size, use it. Otherwise, assume 16 bits (2 bytes) per channel.
            baseFormat.getFrameSize() != -1 ? baseFormat.getFrameSize() : 2 * baseFormat.getChannels(),
            baseFormat.getFrameRate() != -1 ? baseFormat.getFrameRate() : baseFormat.getSampleRate(),
            baseFormat.isBigEndian());
    AudioInputStream pcmStream = AudioSystem.getAudioInputStream(toPCM, stream);

    //Then resamples to a sample rate of 48000hz and ensures that data is Big Endian.
    AudioFormat audioFormat = new AudioFormat(toPCM.getEncoding(), OpusUtil.OPUS_SAMPLE_RATE,
            toPCM.getSampleSizeInBits(), toPCM.getChannels(), toPCM.getFrameSize(), toPCM.getFrameRate(), true);

    return AudioSystem.getAudioInputStream(audioFormat, pcmStream);
}
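
A hypothetical caller of getPCMStream (the file name and buffer sizing are assumptions; decoding MP3 also requires an audio SPI such as mp3spi on the classpath):

import java.io.File;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import sx.blah.discord.api.internal.DiscordUtils;

public class PcmStreamDemo {
    public static void main(String[] args) throws Exception {
        try (AudioInputStream in = AudioSystem.getAudioInputStream(new File("music.mp3"));
                AudioInputStream pcm = DiscordUtils.getPCMStream(in)) {
            // 960 frames = 20 ms of audio at 48 kHz, a typical Opus frame length
            byte[] buffer = new byte[pcm.getFormat().getFrameSize() * 960];
            int read;
            while ((read = pcm.read(buffer)) != -1) {
                // hand `read` bytes of big-endian PCM to the Opus encoder
            }
        }
    }
}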

From source file:xtrememp.tag.GenericInfo.java

/**
 * Load info from AudioFileFormat.
 *
 * @param aff
 * @throws javax.sound.sampled.UnsupportedAudioFileException
 */
protected void loadInfo(AudioFileFormat aff) throws UnsupportedAudioFileException {
    encodingType = aff.getType().toString();
    AudioFormat audioFormat = aff.getFormat();
    channelsAsNumber = audioFormat.getChannels();
    sampleRateAsNumber = (int) audioFormat.getSampleRate();
    bitspersample = audioFormat.getSampleSizeInBits();
    framesize = audioFormat.getFrameSize();
    // uncompressed bit rate in kbit/s: bits per sample * sample rate * channels / 1000
    bitRateAsNumber = Math.round(bitspersample * sampleRateAsNumber * channelsAsNumber / 1000);
}
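
As a quick sanity check of that formula, the standard CD-audio parameters (16-bit, 44.1 kHz, stereo) give 1411 kbit/s uncompressed:

public class BitRateCheck {
    public static void main(String[] args) {
        int bitsPerSample = 16;
        int sampleRate = 44100;
        int channels = 2;
        // 16 * 44100 * 2 / 1000 = 1411 (integer division truncates 1411.2)
        System.out.println(bitsPerSample * sampleRate * channels / 1000);
    }
}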