Example usage for javax.sound.sampled AudioSystem getAudioInputStream

List of usage examples for javax.sound.sampled AudioSystem getAudioInputStream

Introduction

On this page you can find example usages of javax.sound.sampled.AudioSystem.getAudioInputStream.

Prototype

public static AudioInputStream getAudioInputStream(final File file)
        throws UnsupportedAudioFileException, IOException 

Source Link

Document

Obtains an audio input stream from the provided File.

Usage

From source file:fr.ritaly.dungeonmaster.audio.SoundSystemV1.java

/**
 * Initializes the sound system by pre-loading every sound file found in the
 * given directory into memory.
 *
 * @param directory the directory containing the sound files. Can't be null
 *            and must denote an existing directory.
 * @throws IOException if the directory can't be listed or a file can't be read.
 * @throws UnsupportedAudioFileException if a file isn't a supported audio format.
 * @throws LineUnavailableException declared for API compatibility.
 * @throws IllegalStateException if the sound system is already initialized.
 */
public synchronized void init(final File directory)
        throws IOException, UnsupportedAudioFileException, LineUnavailableException {

    Validate.isTrue(directory != null, "The given directory is null");
    Validate.isTrue(directory.exists(),
            "The given directory <" + directory.getAbsolutePath() + "> doesn't exist");
    Validate.isTrue(directory.isDirectory(),
            "The given path <" + directory.getAbsolutePath() + "> doesn't denote a directory");

    if (initialized) {
        throw new IllegalStateException("The sound system is already initialized");
    }

    if (log.isDebugEnabled()) {
        log.debug("Initializing sound system ...");
    }

    // List the files in the directory (listFiles() returns null on I/O error)
    final File[] files = directory.listFiles();

    if (files == null) {
        throw new IOException("Unable to list files in directory <" + directory.getAbsolutePath() + ">");
    }

    for (File file : files) {
        final AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(file);

        try {
            final AudioFormat format = audioInputStream.getFormat();

            // Pre-size the buffer from the file length to limit re-allocations
            final ByteArrayOutputStream outputStream = new ByteArrayOutputStream((int) file.length());

            final byte[] buffer = new byte[4096];

            int count;

            while ((count = audioInputStream.read(buffer)) > 0) {
                outputStream.write(buffer, 0, count);
            }

            // Cache the decoded audio data, keyed by file name
            sounds.put(file.getName(), new Sound(outputStream.toByteArray(), format));
        } finally {
            // Fixes a file-handle leak: the stream was never closed before
            audioInputStream.close();
        }
    }

    this.executorService = Executors.newFixedThreadPool(4);

    if (log.isInfoEnabled()) {
        log.info("Sound system initialized");
    }

    initialized = true;
}

From source file:sec_algo.aud_sec.java

/**
 * Reads the 8-byte header of {@code this.file}, prints its bits for
 * debugging, then decodes the file to signed PCM using the bitrate parsed
 * from the header.
 *
 * NOTE(review): {@code audstream} is never assigned, so this method always
 * returns null — callers should not rely on the return value.
 *
 * @return always null (see note above)
 */
public BufferedWriter getAudioStream() {
    FileInputStream fin = null;
    BufferedWriter audstream = null;

    try {
        // Read the first 8 bytes of the file (covers the MP3 frame header).
        fin = new FileInputStream(this.file);
        byte[] header = new byte[8];
        if (fin.read(header) != header.length) {
            // Previously the return value of read() was silently ignored.
            throw new IOException("Could not read the full 8-byte header from " + this.file);
        }
        fin.close();
        fin = null;

        // Dump the header bits for debugging.
        ArrayList<String> bitstring = new ArrayList<String>();
        for (int i = 0; i < header.length; i++)
            bitstring.add(String.format("%8s", Integer.toBinaryString(header[i] & 0xFF)).replace(' ', '0'));
        System.out.print("bit input: [/");
        for (int i = 0; i < bitstring.size(); i++) {
            System.out.print(bitstring.get(i) + " ");
        }
        System.out.println("]/");

        System.out.println(bitstring.get(0) + " " + bitstring.get(1) + " " + bitstring.get(2));
        System.out.println("Bitrate index: " + bitstring.get(2).substring(0, 4));

        // Decode the audio to signed PCM at the bitrate parsed from the header.
        AudioInputStream in = AudioSystem.getAudioInputStream(this.file);
        AudioInputStream din = null;
        try {
            AudioFormat baseFormat = in.getFormat();
            AudioFormat decodedFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                    baseFormat.getSampleRate(), getBitrate(bitstring.get(2).substring(0, 4)),
                    baseFormat.getChannels(), baseFormat.getChannels() * 2, baseFormat.getSampleRate(),
                    false);
            din = AudioSystem.getAudioInputStream(decodedFormat, in);
            int size = din.available();
            byte[] bytaud = new byte[size];
            din.read(bytaud);

            // NOTE(review): this re-dumps the *header* bits again rather than
            // the decoded audio — presumably leftover debug code; output kept
            // identical for compatibility.
            bitstring = new ArrayList<String>();
            for (int i = 0; i < header.length; i++)
                bitstring
                        .add(String.format("%8s", Integer.toBinaryString(header[i] & 0xFF)).replace(' ', '0'));
            System.out.print("bit input: [/");
            for (int i = 0; i < bitstring.size(); i++) {
                System.out.print(bitstring.get(i) + " ");
            }
            System.out.println("]/");
        } finally {
            // Fixes a leak: the streams were not closed on exception paths.
            in.close();
            if (din != null) {
                din.close();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // Close the header stream if an exception skipped the explicit close.
        if (fin != null) {
            try {
                fin.close();
            } catch (IOException ignored) {
                // best-effort cleanup
            }
        }
    }
    return audstream;
}

From source file:SimpleSoundPlayer.java

/**
 * Loads the given sound file into {@code currentSound}, preparing either a
 * sampled-audio {@link Clip} or a MIDI sequence for playback.
 *
 * @param object the sound file to load; must be a {@link File}
 * @return true if the sound was loaded successfully, false otherwise
 */
public boolean loadSound(Object object) {
    duration = 0.0;

    currentName = ((File) object).getName();
    try {
        // First, try to read the file as sampled audio (wav/au/aiff ...).
        currentSound = AudioSystem.getAudioInputStream((File) object);
    } catch (Exception e1) {
        try {
            // Not a supported sampled-audio format: fall back to a raw
            // buffered stream (later handed to the MIDI sequencer below).
            FileInputStream is = new FileInputStream((File) object);
            currentSound = new BufferedInputStream(is, 1024);
        } catch (Exception e3) {
            e3.printStackTrace();
            currentSound = null;
            return false;
        }
    }

    // user pressed stop or changed tabs while loading
    if (sequencer == null) {
        currentSound = null;
        return false;
    }

    if (currentSound instanceof AudioInputStream) {
        try {
            AudioInputStream stream = (AudioInputStream) currentSound;
            AudioFormat format = stream.getFormat();

            /**
             * we can't yet open the device for ALAW/ULAW playback, convert
             * ALAW/ULAW to PCM
             */

            if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
                    || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
                // PCM needs twice the sample/frame size of 8-bit ALAW/ULAW.
                AudioFormat tmp = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                        format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                        format.getFrameRate(), true);
                stream = AudioSystem.getAudioInputStream(tmp, stream);
                format = tmp;
            }
            // Size the line buffer to hold the whole (possibly converted) stream.
            DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                    ((int) stream.getFrameLength() * format.getFrameSize()));

            Clip clip = (Clip) AudioSystem.getLine(info);
            clip.addLineListener(this);
            clip.open(stream);
            currentSound = clip;
        } catch (Exception ex) {
            ex.printStackTrace();
            currentSound = null;
            return false;
        }
    } else if (currentSound instanceof Sequence || currentSound instanceof BufferedInputStream) {
        // MIDI path: hand the sequence (or the raw stream) to the sequencer.
        try {
            sequencer.open();
            if (currentSound instanceof Sequence) {
                sequencer.setSequence((Sequence) currentSound);
            } else {
                sequencer.setSequence((BufferedInputStream) currentSound);
            }

        } catch (InvalidMidiDataException imde) {
            System.out.println("Unsupported audio file.");
            currentSound = null;
            return false;
        } catch (Exception ex) {
            ex.printStackTrace();
            currentSound = null;
            return false;
        }
    }

    duration = getDuration();

    return true;
}

From source file:fr.ritaly.dungeonmaster.audio.SoundSystem.java

/**
 * Initializes the sound system: loads every sound file from the given
 * directory into memory and creates the playback thread pool.
 *
 * @param directory the directory containing the sound files. Can't be null
 *            and must denote an existing directory.
 * @throws IOException if the directory can't be listed or a file can't be read.
 * @throws UnsupportedAudioFileException if a file isn't a supported audio format.
 * @throws LineUnavailableException declared for API compatibility.
 * @throws IllegalStateException if the sound system is already initialized.
 */
public synchronized void init(final File directory)
        throws IOException, UnsupportedAudioFileException, LineUnavailableException {

    Validate.isTrue(directory != null, "The given directory is null");
    Validate.isTrue(directory.exists(),
            "The given directory <" + directory.getAbsolutePath() + "> doesn't exist");
    Validate.isTrue(directory.isDirectory(),
            "The given path <" + directory.getAbsolutePath() + "> doesn't denote a directory");

    if (initialized) {
        throw new IllegalStateException("The sound system is already initialized");
    }

    if (log.isDebugEnabled()) {
        log.debug("Initializing sound system ...");
    }

    // List the directory's files; listFiles() returns null on an I/O error
    final File[] files = directory.listFiles();

    if (files == null) {
        throw new IOException(
                "Unable to list the files in directory <" + directory.getAbsolutePath() + ">");
    }

    for (File file : files) {
        final AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(file);

        try {
            final AudioFormat format = audioInputStream.getFormat();

            // Pre-size the buffer from the file length to limit re-allocations
            final ByteArrayOutputStream outputStream = new ByteArrayOutputStream((int) file.length());

            final byte[] buffer = new byte[4096];

            int count;

            while ((count = audioInputStream.read(buffer)) > 0) {
                outputStream.write(buffer, 0, count);
            }

            // Cache the audio data in memory, keyed by file name
            sounds.put(file.getName(), new Sound(outputStream.toByteArray(), format));
        } finally {
            // Fixes a file-handle leak: the stream was never closed before
            audioInputStream.close();
        }
    }

    // At most 4 sounds can be played at the same time
    this.executorService = Executors.newFixedThreadPool(4);

    if (log.isInfoEnabled()) {
        log.info("Sound system initialized");
    }

    initialized = true;
}

From source file:net.dv8tion.jda.audio.player.URLPlayer.java

/**
 * Opens the given URL (optionally through JDA's global proxy) and sets the
 * resulting audio stream as this player's source.
 *
 * @param urlOfResource URL of the audio resource to play. Can't be null.
 * @param bufferSize size (in bytes) of the read-ahead buffer.
 * @throws IOException if the connection or stream can't be opened.
 * @throws UnsupportedAudioFileException if the resource isn't playable audio.
 * @throws IllegalArgumentException if the URL is null or yields no connection.
 */
public void setAudioUrl(URL urlOfResource, int bufferSize) throws IOException, UnsupportedAudioFileException {
    if (urlOfResource == null)
        throw new IllegalArgumentException(
                "A null URL was provided to the Player! Cannot find resource to play from a null URL!");

    this.urlOfResource = urlOfResource;

    // Route the connection through JDA's global proxy when one is configured.
    URLConnection conn = null;
    HttpHost jdaProxy = api.getGlobalProxy();
    if (jdaProxy != null) {
        InetSocketAddress proxyAddress = new InetSocketAddress(jdaProxy.getHostName(), jdaProxy.getPort());
        Proxy proxy = new Proxy(Proxy.Type.HTTP, proxyAddress);
        conn = urlOfResource.openConnection(proxy);
    } else {
        conn = urlOfResource.openConnection();
    }
    if (conn == null)
        throw new IllegalArgumentException(
                "The provided URL resulted in a null URLConnection! Does the resource exist?");

    conn.setRequestProperty("user-agent", userAgent);
    this.resourceStream = conn.getInputStream();
    bufferedResourceStream = new BufferedInputStream(resourceStream, bufferSize);
    AudioInputStream audioStream;
    try {
        audioStream = AudioSystem.getAudioInputStream(bufferedResourceStream);
    } catch (UnsupportedAudioFileException | IOException e) {
        // Don't leak the network stream when the resource isn't playable audio.
        bufferedResourceStream.close();
        throw e;
    }
    setAudioSource(audioStream);
}

From source file:org.sipfoundry.voicemail.Message.java

/**
 * Returns the duration (in seconds) of this message, computed from the
 * frame count and frame rate of its Wav file.
 *
 * @return the duration in seconds; 0 for an empty/truncated file, or the
 *         previously cached value when no Wav file exists.
 * @throws RuntimeException if the audio file can't be analyzed.
 */
public long getDuration() {
    // Calculate the duration (in seconds) from the Wav file
    File wavFile = getWavFile();
    if (wavFile != null) {
        AudioInputStream ais = null;
        try {
            ais = AudioSystem.getAudioInputStream(wavFile);
            // duration = frames / frames-per-second
            float secs = ais.getFrameLength() / ais.getFormat().getFrameRate();
            m_duration = Math.round(secs); // Round to the nearest second.
        } catch (EOFException e) {
            // An empty or truncated file counts as a zero-length message.
            m_duration = 0;
        } catch (Exception e) {
            String trouble = "Message::getDuration Problem determining duration of " + getWavFile().getPath();
            LOG.error(trouble, e);
            throw new RuntimeException(trouble, e);
        } finally {
            // Fixes a file-handle leak: the stream was never closed before.
            if (ais != null) {
                try {
                    ais.close();
                } catch (IOException ignored) {
                    // best-effort close
                }
            }
        }
    }
    return m_duration;
}

From source file:com.andrewkroh.cicso.rtp.AudioFileStreamer.java

/**
 * Constructs a new AudioFileStreamer whose source data will be read from
 * the specified URL. The encoding on the output stream will be the
 * specified {@code outputEncoding} value. Each RTP packet will contain the
 * corresponding number of samples to represent the amount of time given in
 * {@code outputPacketLengthMs}.
 *
 * @param sourceUrl
 *            URL of the source file
 * @param outputEncoding
 *            encoding type to use for the output data
 * @param outputPacketLengthMs
 *            amount of data to put into each packet
 * @param rtpSession
 *            {@code RtpSession} to use for streaming the data
 * @throws UnsupportedAudioFileException
 *             if the source file is in an unsupported format or if the
 *             source file cannot be converted to the specifed encoding type
 * @throws IOException
 *             if there is problem reading the source file
 */
public AudioFileStreamer(URL sourceUrl, EncodingType outputEncoding, long outputPacketLengthMs,
        RtpSession rtpSession) throws UnsupportedAudioFileException, IOException {
    this.sourceUrl = Preconditions.checkNotNull(sourceUrl, "Audio file source URL cannot be null.");
    this.outputEncodingType = Preconditions.checkNotNull(outputEncoding,
            "Output encoding type cannot be null.");
    this.rtpSession = Preconditions.checkNotNull(rtpSession, "RtpSession cannot be null.");
    this.outputPacketLengthMs = outputPacketLengthMs;

    // Read input source:
    AudioInputStream sourceStream = AudioSystem.getAudioInputStream(sourceUrl);
    try {
        AudioFormat conversionFormat = getConversionFormat(sourceStream.getFormat(), outputEncoding);
        LOGGER.debug("Input format: {}", audioFormatToString(sourceStream.getFormat()));
        LOGGER.debug("Conversion format: {}", audioFormatToString(conversionFormat));

        // Convert to output format:
        AudioInputStream outputStream = AudioSystem.getAudioInputStream(conversionFormat, sourceStream);
        try {
            outputFormat = outputStream.getFormat();
            LOGGER.debug("Output format: {}", audioFormatToString(outputFormat));

            // Buffer the entire converted stream in memory:
            outputDataBuffer = ByteBuffer.wrap(IOUtils.toByteArray(outputStream));
        } finally {
            // Fixes a stream leak: close the converted view once buffered.
            outputStream.close();
        }
    } finally {
        // Fixes a stream leak: the source stream was never closed before.
        sourceStream.close();
    }

    // Calculate packet size:
    numSamplesPerPacket = getNumberOfSamplesPerTimePeriod(outputFormat, outputPacketLengthMs,
            TimeUnit.MILLISECONDS);
    int sampleSizeBytes = outputFormat.getSampleSizeInBits() / 8;
    payloadSizeBytes = numSamplesPerPacket * sampleSizeBytes;
}

From source file:edu.tsinghua.lumaqq.Sounder.java

/**
 * Loads the sound file with the given name into {@code currentSound},
 * preparing either a sampled-audio {@link Clip} or a MIDI sequence.
 *
 * @param filename path of the sound file to load
 * @return true if the sound was loaded successfully, false otherwise
 */
private boolean loadSound(String filename) {
    // First, try to open the file as sampled audio.
    File file = new File(filename);
    try {
        currentSound = AudioSystem.getAudioInputStream(file);
    } catch (Exception e) {
        try {
            // Not sampled audio: fall back to a raw buffered stream
            // (later handed to the MIDI sequencer below).
            FileInputStream is = new FileInputStream(file);
            currentSound = new BufferedInputStream(is, 1024);
        } catch (Exception ex) {
            log.error(ex.getMessage());
            currentSound = null;
            return false;
        }
    }

    // Sampled-audio path: convert if necessary and load into a Clip.
    if (currentSound instanceof AudioInputStream) {
        try {
            AudioInputStream stream = (AudioInputStream) currentSound;
            AudioFormat format = stream.getFormat();

            // ALAW/ULAW can't be played directly; convert to signed PCM
            // (PCM needs twice the sample/frame size of 8-bit ALAW/ULAW).
            if ((format.getEncoding() == AudioFormat.Encoding.ULAW)
                    || (format.getEncoding() == AudioFormat.Encoding.ALAW)) {
                AudioFormat tmp = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                        format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                        format.getFrameRate(), true);
                stream = AudioSystem.getAudioInputStream(tmp, stream);
                format = tmp;
            }
            // Size the line buffer to hold the whole (possibly converted) stream.
            DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                    ((int) stream.getFrameLength() * format.getFrameSize()));

            Clip clip = (Clip) AudioSystem.getLine(info);
            clip.open(stream);
            currentSound = clip;
        } catch (Exception ex) {
            log.error(ex.getMessage());
            currentSound = null;
            return false;
        }
    } else if (currentSound instanceof Sequence || currentSound instanceof BufferedInputStream) {
        // MIDI path: hand the sequence (or the raw stream) to the sequencer.
        try {
            sequencer.open();
            if (currentSound instanceof Sequence) {
                sequencer.setSequence((Sequence) currentSound);
            } else {
                sequencer.setSequence((BufferedInputStream) currentSound);
            }
            log.trace("Sequence Created");
        } catch (InvalidMidiDataException imde) {
            // NOTE(review): log message literal is mojibake from a lost
            // encoding — kept byte-identical; restore the original text.
            log.error("???");
            currentSound = null;
            return false;
        } catch (Exception ex) {
            log.error(ex.getMessage());
            currentSound = null;
            return false;
        }
    }

    return true;
}

From source file:SoundPlayer.java

/**
 * Builds a player for the given sound file and its basic GUI (play button,
 * progress slider, time label). MIDI files are played through a Sequencer;
 * sampled audio is loaded into a Clip.
 *
 * @param f the sound file to play
 * @param isMidi true if {@code f} is a MIDI file, false for sampled audio
 * @throws IOException if the file can't be read
 * @throws UnsupportedAudioFileException if the sampled audio format is unsupported
 * @throws LineUnavailableException if no audio line is available
 * @throws MidiUnavailableException if the MIDI system is unavailable
 * @throws InvalidMidiDataException if the MIDI data is malformed
 */
public SoundPlayer(File f, boolean isMidi) throws IOException, UnsupportedAudioFileException,
        LineUnavailableException, MidiUnavailableException, InvalidMidiDataException {
    if (isMidi) { // The file is a MIDI file
        midi = true;
        // First, get a Sequencer to play sequences of MIDI events
        // That is, to send events to a Synthesizer at the right time.
        sequencer = MidiSystem.getSequencer(); // Used to play sequences
        sequencer.open(); // Turn it on.

        // Get a Synthesizer for the Sequencer to send notes to
        Synthesizer synth = MidiSystem.getSynthesizer();
        synth.open(); // acquire whatever resources it needs

        // The Sequencer obtained above may be connected to a Synthesizer
        // by default, or it may not. Therefore, we explicitly connect it.
        Transmitter transmitter = sequencer.getTransmitter();
        Receiver receiver = synth.getReceiver();
        transmitter.setReceiver(receiver);

        // Read the sequence from the file and tell the sequencer about it
        sequence = MidiSystem.getSequence(f);
        sequencer.setSequence(sequence);
        audioLength = (int) sequence.getTickLength(); // Get sequence length
    } else { // The file is sampled audio
        midi = false;
        // Getting a Clip object for a file of sampled audio data is kind
        // of cumbersome. The following lines do what we need.
        AudioInputStream ain = AudioSystem.getAudioInputStream(f);
        try {
            DataLine.Info info = new DataLine.Info(Clip.class, ain.getFormat());
            clip = (Clip) AudioSystem.getLine(info);
            clip.open(ain); // loads the entire stream into the clip
        } finally { // We're done with the input stream.
            ain.close();
        }
        // Get the clip length in microseconds and convert to milliseconds
        audioLength = (int) (clip.getMicrosecondLength() / 1000);
    }

    // Now create the basic GUI
    play = new JButton("Play"); // Play/stop button
    progress = new JSlider(0, audioLength, 0); // Shows position in sound
    time = new JLabel("0"); // Shows position as a #

    // When clicked, start or stop playing the sound
    play.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            if (playing)
                stop();
            else
                play();
        }
    });

    // Whenever the slider value changes, first update the time label.
    // Next, if we're not already at the new position, skip to it.
    progress.addChangeListener(new ChangeListener() {
        public void stateChanged(ChangeEvent e) {
            int value = progress.getValue();
            // Update the time label (ticks for MIDI, seconds.tenths otherwise)
            if (midi)
                time.setText(value + "");
            else
                time.setText(value / 1000 + "." + (value % 1000) / 100);
            // If we're not already there, skip there.
            if (value != audioPosition)
                skip(value);
        }
    });

    // This timer calls the tick() method 10 times a second to keep
    // our slider in sync with the music.
    timer = new javax.swing.Timer(100, new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            tick();
        }
    });

    // put those controls in a row
    Box row = Box.createHorizontalBox();
    row.add(play);
    row.add(progress);
    row.add(time);

    // And add them to this component.
    setLayout(new BoxLayout(this, BoxLayout.Y_AXIS));
    this.add(row);

    // Now add additional controls based on the type of the sound
    if (midi)
        addMidiControls();
    else
        addSampledControls();
}

From source file:org.yccheok.jstock.chat.Utils.java

/**
 * Plays the given sound asynchronously, lazily loading and caching a Clip
 * for every known sound from the sounds directory on first use.
 *
 * @param sound the sound to play; silently ignored if its clip failed to load
 */
public static void playSound(final Sound sound) {
    // Lazy one-time initialization: load every sound into a cached Clip.
    if (sounds.size() == 0) {
        for (Sound s : Sound.values()) {
            AudioInputStream stream = null;
            Clip clip = null;

            try {
                switch (s) {
                case ALERT:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "alert.wav"));
                    break;
                case LOGIN:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "login.wav"));
                    break;
                case LOGOUT:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "logout.wav"));
                    break;
                case RECEIVE:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "receive.wav"));
                    break;
                case SEND:
                    stream = AudioSystem.getAudioInputStream(new File(Utils.getSoundsDirectory() + "send.wav"));
                    break;
                default:
                    // Bug fix: report the unhandled enum constant (s), not the
                    // sound requested by the caller.
                    throw new java.lang.IllegalArgumentException("Missing case " + s);
                }

                // At present, ALAW and ULAW encodings must be converted
                // to PCM_SIGNED before they can be played
                AudioFormat format = stream.getFormat();
                if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
                    format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                            format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                            format.getFrameRate(), true); // big endian
                    stream = AudioSystem.getAudioInputStream(format, stream);
                }

                // Create the clip
                DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                        ((int) stream.getFrameLength() * format.getFrameSize()));
                clip = (Clip) AudioSystem.getLine(info);

                // This method does not return until the audio file is completely loaded
                clip.open(stream);
                clip.drain();
                sounds.put(s, clip);
            } catch (MalformedURLException e) {
                log.error(null, e);
            } catch (IOException e) {
                log.error(null, e);
            } catch (LineUnavailableException e) {
                log.error(null, e);
            } catch (UnsupportedAudioFileException e) {
                log.error(null, e);
            } finally {
                // Fixes a file-handle leak: Clip.open loads all the data, so
                // the source stream can (and should) be closed afterwards.
                if (stream != null) {
                    try {
                        stream.close();
                    } catch (IOException e) {
                        log.error(null, e);
                    }
                }
            }
        }

    }
    soundPool.execute(new Runnable() {
        @Override
        public void run() {
            Clip clip = sounds.get(sound);

            if (clip == null) {
                return;
            }

            // Rewind and play the clip once from the beginning.
            clip.stop();
            clip.flush();
            clip.setFramePosition(0);
            clip.loop(0);
        }
    });
}