Example usage for javax.sound.sampled AudioInputStream getFormat

List of usage examples for javax.sound.sampled AudioInputStream getFormat

Introduction

On this page you can find example usages of javax.sound.sampled AudioInputStream.getFormat().

Prototype

public AudioFormat getFormat() 

Source Link

Document

Obtains the audio format of the sound data in this audio input stream.

Usage

From source file:net.sf.firemox.tools.MToolKit.java

/**
 * loadClip loads the sound-file into a clip.
 * /*from w  ww . j a va2 s.c o m*/
 * @param soundFile
 *          file to be loaded and played.
 */
public static void loadClip(String soundFile) {
    AudioFormat audioFormat = null;
    AudioInputStream actionIS = null;
    try {
        // actionIS = AudioSystem.getAudioInputStream(input); // Does not work !
        actionIS = AudioSystem.getAudioInputStream(MToolKit.getFile(MToolKit.getSoundFile(soundFile)));
        AudioFormat.Encoding targetEncoding = AudioFormat.Encoding.PCM_SIGNED;
        actionIS = AudioSystem.getAudioInputStream(targetEncoding, actionIS);
        audioFormat = actionIS.getFormat();

    } catch (UnsupportedAudioFileException afex) {
        Log.error(afex);
    } catch (IOException ioe) {

        if (ioe.getMessage().equalsIgnoreCase("mark/reset not supported")) { // Ignore
            Log.error("IOException ignored.");
        }
        Log.error(ioe.getStackTrace());
    }

    // define the required attributes for our line,
    // and make sure a compatible line is supported.

    // get the source data line for play back.
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
    if (!AudioSystem.isLineSupported(info)) {
        Log.error("LineCtrl matching " + info + " not supported.");
        return;
    }

    // Open the source data line for play back.
    try {
        Clip clip = null;
        try {
            Clip.Info info2 = new Clip.Info(Clip.class, audioFormat);
            clip = (Clip) AudioSystem.getLine(info2);
            clip.open(actionIS);
            clip.start();
        } catch (IOException ioe) {
            Log.error(ioe);
        }
    } catch (LineUnavailableException ex) {
        Log.error("Unable to open the line: " + ex);
        return;
    }
}

From source file:org.yccheok.jstock.chat.Utils.java

/**
 * Plays the given sound asynchronously. On first use, every known {@code Sound}
 * is loaded from the sounds directory and cached as a {@link Clip}.
 *
 * @param sound the sound to play; silently ignored if its clip failed to load.
 */
public static void playSound(final Sound sound) {
    if (sounds.size() == 0) {
        for (Sound s : Sound.values()) {
            AudioInputStream stream = null;
            Clip clip = null;

            try {
                switch (s) {
                case ALERT:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "alert.wav"));
                    break;
                case LOGIN:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "login.wav"));
                    break;
                case LOGOUT:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "logout.wav"));
                    break;
                case RECEIVE:
                    stream = AudioSystem
                            .getAudioInputStream(new File(Utils.getSoundsDirectory() + "receive.wav"));
                    break;
                case SEND:
                    stream = AudioSystem.getAudioInputStream(new File(Utils.getSoundsDirectory() + "send.wav"));
                    break;
                default:
                    // Bug fix: report the unhandled loop constant, not the method argument.
                    throw new java.lang.IllegalArgumentException("Missing case " + s);
                }

                // At present, ALAW and ULAW encodings must be converted
                // to PCM_SIGNED before it can be played
                AudioFormat format = stream.getFormat();
                if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
                    format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                            format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                            format.getFrameRate(), true); // big endian
                    stream = AudioSystem.getAudioInputStream(format, stream);
                }

                // Create the clip
                DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                        ((int) stream.getFrameLength() * format.getFrameSize()));
                clip = (Clip) AudioSystem.getLine(info);

                // This method does not return until the audio file is completely loaded
                clip.open(stream);
                clip.drain();
                sounds.put(s, clip);
            } catch (MalformedURLException e) {
                log.error(null, e);
            } catch (IOException e) {
                log.error(null, e);
            } catch (LineUnavailableException e) {
                log.error(null, e);
            } catch (UnsupportedAudioFileException e) {
                log.error(null, e);
            } finally {
                // Bug fix: the stream was never closed (file-handle leak).
                // Clip.open has already copied the audio data, so closing is safe.
                if (stream != null) {
                    try {
                        stream.close();
                    } catch (IOException e) {
                        log.error(null, e);
                    }
                }
            }
        }

    }
    soundPool.execute(new Runnable() {
        @Override
        public void run() {
            Clip clip = sounds.get(sound);

            if (clip == null) {
                return;
            }

            // Rewind and play once; no need to block until the sound finishes.
            clip.stop();
            clip.flush();
            clip.setFramePosition(0);
            clip.loop(0);
        }
    });
}

From source file:edu.mit.csail.sls.wami.relay.WamiRelay.java

/**
 * This delegates recognition to the {@link IRecognizer} associated with
 * this relay, providing the appropriate callbacks.
 *
 * @param audioIn
 *            The audio input stream to recognize
 * @throws RecognizerException
 *             On recognition error
 * @throws IOException
 *             on error reading from the audioIn stream
 */
public void recognize(AudioInputStream audioIn) throws RecognizerException, IOException {
    // Tee the incoming audio: every byte the recognizer reads is also copied
    // into audioByteStream so the utterance can be retrieved and logged later.
    final ByteArrayOutputStream audioByteStream = new ByteArrayOutputStream();
    final AudioFormat audioFormat = audioIn.getFormat();
    TeeInputStream tee = new TeeInputStream(audioIn, audioByteStream, true);
    AudioInputStream forkedStream = new AudioInputStream(tee, audioIn.getFormat(), AudioSystem.NOT_SPECIFIED);

    if (recognizer == null) {
        throw new RecognizerException("No recognizer specified!");
    } else if (wamiApp == null) {
        throw new RecognizerException("No wami app specified!");
    }

    recognizer.recognize(forkedStream, new IRecognitionListener() {
        private long startedTimestamp;

        public void onRecognitionResult(final IRecognitionResult result) {
            // if the result is final, then before we delegate it
            // we switch over our audio stream so that
            // getLastRecordedAudio() works properly inside of
            // onRecognitionResult
            long timestampMillis = System.currentTimeMillis();
            if (!result.isIncremental()) {
                try {
                    audioByteStream.close();
                } catch (IOException e) {
                    e.printStackTrace(); // shouldn't occur
                }

                // Snapshot the captured audio under the lock so readers of
                // lastAudioBytes/lastAudioFormat see a consistent pair.
                synchronized (lastAudioLock) {
                    lastAudioBytes = audioByteStream.toByteArray();
                    lastAudioFormat = audioFormat;
                }
            }
            wamiApp.onRecognitionResult(result);
            logEvent(result, timestampMillis);

            if (!result.isIncremental()) {
                // Final result: persist the full utterance audio for the log.
                logUtterance(audioByteStream.toByteArray(), audioFormat, startedTimestamp);
            }
        }

        public void onRecognitionStarted() {
            startedTimestamp = System.currentTimeMillis();
            logEvent(new RecognitionStartedLogEvent(), startedTimestamp);
            wamiApp.onRecognitionStarted();
        }

    });
}

From source file:org.sipfoundry.voicemail.mailbox.AbstractMailboxManager.java

/**
 * Concatenates two audio files into a single WAVE file, using the format of
 * the first file for the combined stream.
 *
 * @param newFile destination WAVE file to create.
 * @param orig1 first source audio file.
 * @param orig2 second source audio file.
 * @throws Exception wrapping any failure, with a message naming the step that failed.
 */
protected void concatAudio(File newFile, File orig1, File orig2) throws Exception {
    AudioInputStream firstClip = null;
    AudioInputStream secondClip = null;
    AudioInputStream combined = null;
    // Tracks the current step so the wrapped exception names where we failed.
    String operation = "dunno";
    try {
        operation = "getting AudioInputStream from " + orig1.getPath();
        firstClip = AudioSystem.getAudioInputStream(orig1);
        operation = "getting AudioInputStream from " + orig2.getPath();
        secondClip = AudioSystem.getAudioInputStream(orig2);

        operation = "building SequnceInputStream";
        long totalFrames = firstClip.getFrameLength() + secondClip.getFrameLength();
        combined = new AudioInputStream(new SequenceInputStream(firstClip, secondClip),
                firstClip.getFormat(), totalFrames);

        operation = "writing SequnceInputStream to " + newFile.getPath();
        AudioSystem.write(combined, AudioFileFormat.Type.WAVE, newFile);
        LOG.info("VmMessage::concatAudio created combined file " + newFile.getPath());
    } catch (Exception e) {
        throw new Exception("VmMessage::concatAudio Problem while " + operation, e);
    } finally {
        IOUtils.closeQuietly(firstClip);
        IOUtils.closeQuietly(secondClip);
        IOUtils.closeQuietly(combined);
    }
}

From source file:com.opensmile.maven.EmoRecService.java

/**
 * Computes the duration, in seconds, of the audio file at {@code filename}.
 *
 * @param filename path of the audio file.
 * @return duration in seconds, or 0 if the file cannot be read or decoded.
 */
private double getAudioDuration(String filename) {
    File file = new File(filename);
    float durationInSeconds = 0;
    // Bug fix: try-with-resources — the original never closed the stream.
    try (AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(file)) {
        AudioFormat format = audioInputStream.getFormat();
        long frames = audioInputStream.getFrameLength();
        float frameRate = format.getFrameRate();
        if (frames != AudioSystem.NOT_SPECIFIED && frameRate != AudioSystem.NOT_SPECIFIED) {
            // Preferred: the frame count excludes container header bytes,
            // unlike the raw file length the original used.
            durationInSeconds = frames / frameRate;
        } else {
            // Fallback estimate from the raw file size (slightly overestimates,
            // since it includes header bytes).
            long audioFileLength = file.length();
            int frameSize = format.getFrameSize();
            durationInSeconds = (audioFileLength / (frameSize * frameRate));
        }
    } catch (UnsupportedAudioFileException | IOException e) {
        e.printStackTrace();
    }

    return durationInSeconds;
}

From source file:org.jtrfp.trcl.core.ResourceManager.java

/**
 * Creates a ResourceManager bound to the given TR instance, wiring up the
 * cached factories for GPU-resident MOD music and sound textures.
 *
 * @param tr owning TR instance; also used to report fatal errors.
 */
public ResourceManager(final TR tr) {
    this.tr = tr;
    try {
        // Force-load JavaMod classes up front so module playback fails fast.
        Class.forName("de.quippy.javamod.multimedia.mod.loader.tracker.ProTrackerMod");
        Class.forName("de.quippy.javamod.multimedia.mod.ModContainer"); // ModContainer uses the ModFactory!!
    } catch (Exception e) {
        tr.showStopper(e);
    }
    gpuResidentMODs = new CachedObjectFactory<String, GPUResidentMOD>() {
        @Override
        protected GPUResidentMOD generate(String key) {
            return new GPUResidentMOD(tr, getMOD(key));
        }//end generate(...)
    };
    soundTextures = new CachedObjectFactory<String, SoundTexture>() {
        @Override
        protected SoundTexture generate(String key) {
            // Bug fix: try-with-resources — the original leaked the stream.
            try (AudioInputStream ais = AudioSystem
                    .getAudioInputStream(getInputStreamFromResource("SOUND\\" + key))) {
                final FloatBuffer fb = ByteBuffer.allocateDirect((int) ais.getFrameLength() * 4)
                        .order(ByteOrder.nativeOrder()).asFloatBuffer();
                // NOTE(review): the (value - 128) / 128 conversion assumes 8-bit
                // unsigned PCM samples — confirm against the SOUND\ assets.
                int value;
                while ((value = ais.read()) != -1) {
                    fb.put(((float) (value - 128)) / 128f);
                }
                fb.clear(); // rewind the buffer for the consumer
                return tr.soundSystem.get().newSoundTexture(fb, (int) ais.getFormat().getFrameRate());
            } catch (Exception e) {
                tr.showStopper(e);
                return null;
            }
        }
    };

    setupPODListeners();
}

From source file:Filter3dTest.java

/**
 * Opens a sound from a file, caching its audio format and raw sample bytes.
 *
 * @param filename path of the audio file to load.
 */
public SimpleSoundPlayer(String filename) {
    // Bug fix: try-with-resources — the original never closed the stream.
    try (AudioInputStream stream = AudioSystem.getAudioInputStream(new File(filename))) {
        // Remember the format so playback lines can be created later.
        format = stream.getFormat();
        // get the audio samples
        samples = getSamples(stream);
    } catch (UnsupportedAudioFileException ex) {
        ex.printStackTrace();
    } catch (IOException ex) {
        ex.printStackTrace();
    }
}

From source file:SoundManagerTest.java

/**
 * Loads a Sound from an AudioInputStream by reading the entire stream into memory.
 *
 * @param audioStream stream to read; may be null.
 * @return a Sound holding the raw sample bytes, or null if audioStream is null.
 */
public Sound getSound(AudioInputStream audioStream) {
    if (audioStream == null) {
        return null;
    }

    // get the number of bytes to read
    int length = (int) (audioStream.getFrameLength() * audioStream.getFormat().getFrameSize());

    // read the entire stream
    byte[] samples = new byte[length];
    // Bug fix: try-with-resources — the original skipped close() when
    // readFully threw, leaking the stream.
    try (DataInputStream is = new DataInputStream(audioStream)) {
        is.readFully(samples);
    } catch (IOException ex) {
        ex.printStackTrace();
    }

    // return the samples
    return new Sound(samples);
}

From source file:com.gameminers.mav.firstrun.TeachSphinxThread.java

@Override
public void run() {
    try {
        // Training WAVs are written under the user's config directory.
        File training = new File(Mav.configDir, "training-data");
        training.mkdirs();
        // Wait until the user has been silent for a while before prompting.
        while (Mav.silentFrames < 30) {
            sleep(100);
        }
        Mav.listening = true;
        InputStream prompts = ClassLoader.getSystemResourceAsStream("resources/sphinx/train/arcticAll.prompts");
        List<String> arctic = IOUtils.readLines(prompts);
        IOUtils.closeQuietly(prompts);
        Mav.audioManager.playClip("listen1");
        byte[] buf = new byte[2048];
        int start = 0;
        int end = 21;
        AudioInputStream in = Mav.audioManager.getSource().getAudioInputStream();
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        while (true) {
            for (int i = start; i < end; i++) {
                baos.reset();
                // Each prompt line is "<basename>:<text to read>".
                String prompt = arctic.get(i);
                RenderState.setText("\u00A7LRead this aloud:\n" + Fonts.wrapStringToFit(
                        prompt.substring(prompt.indexOf(':') + 1), Fonts.base[1], Display.getWidth()));
                File file = new File(training, prompt.substring(0, prompt.indexOf(':')) + ".wav");
                file.createNewFile();
                int read = 0;
                // Discard audio while the user is still silent.
                while (Mav.silentListenFrames > 0) {
                    read = Mav.audioManager.getSource().getAudioInputStream().read(buf);
                }
                baos.write(buf, 0, read);
                // Record until we observe 60 consecutive silent frames.
                while (Mav.silentListenFrames < 60) {
                    // Bug fix: the original discarded this read's return value and
                    // re-used the stale count/EOF flag from the loop above.
                    read = in.read(buf);
                    if (read == -1) {
                        RenderState.setText(
                                "\u00A7LAn error occurred\nUnexpected end of stream\nPlease restart Mav");
                        RenderState.targetHue = 0;
                        return;
                    }
                    baos.write(buf, 0, read);
                }
                // Frame length = bytes / 2 (16-bit samples — presumably; TODO confirm).
                AudioSystem.write(new AudioInputStream(new ByteArrayInputStream(baos.toByteArray()),
                        in.getFormat(), baos.size() / 2), AudioFileFormat.Type.WAVE, file);
                Mav.audioManager.playClip("notif2");
            }
            Mav.ttsInterface.say(Mav.phoneticUserName
                    + ", that should be enough for now. Do you want to keep training anyway?");
            RenderState.setText("\u00A7LOkay, " + Mav.userName
                    + "\nI think that should be\nenough. Do you want to\nkeep training anyway?\n\u00A7s(Say 'Yes' or 'No' out loud)");
            break;
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:marytts.tools.redstart.AdminWindow.java

/**
 * Opens a MultiDisplay window showing the recording of the currently selected
 * prompt, down-mixing multi-channel audio to mono first.
 */
private void jButton_DisplayActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton_DisplayActionPerformed
    Prompt selectedPrompt = promptArray[getCurrentRow()];
    try {
        File recordingFile = selectedPrompt.getRecording().getFile();
        AudioInputStream audio = AudioSystem.getAudioInputStream(recordingFile);
        boolean multiChannel = audio.getFormat().getChannels() > 1;
        if (multiChannel) {
            audio = new MonoAudioInputStream(audio, optionsDialog.getInputMode());
        }
        // Constructing the display shows the window; the reference is not reused.
        MultiDisplay display = new MultiDisplay(audio, selectedPrompt.getBasename(), false);
    } catch (Exception e) {
        e.printStackTrace();
    }
}