List of usage examples for javax.sound.sampled.AudioSystem.getLine
public static Line getLine(Line.Info info) throws LineUnavailableException
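AudioSystem.getLine obtains a line (for example a SourceDataLine, TargetDataLine, or Clip) that matches the supplied Line.Info without binding the caller to a particular mixer, and throws LineUnavailableException if no matching line can be opened. Before the full examples below, here is a minimal, self-contained sketch of the common pattern: describe the desired line with a DataLine.Info, check support, request the line, then open and start it. The class name and the 44.1 kHz / 16-bit / stereo PCM format are illustrative assumptions, not requirements of the API.

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;

public class GetLineSketch {
    public static void main(String[] args) throws LineUnavailableException {
        // Describe the audio data we intend to write: 44.1 kHz, 16-bit, stereo,
        // signed PCM, little-endian. These values are illustrative assumptions.
        AudioFormat format = new AudioFormat(44100f, 16, 2, true, false);

        // Ask for any SourceDataLine that can handle this format.
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        if (!AudioSystem.isLineSupported(info)) {
            System.err.println("No line supports " + format);
            return;
        }

        // getLine throws LineUnavailableException if no matching line can be opened.
        SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format);
        line.start();
        // ... write PCM bytes with line.write(buffer, 0, length) ...
        line.drain();
        line.close();
    }
}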
From source file:com.limegroup.gnutella.gui.mp3.BasicPlayer.java
/**
 * Initializes the DataLine.<br>
 *
 * We check whether the line supports Volume and Pan controls.
 *
 * From the AudioInputStream, i.e. from the sound file, we
 * fetch information about the format of the audio data. This
 * information includes the sampling frequency, the number of
 * channels and the size of the samples, and is needed to ask
 * JavaSound for a suitable output line for this audio file.
 * Furthermore, we have to give JavaSound a hint about how
 * big the internal buffer for the line should be. Here,
 * we pass AudioSystem.NOT_SPECIFIED, signaling that we don't
 * care about the exact size. JavaSound will use some default
 * value for the buffer size.
 */
private void createLine() throws LineUnavailableException {
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        if (LOG.isDebugEnabled())
            LOG.debug("Source format : " + sourceFormat);
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                sourceFormat.getSampleRate(), 16, sourceFormat.getChannels(),
                sourceFormat.getChannels() * 2, sourceFormat.getSampleRate(), false);
        if (LOG.isDebugEnabled())
            LOG.debug("Target format: " + targetFormat);
        m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        if (LOG.isDebugEnabled())
            LOG.debug("Create Line : " + audioFormat);
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);
        m_line = (SourceDataLine) AudioSystem.getLine(info);

        /*-- Display supported controls --*/
        Control[] c = m_line.getControls();
        for (int p = 0; p < c.length; p++) {
            if (LOG.isDebugEnabled())
                LOG.debug("Controls : " + c[p].toString());
        }
        /*-- Is Gain Control supported? --*/
        if (m_line.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
            m_gainControl = (FloatControl) m_line.getControl(FloatControl.Type.MASTER_GAIN);
            if (LOG.isDebugEnabled())
                LOG.debug("Master Gain Control : [" + m_gainControl.getMinimum() + ","
                        + m_gainControl.getMaximum() + "]," + m_gainControl.getPrecision());
        }
        /*-- Is Pan control supported? --*/
        if (m_line.isControlSupported(FloatControl.Type.PAN)) {
            m_panControl = (FloatControl) m_line.getControl(FloatControl.Type.PAN);
            if (LOG.isDebugEnabled())
                LOG.debug("Pan Control : [" + m_panControl.getMinimum() + ","
                        + m_panControl.getMaximum() + "]," + m_panControl.getPrecision());
        }
    }
}
From source file:io.github.jeremgamer.editor.panels.MusicFrame.java
public MusicFrame(JFrame frame, final GeneralSave gs) {
    ArrayList<BufferedImage> icons = new ArrayList<BufferedImage>();
    try {
        icons.add(ImageIO.read(ImageGetter.class.getResource("icon16.png")));
        icons.add(ImageIO.read(ImageGetter.class.getResource("icon32.png")));
        icons.add(ImageIO.read(ImageGetter.class.getResource("icon64.png")));
        icons.add(ImageIO.read(ImageGetter.class.getResource("icon128.png")));
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    this.setIconImages((List<? extends Image>) icons);
    this.setTitle("Musique");
    this.setSize(new Dimension(300, 225));
    this.addWindowListener(new WindowListener() {
        @Override
        public void windowActivated(WindowEvent event) { }

        @Override
        public void windowClosed(WindowEvent event) { }

        @Override
        public void windowClosing(WindowEvent event) {
            try {
                gs.save(new File("projects/" + Editor.getProjectName() + "/general.rbd"));
            } catch (IOException e) {
                e.printStackTrace();
            }
            if (clip != null) {
                clip.stop();
                clip.close();
                try {
                    audioStream.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

        @Override
        public void windowDeactivated(WindowEvent event) { }

        @Override
        public void windowDeiconified(WindowEvent event) { }

        @Override
        public void windowIconified(WindowEvent event) { }

        @Override
        public void windowOpened(WindowEvent event) { }
    });
    this.setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE);
    content.setLayout(new BoxLayout(content, BoxLayout.PAGE_AXIS));
    this.setModal(true);
    this.setLocationRelativeTo(frame);

    JPanel properties = new JPanel();
    properties.setBorder(BorderFactory.createTitledBorder("Lecture"));
    ButtonGroup bg = new ButtonGroup();
    bg.add(one);
    bg.add(loop);
    one.addChangeListener(new ChangeListener() {
        @Override
        public void stateChanged(ChangeEvent event) {
            JRadioButton rb = (JRadioButton) event.getSource();
            if (rb.isSelected()) {
                gs.set("music.reading", 0);
                try {
                    gs.save(new File("projects/" + Editor.getProjectName() + "/general.rbd"));
                } catch (IOException e) {
                    e.printStackTrace();
                }
                if (clip != null) {
                    if (clip.isRunning())
                        clip.loop(0);
                }
            }
        }
    });
    loop.addChangeListener(new ChangeListener() {
        @Override
        public void stateChanged(ChangeEvent event) {
            JRadioButton rb = (JRadioButton) event.getSource();
            if (rb.isSelected()) {
                gs.set("music.reading", 1);
                try {
                    gs.save(new File("projects/" + Editor.getProjectName() + "/general.rbd"));
                } catch (IOException e) {
                    e.printStackTrace();
                }
                if (clip != null) {
                    if (clip.isRunning())
                        clip.loop(Clip.LOOP_CONTINUOUSLY);
                }
            }
        }
    });
    properties.add(one);
    properties.add(loop);
    if (gs.getInt("music.reading") == 0) {
        one.setSelected(true);
    } else {
        loop.setSelected(true);
    }

    volume.setMaximum(100);
    volume.setMinimum(0);
    volume.setValue(30);
    volume.setPaintTicks(true);
    volume.setPaintLabels(true);
    volume.setMinorTickSpacing(10);
    volume.setMajorTickSpacing(20);
    volume.addChangeListener(new ChangeListener() {
        public void stateChanged(ChangeEvent event) {
            JSlider slider = (JSlider) event.getSource();
            double value = slider.getValue();
            gain = value / 100;
            dB = (float) (Math.log(gain) / Math.log(10.0) * 20.0);
            if (clip != null)
                gainControl.setValue(dB);
            gs.set("music.volume", (int) value);
        }
    });
    volume.setValue(gs.getInt("music.volume"));
    properties.add(volume);
    properties.setPreferredSize(new Dimension(300, 125));
    content.add(properties);

    JPanel browsePanel = new JPanel();
    browsePanel.setBorder(BorderFactory.createTitledBorder(""));
    JButton browse = new JButton("Parcourir...");
    if (new File("projects/" + Editor.getProjectName() + "/music.wav").exists()) {
        preview.setEnabled(false);
        browse.setText("");
        try {
            browse.setIcon(new ImageIcon(ImageIO.read(ImageGetter.class.getResource("remove.png"))));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    browse.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent event) {
            JButton button = (JButton) event.getSource();
            if (new File("projects/" + Editor.getProjectName() + "/music.wav").exists()) {
                if (clip != null) {
                    clip.stop();
                    clip.close();
                    try {
                        audioStream.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
                name.setText("");
                preview.setEnabled(false);
                button.setText("Parcourir...");
                button.setIcon(null);
                new File("projects/" + Editor.getProjectName() + "/music.wav").delete();
                gs.set("music.name", "");
            } else {
                String path = null;
                JFileChooser chooser = new JFileChooser(Editor.lastPath);
                FileNameExtensionFilter filter = new FileNameExtensionFilter("Audio (WAV)", "wav");
                chooser.setFileFilter(filter);
                chooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
                int option = chooser.showOpenDialog(null);
                if (option == JFileChooser.APPROVE_OPTION) {
                    path = chooser.getSelectedFile().getAbsolutePath();
                    Editor.lastPath = chooser.getSelectedFile().getParent();
                    copyMusic(new File(path));
                    button.setText("");
                    try {
                        button.setIcon(new ImageIcon(ImageIO.read(ImageGetter.class.getResource("remove.png"))));
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    gs.set("music.name", new File(path).getName());
                    try {
                        gs.save(new File("projects/" + Editor.getProjectName() + "/general.rbd"));
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    name.setText(new File(path).getName());
                    preview.setEnabled(true);
                }
            }
        }
    });
    if (new File("projects/" + Editor.getProjectName() + "/music.wav").exists()) {
        preview.setEnabled(true);
    } else {
        preview.setEnabled(false);
    }
    preview.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent event) {
            JToggleButton tb = (JToggleButton) event.getSource();
            if (tb.isSelected()) {
                try {
                    audioStream = AudioSystem.getAudioInputStream(
                            new File("projects/" + Editor.getProjectName() + "/music.wav"));
                    format = audioStream.getFormat();
                    info = new DataLine.Info(Clip.class, format);
                    clip = (Clip) AudioSystem.getLine(info);
                    clip.open(audioStream);
                    clip.start();
                    gainControl = (FloatControl) clip.getControl(FloatControl.Type.MASTER_GAIN);
                    gainControl.setValue(dB);
                    if (loop.isSelected()) {
                        clip.loop(Clip.LOOP_CONTINUOUSLY);
                    } else {
                        clip.loop(0);
                    }
                    clip.addLineListener(new LineListener() {
                        @Override
                        public void update(LineEvent event) {
                            Clip clip = (Clip) event.getSource();
                            if (!clip.isRunning()) {
                                preview.setSelected(false);
                                clip.stop();
                                clip.close();
                                try {
                                    audioStream.close();
                                } catch (IOException e) {
                                    e.printStackTrace();
                                }
                            }
                        }
                    });
                } catch (Exception exc) {
                    exc.printStackTrace();
                }
            } else {
                clip.stop();
                clip.close();
                try {
                    audioStream.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    });

    JPanel buttons = new JPanel();
    buttons.setLayout(new BorderLayout());
    buttons.add(browse, BorderLayout.WEST);
    buttons.add(preview, BorderLayout.EAST);
    browsePanel.setLayout(new BorderLayout());
    browsePanel.add(buttons, BorderLayout.NORTH);
    browsePanel.add(name, BorderLayout.SOUTH);
    name.setPreferredSize(new Dimension(280, 25));
    name.setText(gs.getString("music.name"));
    content.add(browsePanel);

    this.setContentPane(content);
    this.setVisible(true);
}
From source file:com.offbynull.voip.audio.gateways.io.AudioRunnable.java
private Object openDevices(Address fromAddress, Address toAddress, OpenDevicesRequest request) {
    if (outputDevices == null || inputDevices == null) {
        return new ErrorResponse("Devices not loaded");
    }
    if (openOutputDevice != null || openInputDevice != null) {
        return new ErrorResponse("Devices already open");
    }

    int outputId = request.getOutputId();
    int inputId = request.getInputId();

    LineEntry outputLineEntry = outputDevices.get(outputId);
    if (outputLineEntry == null) {
        LOG.error("Output device not available: {}", outputId);
        return new ErrorResponse("Output device " + outputId + " not available");
    }
    LineEntry inputLineEntry = inputDevices.get(inputId);
    if (inputLineEntry == null) {
        LOG.error("Input device not available: {}", inputId);
        return new ErrorResponse("Input device " + inputId + " not available");
    }

    // open input device
    try {
        openInputDevice = (TargetDataLine) AudioSystem.getLine(inputLineEntry.getLineInfo());
        openInputDevice.open(EXPECTED_FORMAT);
        openInputDevice.start();
    } catch (Exception e) {
        openInputDevice = null;
        LOG.error("Unable to open input device", e);
        return new ErrorResponse("Unable to open input device");
    }

    // open output device
    try {
        openOutputDevice = (SourceDataLine) AudioSystem.getLine(outputLineEntry.getLineInfo());
        openOutputDevice.open(EXPECTED_FORMAT);
        openOutputDevice.start();
    } catch (Exception e) {
        try {
            openInputDevice.close();
        } catch (Exception innerE) {
            LOG.error("Unable to close input device", innerE);
        }
        openInputDevice = null;
        openOutputDevice = null;
        LOG.error("Unable to open output device", e);
        return new ErrorResponse("Unable to open output device");
    }

    // start input read thread
    InputReadRunnable inputReadRunnable = new InputReadRunnable(openInputDevice, bus, INPUT_BUFFER_SIZE);
    inputReadThread = new Thread(inputReadRunnable);
    inputReadThread.setDaemon(true);
    inputReadThread.setName(getClass().getSimpleName() + "-" + inputReadRunnable.getClass().getSimpleName());
    inputReadThread.start();

    // start output write thread
    outputQueue = new LinkedBlockingQueue<>();
    OutputWriteRunnable outputWriteRunnable = new OutputWriteRunnable(openOutputDevice, outputQueue, OUTPUT_BUFFER_SIZE);
    outputWriteThread = new Thread(outputWriteRunnable);
    outputWriteThread.setDaemon(true);
    outputWriteThread.setName(getClass().getSimpleName() + "-" + outputWriteRunnable.getClass().getSimpleName());
    outputWriteThread.start();

    // set address to shuttle input PCM blocks to
    openedFromAddress = fromAddress;
    openedToAddress = toAddress;

    return new SuccessResponse();
}
From source file:com.player.BasicMP3Player.java
/**
 * Initializes the DataLine.<br>
 * We check whether the line supports Gain and Pan controls. From the AudioInputStream, i.e. from the
 * sound file, we fetch information about the format of the audio data. This information includes
 * the sampling frequency, the number of channels and the size of the samples, and is needed to ask
 * JavaSound for a suitable output line for this audio file. Furthermore, we have to give JavaSound
 * a hint about how big the internal buffer for the line should be. Here, we pass
 * AudioSystem.NOT_SPECIFIED, signaling that we don't care about the exact size. JavaSound will use
 * some default value for the buffer size.
 */
private void createLine() throws LineUnavailableException {
    log.info("Create Line");
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        log.info("Create Line : Source format : " + sourceFormat.toString());
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                sourceFormat.getSampleRate(), 16, sourceFormat.getChannels(),
                sourceFormat.getChannels() * 2, sourceFormat.getSampleRate(), false);
        log.info("Create Line : Target format: " + targetFormat);

        // Keep a reference on encoded stream to progress notification.
        m_encodedaudioInputStream = m_audioInputStream;
        try {
            // Get total length in bytes of the encoded stream.
            encodedLength = m_encodedaudioInputStream.available();
        } catch (IOException e) {
            log.error("Cannot get m_encodedaudioInputStream.available()", e);
        }

        // Create decoded stream.
        m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);
        m_line = (SourceDataLine) AudioSystem.getLine(info);

        /*-- Display supported controls --*/
        Control[] c = m_line.getControls();
        for (int p = 0; p < c.length; p++) {
            log.debug("Controls : " + c[p].toString());
        }
        /*-- Is Gain Control supported? --*/
        if (m_line.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
            m_gainControl = (FloatControl) m_line.getControl(FloatControl.Type.MASTER_GAIN);
            log.info("Master Gain Control : [" + m_gainControl.getMinimum() + "," + m_gainControl.getMaximum()
                    + "] " + m_gainControl.getPrecision());
        }
        /*-- Is Pan control supported? --*/
        if (m_line.isControlSupported(FloatControl.Type.PAN)) {
            m_panControl = (FloatControl) m_line.getControl(FloatControl.Type.PAN);
            log.info("Pan Control : [" + m_panControl.getMinimum() + "," + m_panControl.getMaximum()
                    + "] " + m_panControl.getPrecision());
        }
    }
}
From source file:BasicPlayer.java
/**
 * Initializes the DataLine.<br>
 *
 * We check whether the line supports Gain and Pan controls.
 *
 * From the AudioInputStream, i.e. from the sound file, we
 * fetch information about the format of the audio data. This
 * information includes the sampling frequency, the number of
 * channels and the size of the samples, and is needed to ask
 * JavaSound for a suitable output line for this audio file.
 * Furthermore, we have to give JavaSound a hint about how
 * big the internal buffer for the line should be. Here,
 * we pass AudioSystem.NOT_SPECIFIED, signaling that we don't
 * care about the exact size. JavaSound will use some default
 * value for the buffer size.
 */
protected void createLine() throws LineUnavailableException {
    log.info("Create Line");
    if (m_line == null) {
        AudioFormat sourceFormat = m_audioInputStream.getFormat();
        log.info("Create Line : Source format : " + sourceFormat.toString());
        int nSampleSizeInBits = sourceFormat.getSampleSizeInBits();
        if (nSampleSizeInBits <= 0)
            nSampleSizeInBits = 16;
        if ((sourceFormat.getEncoding() == AudioFormat.Encoding.ULAW)
                || (sourceFormat.getEncoding() == AudioFormat.Encoding.ALAW))
            nSampleSizeInBits = 16;
        if (nSampleSizeInBits != 8)
            nSampleSizeInBits = 16;
        AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                sourceFormat.getSampleRate(), nSampleSizeInBits, sourceFormat.getChannels(),
                sourceFormat.getChannels() * (nSampleSizeInBits / 8), sourceFormat.getSampleRate(), false);
        log.info("Create Line : Target format: " + targetFormat);

        // Keep a reference on encoded stream to progress notification.
        m_encodedaudioInputStream = m_audioInputStream;
        try {
            // Get total length in bytes of the encoded stream.
            encodedLength = m_encodedaudioInputStream.available();
        } catch (IOException e) {
            log.error("Cannot get m_encodedaudioInputStream.available()", e);
        }

        // Create decoded stream.
        m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
        AudioFormat audioFormat = m_audioInputStream.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);
        Mixer mixer = getMixer(m_mixerName);
        if (mixer != null) {
            log.info("Mixer : " + mixer.getMixerInfo().toString());
            m_line = (SourceDataLine) mixer.getLine(info);
        } else {
            m_line = (SourceDataLine) AudioSystem.getLine(info);
            m_mixerName = null;
        }
        log.info("Line : " + m_line.toString());
        log.debug("Line Info : " + m_line.getLineInfo().toString());
        log.debug("Line AudioFormat: " + m_line.getFormat().toString());
    }
}
From source file:SoundManagerTest.java
/**
 * Signals that a PooledThread has started. Creates the Thread's line and
 * buffer.
 */
protected void threadStarted() {
    // wait for the SoundManager constructor to finish
    synchronized (this) {
        try {
            wait();
        } catch (InterruptedException ex) {
        }
    }

    // use a short, 100ms (1/10th sec) buffer for filters that
    // change in real-time
    int bufferSize = playbackFormat.getFrameSize() * Math.round(playbackFormat.getSampleRate() / 10);

    // create, open, and start the line
    SourceDataLine line;
    DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, playbackFormat);
    try {
        line = (SourceDataLine) AudioSystem.getLine(lineInfo);
        line.open(playbackFormat, bufferSize);
    } catch (LineUnavailableException ex) {
        // the line is unavailable - signal to end this thread
        Thread.currentThread().interrupt();
        return;
    }
    line.start();

    // create the buffer
    byte[] buffer = new byte[bufferSize];

    // set this thread's locals
    localLine.set(line);
    localBuffer.set(buffer);
}
From source file:net.sf.firemox.tools.MToolKit.java
/**
 * loadClip loads the sound-file into a clip.
 *
 * @param soundFile
 *          file to be loaded and played.
 */
public static void loadClip(String soundFile) {
    AudioFormat audioFormat = null;
    AudioInputStream actionIS = null;
    try {
        // actionIS = AudioSystem.getAudioInputStream(input); // Does not work !
        actionIS = AudioSystem.getAudioInputStream(MToolKit.getFile(MToolKit.getSoundFile(soundFile)));
        AudioFormat.Encoding targetEncoding = AudioFormat.Encoding.PCM_SIGNED;
        actionIS = AudioSystem.getAudioInputStream(targetEncoding, actionIS);
        audioFormat = actionIS.getFormat();
    } catch (UnsupportedAudioFileException afex) {
        Log.error(afex);
    } catch (IOException ioe) {
        if (ioe.getMessage().equalsIgnoreCase("mark/reset not supported")) {
            // Ignore
            Log.error("IOException ignored.");
        }
        Log.error(ioe.getStackTrace());
    }

    // define the required attributes for our line,
    // and make sure a compatible line is supported.
    // get the source data line for play back.
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
    if (!AudioSystem.isLineSupported(info)) {
        Log.error("LineCtrl matching " + info + " not supported.");
        return;
    }

    // Open the source data line for play back.
    try {
        Clip clip = null;
        try {
            Clip.Info info2 = new Clip.Info(Clip.class, audioFormat);
            clip = (Clip) AudioSystem.getLine(info2);
            clip.open(actionIS);
            clip.start();
        } catch (IOException ioe) {
            Log.error(ioe);
        }
    } catch (LineUnavailableException ex) {
        Log.error("Unable to open the line: " + ex);
        return;
    }
}
From source file:org.scantegrity.scanner.Scanner.java
private static void playAudioClip(int p_numTimes) {
    /*
     * Threaded Code....sigsegv when run
     * /
    if (c_audioThread != null && c_audioThread.isAlive()) {
        try {
            c_audioThread.join(2000);
        } catch (InterruptedException e) {
            c_log.log(Level.SEVERE, "Could not wait for previous sound thread.");
        }
    }
    c_audioThread = new Thread(new AudioFile(c_soundFile, p_numTimes));
    c_audioThread.start();
    /*
     * End threaded Code
     */

    AudioInputStream l_stream = null;
    try {
        l_stream = AudioSystem.getAudioInputStream(new File(c_soundFileName));
    } catch (UnsupportedAudioFileException e_uaf) {
        c_log.log(Level.WARNING, "Unsupported Audio File");
        return;
    } catch (IOException e1) {
        c_log.log(Level.WARNING, "Could not Open Audio File");
        return;
    }

    AudioFormat l_format = l_stream.getFormat();
    Clip l_dataLine = null;
    DataLine.Info l_info = new DataLine.Info(Clip.class, l_format);
    if (!AudioSystem.isLineSupported(l_info)) {
        c_log.log(Level.WARNING, "Audio Line is not supported");
    }

    try {
        l_dataLine = (Clip) AudioSystem.getLine(l_info);
        l_dataLine.open(l_stream);
    } catch (LineUnavailableException ex) {
        c_log.log(Level.WARNING, "Audio Line is unavailable.");
    } catch (IOException e) {
        c_log.log(Level.WARNING, "Cannot playback Audio, IO Exception.");
    }

    l_dataLine.loop(p_numTimes);

    try {
        Thread.sleep(160 * (p_numTimes + 1));
    } catch (InterruptedException e) {
        c_log.log(Level.WARNING, "Could not sleep the audio player thread.");
    }

    l_dataLine.close();
}
From source file:org.snitko.app.playback.PlaySound.java
public void play(File inputFile) {
    try (final AudioInputStream in = AudioSystem.getAudioInputStream(inputFile)) {
        final AudioFormat outFormat = getOutFormat(in.getFormat());
        final DataLine.Info info = new DataLine.Info(SourceDataLine.class, outFormat);

        try (final SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(info)) {
            if (sourceDataLine != null) {
                sourceDataLine.open(outFormat);
                sourceDataLine.start();
                AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(outFormat, in);
                stream(audioInputStream, sourceDataLine);
                sourceDataLine.drain();
                sourceDataLine.stop();
            }
        }
    } catch (UnsupportedAudioFileException | LineUnavailableException | IOException e) {
        throw new IllegalStateException(e);
    }
}
From source file:org.yccheok.jstock.chat.Utils.java
public static void playSound(final Sound sound) {
    if (sounds.size() == 0) {
        for (Sound s : Sound.values()) {
            AudioInputStream stream = null;
            Clip clip = null;

            try {
                switch (s) {
                case ALERT:
                    stream = AudioSystem.getAudioInputStream(new File(Utils.getSoundsDirectory() + "alert.wav"));
                    break;
                case LOGIN:
                    stream = AudioSystem.getAudioInputStream(new File(Utils.getSoundsDirectory() + "login.wav"));
                    break;
                case LOGOUT:
                    stream = AudioSystem.getAudioInputStream(new File(Utils.getSoundsDirectory() + "logout.wav"));
                    break;
                case RECEIVE:
                    stream = AudioSystem.getAudioInputStream(new File(Utils.getSoundsDirectory() + "receive.wav"));
                    break;
                case SEND:
                    stream = AudioSystem.getAudioInputStream(new File(Utils.getSoundsDirectory() + "send.wav"));
                    break;
                default:
                    throw new java.lang.IllegalArgumentException("Missing case " + sound);
                }

                // At present, ALAW and ULAW encodings must be converted
                // to PCM_SIGNED before it can be played
                AudioFormat format = stream.getFormat();
                if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
                    format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(),
                            format.getSampleSizeInBits() * 2, format.getChannels(), format.getFrameSize() * 2,
                            format.getFrameRate(), true); // big endian
                    stream = AudioSystem.getAudioInputStream(format, stream);
                }

                // Create the clip
                DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                        ((int) stream.getFrameLength() * format.getFrameSize()));
                clip = (Clip) AudioSystem.getLine(info);

                // This method does not return until the audio file is completely loaded
                clip.open(stream);
                clip.drain();
                sounds.put(s, clip);
            } catch (MalformedURLException e) {
                log.error(null, e);
            } catch (IOException e) {
                log.error(null, e);
            } catch (LineUnavailableException e) {
                log.error(null, e);
            } catch (UnsupportedAudioFileException e) {
                log.error(null, e);
            } finally {
            }
        }
    }

    soundPool.execute(new Runnable() {
        @Override
        public void run() {
            Clip clip = sounds.get(sound);
            if (clip == null) {
                return;
            }

            clip.stop();
            clip.flush();
            clip.setFramePosition(0);
            clip.loop(0);

            // Wait for the sound to finish.
            //while (clip.isRunning()) {
            //    try {
            //        Thread.sleep(1);
            //    } catch (InterruptedException ex) {
            //        log.error(null, ex);
            //    }
            //}
        }
    });
}