Example usage for javax.sound.sampled SourceDataLine close

Introduction

On this page you can find example usage of javax.sound.sampled SourceDataLine.close().

Prototype

@Override
void close();

Document

Closes the line, indicating that any system resources in use by the line can be released.
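
All of the examples below follow the same lifecycle: obtain a SourceDataLine, open and start it, write audio data, call drain(), and finally call close() to release the underlying system resources. The following is a minimal, self-contained sketch of that pattern; the class name and tone parameters are illustrative only, and it plays a locally generated sine tone rather than captured or decoded audio. The audio format mirrors the first example below.

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;

public class SourceDataLineCloseSketch {
    public static void main(String[] args) throws LineUnavailableException {
        // 8 kHz, 8-bit, mono, signed PCM (same parameters as the first example below)
        AudioFormat format = new AudioFormat(8000f, 8, 1, true, true);
        SourceDataLine line = AudioSystem.getSourceDataLine(format);
        try {
            line.open(format);
            line.start();

            // One second of a 440 Hz sine tone
            byte[] buffer = new byte[8000];
            for (int i = 0; i < buffer.length; i++) {
                buffer[i] = (byte) (Math.sin(2 * Math.PI * 440 * i / 8000.0) * 100);
            }
            line.write(buffer, 0, buffer.length);

            // Block until all queued data has been played
            line.drain();
        } finally {
            // Release the system resources held by the line
            line.close();
        }
    }
}

Since Java 7, Line extends AutoCloseable, so a try-with-resources statement can replace the explicit finally block shown here.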

Usage

From source file:Main.java

public static void main(String args[]) throws Exception {
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    float sampleRate = 8000;
    int sampleSizeInBits = 8;
    int channels = 1;
    boolean signed = true;
    boolean bigEndian = true;
    final AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    final TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
    line.open(format);
    line.start();
    Runnable runner = new Runnable() {
        int bufferSize = (int) format.getSampleRate() * format.getFrameSize();

        byte buffer[] = new byte[bufferSize];

        public void run() {
            try {

                int count = line.read(buffer, 0, buffer.length);
                if (count > 0) {
                    out.write(buffer, 0, count);
                }

                out.close();
            } catch (IOException e) {
                System.err.println("I/O problems: " + e);
                System.exit(-1);
            }
        }
    };
    Thread captureThread = new Thread(runner);
    captureThread.start();

    byte audio[] = out.toByteArray();
    InputStream input = new ByteArrayInputStream(audio);
    final SourceDataLine line1 = (SourceDataLine) AudioSystem.getLine(info);
    final AudioInputStream ais = new AudioInputStream(input, format, audio.length / format.getFrameSize());
    line1.open(format);
    line1.start();

    runner = new Runnable() {
        int bufferSize = (int) format.getSampleRate() * format.getFrameSize();

        byte buffer[] = new byte[bufferSize];

        public void run() {
            try {
                int count;
                while ((count = ais.read(buffer, 0, buffer.length)) != -1) {
                    if (count > 0) {
                        line1.write(buffer, 0, count);
                    }
                }
                line1.drain();
                line1.close();
            } catch (IOException e) {
                System.err.println("I/O problems: " + e);
                System.exit(-3);
            }
        }
    };
    Thread playThread = new Thread(runner);
    playThread.start();

}

From source file:com.music.tools.ScaleTester.java

public static void main(String[] args) {
    System.out.println(
            "Usage: java ScaleTester <fundamental frequency> <chromatic scale size> <scale size> <use ET>");
    final AudioFormat af = new AudioFormat(sampleRate, 16, 1, true, true);
    try {
        fundamentalFreq = getArgument(args, 0, FUNDAMENTAL_FREQUENCY, Double.class);
        int pitchesInChromaticScale = getArgument(args, 1, CHROMATIC_SCALE_SILZE, Integer.class);

        List<Double> harmonicFrequencies = new ArrayList<>();
        List<String> ratios = new ArrayList<>();
        Set<Double> frequencies = new HashSet<Double>();
        frequencies.add(fundamentalFreq);
        int octaveMultiplier = 2;
        for (int i = 2; i < 100; i++) {
            // Exclude the 7th harmonic TODO exclude the 11th as well?
            // http://www.phy.mtu.edu/~suits/badnote.html
            if (i % 7 == 0) {
                continue;
            }
            double actualFreq = fundamentalFreq * i;
            double closestTonicRatio = actualFreq / (fundamentalFreq * octaveMultiplier);
            if (closestTonicRatio < 1 || closestTonicRatio > 2) {
                octaveMultiplier *= 2;
            }
            double closestTonic = actualFreq - actualFreq % (fundamentalFreq * octaveMultiplier);
            double normalizedFreq = fundamentalFreq * (actualFreq / closestTonic);

            harmonicFrequencies.add(actualFreq);
            frequencies.add(normalizedFreq);
            if (frequencies.size() == pitchesInChromaticScale) {
                break;
            }
        }

        System.out.println("Harmonic (overtone) frequencies: " + harmonicFrequencies);
        System.out.println("Transposed harmonic frequencies: " + frequencies);

        List<Double> chromaticScale = new ArrayList<>(frequencies);
        Collections.sort(chromaticScale);

        // find the "perfect" interval (e.g. perfect fifth)
        int perfectIntervalIndex = 0;
        int idx = 0;
        for (Iterator<Double> it = chromaticScale.iterator(); it.hasNext();) {
            Double noteFreq = it.next();
            long[] fraction = findCommonFraction(noteFreq / fundamentalFreq);
            fractionCache.put(noteFreq, fraction);
            if (fraction[0] == 3 && fraction[1] == 2) {
                perfectIntervalIndex = idx;
                System.out.println("Perfect interval (3/2) idx: " + perfectIntervalIndex);
            }
            idx++;
            ratios.add(Arrays.toString(fraction));
        }
        System.out.println("Ratios to fundemental frequency: " + ratios);

        if (getBooleanArgument(args, 4, USE_ET)) {
            chromaticScale = temper(chromaticScale);
        }

        System.out.println();
        System.out.println("Chromatic scale: " + chromaticScale);

        Set<Double> scaleSet = new HashSet<Double>();
        scaleSet.add(chromaticScale.get(0));
        idx = 0;
        List<Double> orderedInCircle = new ArrayList<>();
        // now go around the circle of perfect intervals and put the notes
        // in order
        while (orderedInCircle.size() < chromaticScale.size()) {
            orderedInCircle.add(chromaticScale.get(idx));
            idx += perfectIntervalIndex;
            idx = idx % chromaticScale.size();
        }
        System.out.println("Pitches Ordered in circle of perfect intervals: " + orderedInCircle);

        List<Double> scale = new ArrayList<Double>(scaleSet);
        // start with the last note in the circle
        int currentIdxInCircle = orderedInCircle.size() - 1;
        int scaleSize = getArgument(args, 3, SCALE_SIZE, Integer.class);
        while (scale.size() < scaleSize) {
            double pitch = orderedInCircle.get(currentIdxInCircle % orderedInCircle.size());
            if (!scale.contains(pitch)) {
                scale.add(pitch);
            }
            currentIdxInCircle++;
        }
        Collections.sort(scale);

        System.out.println("Scale: " + scale);

        SourceDataLine line = AudioSystem.getSourceDataLine(af);
        line.open(af);
        line.start();

        Double[] scaleFrequencies = scale.toArray(new Double[scale.size()]);

        // first play the whole scale
        WaveMelodyGenerator.playScale(line, scaleFrequencies);
        // then generate a random melody in the scale
        WaveMelodyGenerator.playMelody(line, scaleFrequencies);

        line.drain();
        line.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:fr.ritaly.dungeonmaster.audio.SoundSystemV1.java

public void play(Position position, final AudioClip clip) {
    if (position == null) {
        throw new IllegalArgumentException("The given position is null");
    }
    if (clip == null) {
        throw new IllegalArgumentException("The given audio clip is null");
    }
    if (!isInitialized()) {
        return;
    }

    if (log.isDebugEnabled()) {
        log.debug("Submitting new task ...");
    }

    executorService.execute(new Runnable() {
        @Override
        public void run() {
            final Sound sound = sounds.get(clip.getSound());

            if (sound == null) {
                throw new IllegalArgumentException("Unsupported sound <" + clip + ">");
            }

            final DataLine.Info info = new DataLine.Info(SourceDataLine.class, sound.getAudioFormat());

            try {
                final SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);

                line.open(sound.getAudioFormat());
                line.start();
                line.write(sound.getData(), 0, sound.getData().length);
                line.drain();
                line.close();
            } catch (LineUnavailableException e) {
                throw new RuntimeException(e);
            }
        }
    });

    if (log.isDebugEnabled()) {
        log.debug("Task submitted");
    }

    // TODO Compute distance, pan, attenuation ...
}

From source file:com.github.woz_dialog.ros_woz_dialog_project.TTSHTTPClient.java

public void synthesise(String utterance) throws Exception {

    try {
        log.fine("calling Nuance server to synthesise utterance \"" + utterance + "\"");

        HttpPost httppost = new HttpPost(ttsURI);
        httppost.addHeader("Content-Type", "text/plain");
        httppost.addHeader("Accept", "audio/x-wav;codec=pcm;bit=16;rate=16000");
        HttpEntity entity = new StringEntity(utterance);

        //HttpEntity entity = new ByteArrayEntity(utterance.getBytes("UTF-8"));

        httppost.setEntity(entity);

        HttpResponse response = ttsClient.execute(httppost);

        HttpEntity resEntity = response.getEntity();

        if (resEntity == null || response.getStatusLine().getStatusCode() != 200) {
            System.out.println("Response status: " + response.getStatusLine());
            throw new Exception("Response status: " + response.getStatusLine());
        }

        format = new AudioFormat(16000, 16, 1, true, false);

        System.out.println(response.getStatusLine().getStatusCode());

        data = new byte[0];
        write(resEntity.getContent());
        httppost.releaseConnection();

        //Get the file path
        String basepath = System.getProperty("user.home");
        basepath = basepath + "/wav/" + LANGUAGE + "/" + VOICE;
        File dir = new File(basepath);

        if (!dir.exists()) {
            // attempt to create the directory here
            boolean successful = dir.mkdirs();
            if (successful) {
                // creating the directory succeeded
                System.out.println("directory was created successfully");
            } else {
                // creating the directory failed
                log.severe("failed trying to create the directory");
                throw new Exception("failed trying to create the directory");
            }

            return;

        }

        String fullpath = basepath + "/" + utterance.toLowerCase() + ".wav";

        //Record the sound
        generateFile(data, new File(fullpath));

        //Play the received sound

        SourceDataLine line = AudioSystem.getSourceDataLine(format);

        line.open(format);
        line.start();

        rewind();

        int nBytesRead = 0;
        byte[] abData = new byte[512 * 16];

        while (nBytesRead != -1) {
            nBytesRead = read(abData, 0, abData.length);

            if (nBytesRead >= 0) {
                line.write(abData, 0, nBytesRead);
            }
        }

        line.drain();
        if (line.isOpen()) {
            line.close();
        }

    } catch (LineUnavailableException e) {
        log.warning("Audio line is unavailable: " + e);
        throw e;
    } catch (Exception e) {
        throw e;
    }

}

From source file:SoundManagerTest.java

/**
 * Signals that a PooledThread has stopped. Drains and closes the Thread's
 * Line.
 */
protected void threadStopped() {
    SourceDataLine line = (SourceDataLine) localLine.get();
    if (line != null) {
        line.drain();
        line.close();
    }
}

From source file:Filter3dTest.java

/**
 * Plays a stream. This method blocks (doesn't return) until the sound is
 * finished playing.
 */
public void play(InputStream source) {

    // use a short, 100ms (1/10th sec) buffer for real-time
    // change to the sound stream
    int bufferSize = format.getFrameSize() * Math.round(format.getSampleRate() / 10);
    byte[] buffer = new byte[bufferSize];

    // create a line to play to
    SourceDataLine line;
    try {
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format, bufferSize);
    } catch (LineUnavailableException ex) {
        ex.printStackTrace();
        return;
    }

    // start the line
    line.start();

    // copy data to the line
    try {
        int numBytesRead = 0;
        while (numBytesRead != -1) {
            numBytesRead = source.read(buffer, 0, buffer.length);
            if (numBytesRead != -1) {
                line.write(buffer, 0, numBytesRead);
            }
        }
    } catch (IOException ex) {
        ex.printStackTrace();
    }

    // wait until all data is played, then close the line
    line.drain();
    line.close();

}

From source file:Main.java

/** Read sampled audio data from the specified URL and play it */
public static void streamSampledAudio(URL url)
        throws IOException, UnsupportedAudioFileException, LineUnavailableException {
    AudioInputStream ain = null; // We read audio data from here
    SourceDataLine line = null; // And write it here.

    try {
        // Get an audio input stream from the URL
        ain = AudioSystem.getAudioInputStream(url);

        // Get information about the format of the stream
        AudioFormat format = ain.getFormat();
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);

        // If the format is not supported directly (i.e. if it is not PCM
        // encoded), then try to transcode it to PCM.
        if (!AudioSystem.isLineSupported(info)) {
            // This is the PCM format we want to transcode to.
            // The parameters here are audio format details that you
            // shouldn't need to understand for casual use.
            AudioFormat pcm = new AudioFormat(format.getSampleRate(), 16, format.getChannels(), true, false);

            // Get a wrapper stream around the input stream that does the
            // transcoding for us.
            ain = AudioSystem.getAudioInputStream(pcm, ain);

            // Update the format and info variables for the transcoded data
            format = ain.getFormat();
            info = new DataLine.Info(SourceDataLine.class, format);
        }

        // Open the line through which we'll play the streaming audio.
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format);

        // Allocate a buffer for reading from the input stream and writing
        // to the line. Make it large enough to hold 4k audio frames.
        // Note that the SourceDataLine also has its own internal buffer.
        int framesize = format.getFrameSize();
        byte[] buffer = new byte[4 * 1024 * framesize]; // the buffer
        int numbytes = 0; // how many bytes

        // We haven't started the line yet.
        boolean started = false;

        for (;;) { // We'll exit the loop when we reach the end of stream
            // First, read some bytes from the input stream.
            int bytesread = ain.read(buffer, numbytes, buffer.length - numbytes);
            // If there were no more bytes to read, we're done.
            if (bytesread == -1)
                break;
            numbytes += bytesread;

            // Now that we've got some audio data, to write to the line,
            // start the line, so it will play that data as we write it.
            if (!started) {
                line.start();
                started = true;
            }

            // We must write bytes to the line in an integer multiple of
            // the framesize. So figure out how many bytes we'll write.
            int bytestowrite = (numbytes / framesize) * framesize;

            // Now write the bytes. The line will buffer them and play
            // them. This call will block until all bytes are written.
            line.write(buffer, 0, bytestowrite);

            // If we didn't have an integer multiple of the frame size,
            // then copy the remaining bytes to the start of the buffer.
            int remaining = numbytes - bytestowrite;
            if (remaining > 0)
                System.arraycopy(buffer, bytestowrite, buffer, 0, remaining);
            numbytes = remaining;
        }

        // Now block until all buffered sound finishes playing.
        line.drain();
    } finally { // Always relinquish the resources we use
        if (line != null)
            line.close();
        if (ain != null)
            ain.close();
    }
}

From source file:tvbrowser.extras.reminderplugin.ReminderPlugin.java

/**
 * Plays a sound.
 *
 * @param fileName
 *          The file name of the sound to play.
 * @return The sound Object.
 */
public static Object playSound(final String fileName) {
    try {
        if (StringUtils.endsWithIgnoreCase(fileName, ".mid")) {
            final Sequencer sequencer = MidiSystem.getSequencer();
            sequencer.open();

            final InputStream midiFile = new FileInputStream(fileName);
            sequencer.setSequence(MidiSystem.getSequence(midiFile));

            sequencer.start();

            new Thread("Reminder MIDI sequencer") {
                @Override
                public void run() {
                    setPriority(Thread.MIN_PRIORITY);
                    while (sequencer.isRunning()) {
                        try {
                            Thread.sleep(100);
                        } catch (Exception ee) {
                            // ignore
                        }
                    }

                    try {
                        sequencer.close();
                        midiFile.close();
                    } catch (Exception ee) {
                        // ignore
                    }
                }
            }.start();

            return sequencer;
        } else {
            final AudioInputStream ais = AudioSystem.getAudioInputStream(new File(fileName));

            final AudioFormat format = ais.getFormat();
            final DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);

            if (AudioSystem.isLineSupported(info)) {
                final SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);

                line.open(format);
                line.start();

                new Thread("Reminder audio playing") {
                    private boolean stopped;

                    @Override
                    public void run() {
                        byte[] myData = new byte[1024 * format.getFrameSize()];
                        int numBytesToRead = myData.length;
                        int numBytesRead = 0;
                        int total = 0;
                        int totalToRead = (int) (format.getFrameSize() * ais.getFrameLength());
                        stopped = false;

                        line.addLineListener(new LineListener() {
                            public void update(LineEvent event) {
                                if (line != null && !line.isRunning()) {
                                    stopped = true;
                                    line.close();
                                    try {
                                        ais.close();
                                    } catch (Exception ee) {
                                        // ignore
                                    }
                                }
                            }
                        });

                        try {
                            while (total < totalToRead && !stopped) {
                                numBytesRead = ais.read(myData, 0, numBytesToRead);

                                if (numBytesRead == -1) {
                                    break;
                                }

                                total += numBytesRead;
                                line.write(myData, 0, numBytesRead);
                            }
                        } catch (Exception e) {
                        }

                        line.drain();
                        line.stop();
                    }
                }.start();

                return line;
            } else {
                URL url = new File(fileName).toURI().toURL();
                AudioClip clip = Applet.newAudioClip(url);
                clip.play();
            }
        }

    } catch (Exception e) {
        e.printStackTrace();
        if ((new File(fileName)).isFile()) {
            URL url;
            try {
                url = new File(fileName).toURI().toURL();
                AudioClip clip = Applet.newAudioClip(url);
                clip.play();
            } catch (MalformedURLException e1) {
            }
        } else {
            String msg = mLocalizer.msg("error.1", "Error loading reminder sound file!\n({0})", fileName);
            JOptionPane.showMessageDialog(UiUtilities.getBestDialogParent(MainFrame.getInstance()), msg,
                    Localizer.getLocalization(Localizer.I18N_ERROR), JOptionPane.ERROR_MESSAGE);
        }
    }
    return null;
}