Example usage for javax.sound.sampled AudioSystem write

Introduction

This page lists example usages of the javax.sound.sampled AudioSystem.write method, taken from several open-source projects.

Prototype

public static int write(final AudioInputStream stream, final AudioFileFormat.Type fileType, final File out)
        throws IOException 

Document

Writes a stream of bytes representing an audio file of the specified file type to the external file provided.
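
A minimal, self-contained sketch of this overload (the tone parameters and the output name tone.wav are illustrative, not part of the API): it generates one second of 16-bit signed PCM, wraps the bytes in an AudioInputStream, and writes them out as a WAV file. The return value is the number of bytes written to the file.

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;

import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class WriteWavExample {

    public static void main(String[] args) throws IOException {
        // One second of a 440 Hz tone: 16-bit signed PCM, mono, little-endian, 16 kHz.
        float sampleRate = 16000f;
        byte[] pcm = new byte[(int) sampleRate * 2];
        for (int i = 0; i < pcm.length / 2; i++) {
            short sample = (short) (Math.sin(2 * Math.PI * 440 * i / sampleRate) * Short.MAX_VALUE * 0.5);
            pcm[2 * i] = (byte) (sample & 0xff);            // low byte
            pcm[2 * i + 1] = (byte) ((sample >> 8) & 0xff); // high byte
        }

        AudioFormat format = new AudioFormat(sampleRate, 16, 1, true, false);
        AudioInputStream stream = new AudioInputStream(
                new ByteArrayInputStream(pcm), format, pcm.length / format.getFrameSize());

        // The method documented above: returns the number of bytes written.
        int bytesWritten = AudioSystem.write(stream, AudioFileFormat.Type.WAVE, new File("tone.wav"));
        System.out.println("Wrote " + bytesWritten + " bytes");
    }
}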

Usage

From source file:marytts.datatypes.MaryData.java

/**
 * Write our internal representation to output stream <code>os</code>,
 * in the appropriate way as determined by our <code>type</code>.
 */
public void writeTo(OutputStream os) throws TransformerConfigurationException, FileNotFoundException,
        TransformerException, IOException, Exception {
    if (type.isUtterances())
        throw new IOException("Cannot write out utterance-based data type!");

    if (type.isXMLType()) {
        if (writer == null)
            writer = new MaryNormalisedWriter();
        if (logger.getEffectiveLevel().equals(Level.DEBUG)) {
            ByteArrayOutputStream debugOut = new ByteArrayOutputStream();
            writer.output(xmlDocument, debugOut);
            logger.debug(debugOut.toString());
        }
        writer.output(xmlDocument, new BufferedOutputStream(os));
    } else if (type.isTextType()) { // caution: XML types are text types!
        writeTo(new OutputStreamWriter(os, "UTF-8"));
    } else { // audio
        logger.debug("Writing audio output, frame length " + audio.getFrameLength());
        AudioSystem.write(audio, audioFileFormat.getType(), os);
        os.flush();
        os.close();
    }
}
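
The MaryData example writes to an OutputStream rather than to a File, using the overload AudioSystem.write(AudioInputStream, AudioFileFormat.Type, OutputStream). A minimal sketch of that overload, buffering a file's audio into memory (the helper name toWavBytes is illustrative):

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;

import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;

public class WriteToStreamExample {

    /** Reads an audio file and returns its contents re-encoded as WAV bytes. */
    public static byte[] toWavBytes(File audioFile) throws IOException, UnsupportedAudioFileException {
        try (AudioInputStream ais = AudioSystem.getAudioInputStream(audioFile)) {
            // Writing WAVE to an OutputStream requires a known frame length;
            // AudioSystem.write throws an IOException otherwise.
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            AudioSystem.write(ais, AudioFileFormat.Type.WAVE, out);
            return out.toByteArray();
        }
    }
}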

From source file:it.univpm.deit.semedia.musicuri.core.Toolset.java

/**
 * Extracts/encodes the AudioSignatureDS for a given audio file
 * @param file the audio file to encode 
 * @return a string containing the whole XML-formatted MPEG-7 description document
 */
public static String createMPEG7Description(File file) throws IOException {
    if (isSupportedAudioFile(file)) {
        System.out.println("Extracting Query Audio Signature");
        String xmlString = null;
        Config configuration = new ConfigDefault();
        configuration.enableAll(false);
        configuration.setValue("AudioSignature", "enable", true);
        configuration.setValue("AudioSignature", "decimation", 32);
        //System.out.println("File: " + file.getName());

        AudioInputStream ais = null;
        try {
            ais = AudioSystem.getAudioInputStream(file);
            AudioFormat f = ais.getFormat();
            if (f.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
                System.out.println("Converting Audio stream format");
                ais = AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, ais);
                f = ais.getFormat();
            }

            String workingDir = getCWD();
            String tempFilename = workingDir + "/temp.wav";
            AudioSystem.write(ais, AudioFileFormat.Type.WAVE, new File(tempFilename));

            File tmpFile = new File(tempFilename);
            AudioInFloatSampled audioin = new AudioInFloatSampled(tmpFile);

            String str = tmpFile.getCanonicalPath();
            String[] ar = { str };
            //xmlString = Encoder.fromWAVtoXML(ar);

            // gather information about audio file
            MP7MediaInformation media_info = new MP7MediaInformation();
            media_info.setFileSize(tmpFile.length());

            AudioFormat format = audioin.getSourceFormat();
            media_info.setSample(format.getSampleRate(), format.getSampleSizeInBits());
            media_info.setNumberOfChannels(audioin.isMono() ? 1 : 2);

            // create mpeg-7 writer
            MP7Writer mp7writer = new MP7Writer();
            mp7writer.setMediaInformation(media_info);

            // create encoder
            Encoder encoder = null;

            Config config = new ConfigDefault();
            config.enableAll(false);
            config.setValue("AudioSignature", "enable", true);
            config.setValue("AudioSignature", "decimation", 32);
            encoder = new Encoder(audioin.getSampleRate(), mp7writer, config);
            //encoder.addTimeElapsedListener(new Ticker(System.err));

            // copy audio signal from source to encoder
            long oldtime = System.currentTimeMillis();
            float[] audio;
            while ((audio = audioin.get()) != null) {
                if (!audioin.isMono())
                    audio = AudioInFloat.getMono(audio);
                encoder.put(audio);
            }
            encoder.flush();
            System.out.println("Extraction Time     : " + (System.currentTimeMillis() - oldtime) + " ms");

            // whole MPEG-7 description into a string
            xmlString = mp7writer.toString();
            //System.out.println( xmlString )

        } catch (Exception e) {
            e.printStackTrace(System.err);
        } finally {
            if (ais != null) {
                ais.close();
            }
        }

        return xmlString;
    } else {
        System.out.println("Unsupported audio file format");
        return null;
    }
}
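
The temp-file round trip above is not required by the write API itself; here is a minimal sketch of the same PCM conversion followed directly by a WAV write (assuming, as in the example, that a converter for the input encoding is available):

import java.io.File;
import java.io.IOException;

import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;

public class ConvertToPcmWav {

    /** Converts a supported audio file to signed PCM and saves it as a WAV file. */
    public static void convert(File in, File out) throws IOException, UnsupportedAudioFileException {
        try (AudioInputStream ais = AudioSystem.getAudioInputStream(in)) {
            AudioInputStream pcm = ais;
            if (!AudioFormat.Encoding.PCM_SIGNED.equals(ais.getFormat().getEncoding())) {
                // Ask the system for a converted view of the same stream.
                pcm = AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, ais);
            }
            AudioSystem.write(pcm, AudioFileFormat.Type.WAVE, out);
        }
    }
}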

From source file:org.sipfoundry.voicemail.mailbox.AbstractMailboxManager.java

protected void concatAudio(File newFile, File orig1, File orig2) throws Exception {
    String operation = "dunno";
    AudioInputStream clip1 = null;
    AudioInputStream clip2 = null;
    AudioInputStream concatStream = null;
    try {
        operation = "getting AudioInputStream from " + orig1.getPath();
        clip1 = AudioSystem.getAudioInputStream(orig1);
        operation = "getting AudioInputStream from " + orig2.getPath();
        clip2 = AudioSystem.getAudioInputStream(orig2);

        operation = "building SequenceInputStream";
        concatStream = new AudioInputStream(new SequenceInputStream(clip1, clip2), clip1.getFormat(),
                clip1.getFrameLength() + clip2.getFrameLength());

        operation = "writing SequenceInputStream to " + newFile.getPath();
        AudioSystem.write(concatStream, AudioFileFormat.Type.WAVE, newFile);
        LOG.info("VmMessage::concatAudio created combined file " + newFile.getPath());
    } catch (Exception e) {
        String trouble = "VmMessage::concatAudio Problem while " + operation;
        throw new Exception(trouble, e);
    } finally {
        IOUtils.closeQuietly(clip1);
        IOUtils.closeQuietly(clip2);
        IOUtils.closeQuietly(concatStream);
    }
}
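
Note that the SequenceInputStream approach only yields a valid result when both clips share the same audio format, because the combined AudioInputStream declares clip1's format for all of the data. A small illustrative guard (the class and method names are hypothetical) that could be run before building the concatenated stream:

import javax.sound.sampled.AudioInputStream;

/** Illustrative guard: refuse to concatenate clips whose formats differ. */
final class ConcatGuard {

    static void requireSameFormat(AudioInputStream clip1, AudioInputStream clip2) {
        // AudioFormat.matches() compares encoding, sample rate, channels, etc.
        if (!clip1.getFormat().matches(clip2.getFormat())) {
            throw new IllegalArgumentException(
                    "Cannot concatenate clips with different formats: "
                            + clip1.getFormat() + " vs " + clip2.getFormat());
        }
    }
}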

From source file:marytts.http.controllers.MaryController.java

/**
 * Retrieves a signal previously synthesized by {@link synthesize(String)} or {@link process(String)}.
 *
 *    @param response the response to fill
 *    @throws Exception in case of failure (e.g. no synthesis was performed before this call, ...)
 */
@RequestMapping("/getSynthesizedSignal")
public void getSynthesizedSignal(HttpServletResponse response) throws Exception {
    if (ais == null) {
        throw new RuntimeException("No synthesis achieved => no signal to get !");
    }

    response.setContentType("audio/x-wav");
    AudioSystem.write(ais, AudioFileFormat.Type.WAVE, response.getOutputStream());
    response.flushBuffer();
}
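
One caveat when writing WAVE to an OutputStream, as in this controller: the javadoc notes that file types whose headers contain a length field cannot be written this way if the stream's frame length is AudioSystem.NOT_SPECIFIED. A sketch of the usual workaround, buffering the audio so the length becomes known (class and method names are hypothetical):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

final class KnownLengthStreams {

    /** Returns a stream equivalent to the input but with a known frame length. */
    static AudioInputStream withKnownLength(AudioInputStream ais) throws IOException {
        if (ais.getFrameLength() != AudioSystem.NOT_SPECIFIED) {
            return ais; // length already known, nothing to do
        }
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        byte[] chunk = new byte[8192];
        int read;
        while ((read = ais.read(chunk)) != -1) {
            buffer.write(chunk, 0, read);
        }
        byte[] data = buffer.toByteArray();
        long frames = data.length / ais.getFormat().getFrameSize();
        return new AudioInputStream(new ByteArrayInputStream(data), ais.getFormat(), frames);
    }
}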

From source file:org.sipfoundry.voicemail.VmMessage.java

/**
 * Combines two WAV files into a single, longer one.
 *
 * @param newFile destination file for the combined audio
 * @param orig1 first source WAV file
 * @param orig2 second source WAV file
 * @throws Exception if either source cannot be read or the combined file cannot be written
 */
static void concatAudio(File newFile, File orig1, File orig2) throws Exception {
    String operation = "dunno";
    try {
        operation = "getting AudioInputStream from " + orig1.getPath();
        AudioInputStream clip1 = AudioSystem.getAudioInputStream(orig1);
        operation = "getting AudioInputStream from " + orig2.getPath();
        AudioInputStream clip2 = AudioSystem.getAudioInputStream(orig2);

        operation = "building SequenceInputStream";
        AudioInputStream concatStream = new AudioInputStream(new SequenceInputStream(clip1, clip2),
                clip1.getFormat(), clip1.getFrameLength() + clip2.getFrameLength());

        operation = "writing SequenceInputStream to " + newFile.getPath();
        AudioSystem.write(concatStream, AudioFileFormat.Type.WAVE, newFile);
        LOG.info("VmMessage::concatAudio created combined file " + newFile.getPath());
    } catch (Exception e) {
        String trouble = "VmMessage::concatAudio Problem while " + operation;
        //           LOG.error(trouble, e);
        throw new Exception(trouble, e);
    }
}