List of usage examples for javax.sound.sampled.AudioSystem.getAudioInputStream
public static AudioInputStream getAudioInputStream(final File file) throws UnsupportedAudioFileException, IOException
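Before the individual examples, here is a minimal, self-contained sketch of the File overload. The file name "audio.wav" and the class name Probe are illustrative only; it assumes a sound file in a format the installed audio readers can parse. Using try-with-resources closes the stream, which the shorter examples below omit.

import java.io.File;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class Probe {
    public static void main(String[] args) throws Exception {
        // Open the file and inspect its format; try-with-resources closes the stream.
        try (AudioInputStream in = AudioSystem.getAudioInputStream(new File("audio.wav"))) {
            AudioFormat format = in.getFormat();
            System.out.println("Encoding:    " + format.getEncoding());
            System.out.println("Sample rate: " + format.getSampleRate() + " Hz");
            System.out.println("Channels:    " + format.getChannels());
            System.out.println("Frames:      " + in.getFrameLength());
        }
    }
}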
From source file:Main.java
public static void main(String[] argv) throws Exception {
    AudioInputStream stream = AudioSystem.getAudioInputStream(new File("audiofile"));
}
From source file:Main.java
public static void main(String[] argv) throws Exception {
    AudioInputStream stream = AudioSystem.getAudioInputStream(new File("audiofile"));
    // From a URL:
    // stream = AudioSystem.getAudioInputStream(new URL("http://hostname/audiofile"));

    AudioFormat format = stream.getFormat();
    if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                format.getSampleRate(),
                format.getSampleSizeInBits() * 2,
                format.getChannels(),
                format.getFrameSize() * 2,
                format.getFrameRate(),
                true); // big endian
        stream = AudioSystem.getAudioInputStream(format, stream);
    }

    DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
            (int) stream.getFrameLength() * format.getFrameSize());
    Clip clip = (Clip) AudioSystem.getLine(info);
    clip.open(stream);
    clip.start();
}
From source file:Main.java
public static void main(String[] argv) throws Exception {
    AudioInputStream stream = AudioSystem.getAudioInputStream(new File("audiofile"));
    // From a URL:
    // stream = AudioSystem.getAudioInputStream(new URL("http://hostname/audiofile"));

    AudioFormat format = stream.getFormat();
    if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                format.getSampleRate(),
                format.getSampleSizeInBits() * 2,
                format.getChannels(),
                format.getFrameSize() * 2,
                format.getFrameRate(),
                true); // big endian
        stream = AudioSystem.getAudioInputStream(format, stream);
    }

    DataLine.Info info = new DataLine.Info(SourceDataLine.class, stream.getFormat(),
            (int) stream.getFrameLength() * format.getFrameSize());
    SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
    line.open(stream.getFormat());
    line.start();

    int numRead = 0;
    byte[] buf = new byte[line.getBufferSize()];
    while ((numRead = stream.read(buf, 0, buf.length)) >= 0) {
        int offset = 0;
        while (offset < numRead) {
            offset += line.write(buf, offset, numRead - offset);
        }
    }
    line.drain();
    line.stop();
}
From source file:ClipTest.java
public static void main(String[] args) throws Exception {
    // specify the sound to play
    // (assuming the sound can be played by the audio system)
    File soundFile = new File("../sounds/voice.wav");
    AudioInputStream sound = AudioSystem.getAudioInputStream(soundFile);

    // load the sound into memory (a Clip)
    DataLine.Info info = new DataLine.Info(Clip.class, sound.getFormat());
    Clip clip = (Clip) AudioSystem.getLine(info);
    clip.open(sound);

    // due to bug in Java Sound, explicitly exit the VM when
    // the sound has stopped.
    clip.addLineListener(new LineListener() {
        public void update(LineEvent event) {
            if (event.getType() == LineEvent.Type.STOP) {
                event.getLine().close();
                System.exit(0);
            }
        }
    });

    // play the sound clip
    clip.start();
}
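As an alternative to exiting the VM from the listener, the main thread can simply block until the STOP event arrives. This is a minimal sketch, assuming the same "../sounds/voice.wav" file; the class name ClipWait is hypothetical.

import java.io.File;
import java.util.concurrent.CountDownLatch;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineEvent;

public class ClipWait {
    public static void main(String[] args) throws Exception {
        AudioInputStream sound = AudioSystem.getAudioInputStream(new File("../sounds/voice.wav"));
        Clip clip = (Clip) AudioSystem.getLine(new DataLine.Info(Clip.class, sound.getFormat()));
        clip.open(sound);

        CountDownLatch done = new CountDownLatch(1);
        clip.addLineListener(event -> {
            if (event.getType() == LineEvent.Type.STOP) {
                done.countDown();
            }
        });

        clip.start();
        done.await(); // wait for the STOP event instead of exiting the VM
        clip.close();
    }
}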
From source file:marytts.signalproc.effects.VocalTractLinearScalerEffect.java
/**
 * Command line interface to the vocal tract linear scaler effect.
 *
 * @param args the command line arguments. Exactly two arguments are expected:
 *             (1) the factor by which to scale the vocal tract
 *                 (between 0.25 = very long and 4.0 = very short vocal tract);
 *             (2) the filename of the wav file to modify.
 *             Will produce a file basename_factor.wav, where basename is the filename without the extension.
 * @throws Exception if processing fails for some reason.
 */
public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        System.err.println("Usage: java " + VocalTractLinearScalerEffect.class.getName() + " <factor> <filename>");
        System.exit(1);
    }
    float factor = Float.parseFloat(args[0]);
    String filename = args[1];

    AudioDoubleDataSource input = new AudioDoubleDataSource(AudioSystem.getAudioInputStream(new File(filename)));
    AudioFormat format = input.getAudioFormat();
    VocalTractLinearScalerEffect effect = new VocalTractLinearScalerEffect((int) format.getSampleRate());
    DoubleDataSource output = effect.apply(input, "amount:" + factor);
    DDSAudioInputStream audioOut = new DDSAudioInputStream(output, format);

    String outFilename = FilenameUtils.removeExtension(filename) + "_" + factor + ".wav";
    AudioSystem.write(audioOut, AudioFileFormat.Type.WAVE, new File(outFilename));
    System.out.println("Created file " + outFilename);
}
From source file:marytts.tools.analysis.CopySynthesis.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    String wavFilename = null;
    String labFilename = null;
    String pitchFilename = null;
    String textFilename = null;

    String locale = System.getProperty("locale");
    if (locale == null) {
        throw new IllegalArgumentException("No locale given (-Dlocale=...)");
    }

    for (String arg : args) {
        if (arg.endsWith(".txt"))
            textFilename = arg;
        else if (arg.endsWith(".wav"))
            wavFilename = arg;
        else if (arg.endsWith(".ptc"))
            pitchFilename = arg;
        else if (arg.endsWith(".lab"))
            labFilename = arg;
        else
            throw new IllegalArgumentException("Don't know how to treat argument: " + arg);
    }

    // The intonation contour
    double[] contour = null;
    double frameShiftTime = -1;
    if (pitchFilename == null) { // need to create pitch contour from wav file
        if (wavFilename == null) {
            throw new IllegalArgumentException("Need either a pitch file or a wav file");
        }
        AudioInputStream ais = AudioSystem.getAudioInputStream(new File(wavFilename));
        AudioDoubleDataSource audio = new AudioDoubleDataSource(ais);
        PitchFileHeader params = new PitchFileHeader();
        params.fs = (int) ais.getFormat().getSampleRate();
        F0TrackerAutocorrelationHeuristic tracker = new F0TrackerAutocorrelationHeuristic(params);
        tracker.pitchAnalyze(audio);
        frameShiftTime = tracker.getSkipSizeInSeconds();
        contour = tracker.getF0Contour();
    } else { // have a pitch file -- ignore any wav file
        PitchReaderWriter f0rw = new PitchReaderWriter(pitchFilename);
        if (f0rw.contour == null) {
            throw new NullPointerException("Cannot read f0 contour from " + pitchFilename);
        }
        contour = f0rw.contour;
        frameShiftTime = f0rw.header.skipSizeInSeconds;
    }
    assert contour != null;
    assert frameShiftTime > 0;

    // The ALLOPHONES data and labels
    if (labFilename == null) {
        throw new IllegalArgumentException("No label file given");
    }
    if (textFilename == null) {
        throw new IllegalArgumentException("No text file given");
    }
    MaryTranscriptionAligner aligner = new MaryTranscriptionAligner();
    aligner.SetEnsureInitialBoundary(false);
    String labels = MaryTranscriptionAligner.readLabelFile(aligner.getEntrySeparator(),
            aligner.getEnsureInitialBoundary(), labFilename);
    MaryHttpClient mary = new MaryHttpClient();
    String text = FileUtils.readFileToString(new File(textFilename), "ASCII");
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    mary.process(text, "TEXT", "ALLOPHONES", locale, null, null, baos);
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
    docFactory.setNamespaceAware(true);
    DocumentBuilder builder = docFactory.newDocumentBuilder();
    Document doc = builder.parse(bais);
    aligner.alignXmlTranscriptions(doc, labels);
    assert doc != null;

    // durations
    double[] endTimes = new LabelfileDoubleDataSource(new File(labFilename)).getAllData();
    assert endTimes.length == labels.split(Pattern.quote(aligner.getEntrySeparator())).length;

    // Now add durations and f0 targets to document
    double prevEnd = 0;
    NodeIterator ni = MaryDomUtils.createNodeIterator(doc, MaryXML.PHONE, MaryXML.BOUNDARY);
    for (int i = 0; i < endTimes.length; i++) {
        Element e = (Element) ni.nextNode();
        if (e == null)
            throw new IllegalStateException("More durations than elements -- this should not happen!");
        double durInSeconds = endTimes[i] - prevEnd;
        int durInMillis = (int) (1000 * durInSeconds);
        if (e.getTagName().equals(MaryXML.PHONE)) {
            e.setAttribute("d", String.valueOf(durInMillis));
            e.setAttribute("end", new Formatter(Locale.US).format("%.3f", endTimes[i]).toString());
            // f0 targets at beginning, mid, and end of phone
            StringBuilder f0String = new StringBuilder();
            double startF0 = getF0(contour, frameShiftTime, prevEnd);
            if (startF0 != 0 && !Double.isNaN(startF0)) {
                f0String.append("(0,").append((int) startF0).append(")");
            }
            double midF0 = getF0(contour, frameShiftTime, prevEnd + 0.5 * durInSeconds);
            if (midF0 != 0 && !Double.isNaN(midF0)) {
                f0String.append("(50,").append((int) midF0).append(")");
            }
            double endF0 = getF0(contour, frameShiftTime, endTimes[i]);
            if (endF0 != 0 && !Double.isNaN(endF0)) {
                f0String.append("(100,").append((int) endF0).append(")");
            }
            if (f0String.length() > 0) {
                e.setAttribute("f0", f0String.toString());
            }
        } else { // boundary
            e.setAttribute("duration", String.valueOf(durInMillis));
        }
        prevEnd = endTimes[i];
    }
    if (ni.nextNode() != null) {
        throw new IllegalStateException("More elements than durations -- this should not happen!");
    }

    // TODO: add pitch values

    String acoustparams = DomUtils.document2String(doc);
    System.out.println("ACOUSTPARAMS:");
    System.out.println(acoustparams);
}
From source file:Main.java
public void playSound(final String filename) throws Exception {
    InputStream audioSrc = this.getClass().getResourceAsStream(filename);
    InputStream bufferedIn = new BufferedInputStream(audioSrc);
    audioStream = AudioSystem.getAudioInputStream(bufferedIn);
    audioFormat = audioStream.getFormat();

    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
    sourceLine = (SourceDataLine) AudioSystem.getLine(info);
    sourceLine.open(audioFormat);
    sourceLine.start();

    running = true;
    while (running) {
        audioStream.mark(BUFFER_SIZE);
        int nBytesRead = 0;
        byte[] abData = new byte[BUFFER_SIZE];
        while (nBytesRead != -1) {
            try {
                nBytesRead = audioStream.read(abData, 0, abData.length);
            } catch (IOException e) {
                e.printStackTrace();
            }
            if (nBytesRead >= 0) {
                sourceLine.write(abData, 0, nBytesRead);
            }
        }
        if (running)
            audioStream.reset();
    }
    sourceLine.drain();
    sourceLine.close();
}
From source file:Main.java
public Main() throws Exception {
    JFileChooser chooser = new JFileChooser();
    chooser.showOpenDialog(null);
    soundFile = chooser.getSelectedFile();
    System.out.println("Playing " + soundFile.getName());

    Line.Info linfo = new Line.Info(Clip.class);
    Line line = AudioSystem.getLine(linfo);
    clip = (Clip) line;
    clip.addLineListener(this);

    AudioInputStream ais = AudioSystem.getAudioInputStream(soundFile);
    clip.open(ais);
    clip.start();
}
From source file:CoreJavaSound.java
public CoreJavaSound() throws Exception {
    JFileChooser chooser = new JFileChooser();
    chooser.showOpenDialog(null);
    soundFile = chooser.getSelectedFile();
    System.out.println("Playing " + soundFile.getName());

    Line.Info linfo = new Line.Info(Clip.class);
    Line line = AudioSystem.getLine(linfo);
    clip = (Clip) line;
    clip.addLineListener(this);

    AudioInputStream ais = AudioSystem.getAudioInputStream(soundFile);
    clip.open(ais);
    clip.start();
}
From source file:Main.java
public Main() throws Exception {
    URL imageURL = getClass().getClassLoader().getResource("images/k.jpeg");
    buttonIcon = new ImageIcon(imageURL);
    button = new JButton("Click to Buh!", buttonIcon);
    button.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            if (buhClip != null) {
                buhClip.setFramePosition(0);
                buhClip.start();
            } else
                System.out.println("Couldn't load sound");
        }
    });
    getContentPane().add(button);

    URL soundURL = getClass().getClassLoader().getResource("sounds/b.aiff");
    Line.Info linfo = new Line.Info(Clip.class);
    Line line = AudioSystem.getLine(linfo);
    buhClip = (Clip) line;
    AudioInputStream ais = AudioSystem.getAudioInputStream(soundURL);
    buhClip.open(ais);
}