List of usage examples for java.io BufferedReader ready
public boolean ready() throws IOException
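ready() reports whether the next read is guaranteed not to block: for a BufferedReader it returns true when the buffer still holds characters or the underlying stream is itself ready. Most of the examples below use while (reader.ready()) as a loose end-of-input test, which works well for local files but is not a reliable end-of-stream check for sockets or console input. A minimal sketch (the file name example.txt is only illustrative):

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

public class ReadyExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical input file; replace with a real path.
        try (BufferedReader br = new BufferedReader(new FileReader("example.txt"))) {
            // ready() stays true while the next read() will not block,
            // which for a local file effectively means more data remains.
            while (br.ready()) {
                System.out.println(br.readLine());
            }
        }
    }
}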
From source file:configuration.Util.java
/**
 * Return the content of a file as an Array of String
 * @param filename
 * @return an Array of String
 */
public static String[] InputFile(String filename) {
    filename = filename.trim();
    Vector<String> tmp = new Vector<String>();
    try {
        BufferedReader br = new BufferedReader(new FileReader(new File(filename)));
        while (br.ready()) {
            tmp.add(br.readLine());
        }
        br.close();
    } catch (Exception e) {
        System.out.println("Open " + filename + " Failed!");
        System.out.println(e);
        Config.log("Unable to open " + filename);
        return new String[1];
    }
    String[] tmp2 = new String[tmp.size()];
    int index = 0;
    for (String s : tmp)
        tmp2[index++] = s;
    return tmp2;
}
From source file:com.pironet.tda.SunJDKParser.java
/**
 * parses a loggc file stream and reads any found class histograms and adds them to the dump store
 *
 * @param loggcFileStream the stream to read
 * @param root the root node of the dumps.
 */
public void parseLoggcFile(InputStream loggcFileStream, DefaultMutableTreeNode root) {
    final BufferedReader bis = new BufferedReader(new InputStreamReader(loggcFileStream));
    final List<HistogramTableModel> histograms = new Vector<>();
    try {
        while (bis.ready()) {
            bis.mark(getMarkSize());
            String nextLine = bis.readLine();
            if (nextLine.startsWith("num #instances #bytes class name")) {
                bis.reset();
                histograms.add(parseNextClassHistogram(bis));
            }
        }
        // now add the found histograms to the tree.
        for (int i = histograms.size() - 1; i >= 0; i--) {
            DefaultMutableTreeNode dump = getNextDumpForHistogram(root);
            if (dump != null) {
                addHistogramToDump(dump, histograms.get(i));
            }
        }
    } catch (IOException ex) {
        ex.printStackTrace();
    } finally {
        IOUtils.closeQuietly(bis);
    }
}
From source file:com.ibm.amc.demo.provider.AmcDemoCommands.java
@Override
public void setFirmware(DeviceContext deviceContext, InputStream inputStream)
        throws InvalidCredentialsException, DeviceExecutionException, AMPIOException, AMPException {
    if (logger.isEntryEnabled())
        logger.entry("setFirmware", deviceContext, inputStream);
    final Device device = getDevice(deviceContext);
    String contents = null;
    String tagname = "firmwareRev";
    InputStreamReader inputStreamReader = null;
    BufferedReader bufferedReader = null;
    try {
        try {
            String openTag = "<" + tagname + ">";
            String closeTag = "</" + tagname + ">";
            inputStreamReader = new InputStreamReader(new Base64InputStream(inputStream), "ISO-8859-1");
            bufferedReader = new BufferedReader(inputStreamReader);
            while (bufferedReader.ready() && (contents == null)) {
                String line = bufferedReader.readLine();
                if (line != null) {
                    if (line.indexOf("-----BEGIN ") > -1) {
                        break;
                    }
                    int openTagIndex = line.indexOf(openTag);
                    if (openTagIndex > -1) {
                        int closeTagIndex = line.lastIndexOf(closeTag);
                        int beginIndex = openTagIndex + openTag.length();
                        int endIndex = closeTagIndex;
                        contents = line.substring(beginIndex, endIndex);
                    }
                }
            }
        } finally {
            if (bufferedReader != null) {
                bufferedReader.close();
            } else if (inputStreamReader != null) {
                inputStreamReader.close();
            } else if (inputStream != null) {
                inputStream.close();
            }
        }
    } catch (Throwable e) {
        throw new AMPException(e);
    }
    if (contents == null) {
        throw new AMPException();
    }
    int periodIndex = contents.indexOf(".");
    String version = contents.substring(periodIndex + 1);
    device.setFirmwareLevel(version);
    unquiesceDevice(deviceContext);
    if (logger.isEntryEnabled())
        logger.exit("setFirmware");
}
From source file:org.apache.flink.yarn.cli.FlinkYarnSessionCli.java
public static void runInteractiveCli(YarnClusterClient yarnCluster, boolean readConsoleInput) {
    final String HELP = "Available commands:\n" + "help - show these commands\n"
            + "stop - stop the YARN session";
    int numTaskmanagers = 0;
    try {
        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        label: while (true) {
            // ------------------ check if there are updates by the cluster -----------
            GetClusterStatusResponse status = yarnCluster.getClusterStatus();
            LOG.debug("Received status message: {}", status);

            if (status != null && numTaskmanagers != status.numRegisteredTaskManagers()) {
                System.err.println("Number of connected TaskManagers changed to "
                        + status.numRegisteredTaskManagers() + ". "
                        + "Slots available: " + status.totalNumberOfSlots());
                numTaskmanagers = status.numRegisteredTaskManagers();
            }

            List<String> messages = yarnCluster.getNewMessages();
            if (messages != null && messages.size() > 0) {
                System.err.println("New messages from the YARN cluster: ");
                for (String msg : messages) {
                    System.err.println(msg);
                }
            }

            if (yarnCluster.getApplicationStatus() != ApplicationStatus.SUCCEEDED) {
                System.err.println("The YARN cluster has failed");
                yarnCluster.shutdown();
            }

            // wait until CLIENT_POLLING_INTERVAL is over or the user entered something.
            long startTime = System.currentTimeMillis();
            while ((System.currentTimeMillis() - startTime) < CLIENT_POLLING_INTERVALL * 1000
                    && (!readConsoleInput || !in.ready())) {
                Thread.sleep(200);
            }

            //------------- handle interactive command by user. ----------------------
            if (readConsoleInput && in.ready()) {
                String command = in.readLine();
                switch (command) {
                case "quit":
                case "stop":
                    yarnCluster.shutdownCluster();
                    break label;
                case "help":
                    System.err.println(HELP);
                    break;
                default:
                    System.err.println("Unknown command '" + command + "'. Showing help: \n" + HELP);
                    break;
                }
            }

            if (yarnCluster.hasBeenShutdown()) {
                LOG.info("Stopping interactive command line interface, YARN cluster has been stopped.");
                break;
            }
        }
    } catch (Exception e) {
        LOG.warn("Exception while running the interactive command line interface", e);
    }
}
From source file:configuration.Util.java
public static ArrayList<String> loadStrings(String filename) {
    ArrayList<String> tmp = new ArrayList<String>();
    try {
        //Change to read UTF-8 here
        BufferedReader br = new BufferedReader(
                new InputStreamReader(new FileInputStream(new File(filename)), "ISO-8859-1"));
        while (br.ready()) {
            tmp.add(br.readLine());
        }
        br.close();
    } catch (Exception e) {
        System.out.println("LoadStrings Failed!");
        System.out.println(e);
    }
    return tmp;
}
From source file:uk.bl.dpt.qa.gui.DissimilarGUIThread.java
/**
 * Open input csv file
 * @param pFile file to load from
 */
private void internalOpenFile(File pFile) {
    init();
    gResults = Collections.synchronizedList(new LinkedList<CheckResult>());
    try {
        BufferedReader buf = new BufferedReader(new FileReader(pFile));
        while (buf.ready()) {
            CheckResult result = new CheckResult();
            String[] line = buf.readLine().split(",");
            if (line == null || line.length != 2) {
                continue;
            }
            result.setFileOne(new File(line[0]));
            result.setFileTwo(new File(line[1]));
            gResults.add(result);
        }
        buf.close();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    //initialise the results array to hold the correct number of results
    if (checkboxGenerateOnLoad.selectedProperty().get()) {
        //start a background thread to calculate all the values
        internalStartBackgroundLoadThread();
    }
    //load the first image
    internalDisplayPair(gCurrentRecord);
}
From source file:net.sourceforge.doddle_owl.ui.InputDocumentSelectionPanel.java
public void loadDocuments(File openDir) {
    File docInfo = new File(openDir, ProjectFileNames.DOC_INFO_FILE);
    if (!docInfo.exists()) {
        return;
    }
    BufferedReader reader = null;
    try {
        FileInputStream fis = new FileInputStream(docInfo);
        reader = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
        while (reader.ready()) {
            String line = reader.readLine();
            String[] info = line.split(",");
            if (info.length != 3) {
                continue;
            }
            String type = info[0];
            String fileName = info[1];
            String lang = info[2];
            if (type.equals("doc")) {
                DefaultListModel model = (DefaultListModel) docList.getModel();
                model.addElement(new Document(lang, new File(fileName)));
            } else if (type.equals("inputDoc")) {
                DefaultListModel model = (DefaultListModel) inputDocList.getModel();
                model.addElement(new Document(lang, new File(fileName)));
            }
        }
        inputTermSelectionPanel.setInputDocumentListModel(inputDocList.getModel());
    } catch (IOException ioex) {
        ioex.printStackTrace();
    } finally {
        try {
            if (reader != null) {
                reader.close();
            }
        } catch (IOException ioe2) {
            ioe2.printStackTrace();
        }
    }
}
From source file:mrcg.MRCGInstance.java
private void createSQL() throws Exception {
    String databaseType = getString("database.type");
    DBScripter dbScripter = DB_SCRIPTER.get(databaseType);
    String script = dbScripter.buildScript(getString("database.name"), types);
    File file = new File(projectPath + "database/" + databaseType + "/01-create-generated.sql");
    boolean changed = content(file, script, true);
    if (isSkipDatabaseEnabled())
        return;
    if (isDesignAlwaysModeEnabled() || (changed && isDesignModeEnabled())) {
        System.out.print("Running SQL Script...");
        Process process = Runtime.getRuntime().exec("bash runall.sh", null,
                new File(projectPath + "database/" + databaseType));
        BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream()));
        BufferedReader err = new BufferedReader(new InputStreamReader(process.getErrorStream()));
        System.out.println(process.waitFor());
        while (in.ready() || err.ready()) {
            if (in.ready()) {
                System.out.println(in.readLine());
            } else if (err.ready()) {
                System.err.println(err.readLine());
            }
        }
    }
}
From source file:com.pironet.tda.SunJDKParser.java
/**
 * parses the next class histogram found in the stream, uses the max check
 * lines option to check how many lines to parse in advance.
 * <p>
 * This could be called from parseLoggcFile, which is outside our normal
 * calling stream. Thus, we have to pass in the BufferedReader. However, to
 * handle a WrappedSunJDKParser, we have to use getNextLine() if possible.
 *
 * @param bis the stream to read.
 */
private HistogramTableModel parseNextClassHistogram(BufferedReader bis) throws IOException {
    boolean finished = false;
    boolean found = false;
    final HistogramTableModel classHistogram = new HistogramTableModel();
    int maxLinesCounter = 0;
    boolean isNormalBis = bis == getBis();

    while (bis.ready() && !finished) {
        String line = (isNormalBis) ? getNextLine().trim() : bis.readLine().trim();
        if (!found && !Strings.isNullOrEmpty(line)) {
            if (line.startsWith("num #instances #bytes class name")) {
                found = true;
            } else if (maxLinesCounter >= getMaxCheckLines()) {
                finished = true;
            } else {
                maxLinesCounter++;
            }
        } else if (found) {
            if (line.startsWith("Total ")) {
                // split string.
                String newLine = PATTERN_SPACE.matcher(line).replaceAll(";");
                String[] elems = newLine.split(";");
                classHistogram.setBytes(Long.parseLong(elems[2]));
                classHistogram.setInstances(Long.parseLong(elems[1]));
                finished = true;
            } else if (!line.startsWith("-------")) {
                // removed blank, breaks splitting using blank...
                String newLine = PATTERN_NONAME.matcher(line).replaceAll("<no-name>");
                // split string.
                newLine = PATTERN_SPACE.matcher(newLine).replaceAll(";");
                String[] elems = newLine.split(";");
                if (elems.length == 4) {
                    classHistogram.addEntry(elems[3].trim(), Integer.parseInt(elems[2].trim()),
                            Integer.parseInt(elems[1].trim()));
                } else {
                    classHistogram.setIncomplete(true);
                    finished = true;
                }
            }
        }
    }
    return (classHistogram);
}
From source file:com.civprod.writerstoolbox.OpenNLP.training.WordSplitingTokenizerTrainer.java
private void cmdTrainActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cmdTrainActionPerformed
    final WordSplitingTokenizerTrainer tempThis = this;
    final Charset utf8 = Charset.forName("UTF-8");
    new Thread(() -> {
        textTestResults.setText("");
        //create TokenizerFactory part of the training context
        WordSplittingTokenizerFactory myTokenizerFactory = new WordSplittingTokenizerFactory("EN",
                mAbbreviationDictionary, false, null, mSpellingDictionary,
                (TimeComplexity) comboTimeComplexity.getSelectedItem());
        Tokenizer stdTokenizer = null;
        try {
            stdTokenizer = OpenNLPUtils.createTokenizer();
        } catch (IOException ex) {
            Logger.getLogger(WordSplitingTokenizerTrainer.class.getName()).log(Level.SEVERE, null, ex);
        }
        Tokenizer myNonSplitingTokenizer = null;
        try {
            myNonSplitingTokenizer = OpenNLPUtils.createTokenizer(OpenNLPUtils.readTokenizerModel(
                    OpenNLPUtils.buildModelFileStream(".\\data\\OpenNLP\\en-fiction-token.bin")));
        } catch (IOException ex) {
            Logger.getLogger(WordSplitingTokenizerTrainer.class.getName()).log(Level.SEVERE, null, ex);
        }

        List<FileSplit> FileSplits = FileSplit.generateFileSplitsLOO(mFileCollectionListModel);
        File trainingFile = new File("en-token.train");
        File testFile = new File("en-token.test");

        SummaryStatistics curFStats = new SummaryStatistics();
        SummaryStatistics curRecallStats = new SummaryStatistics();
        SummaryStatistics curPrecisionStats = new SummaryStatistics();
        SummaryStatistics stdFStats = new SummaryStatistics();
        SummaryStatistics stdRecallStats = new SummaryStatistics();
        SummaryStatistics stdPrecisionStats = new SummaryStatistics();
        SummaryStatistics myNonSplitFStats = new SummaryStatistics();
        SummaryStatistics myNonSplitRecallStats = new SummaryStatistics();
        SummaryStatistics myNonSplitPrecisionStats = new SummaryStatistics();

        java.io.BufferedWriter trainingFileWriter = null;
        for (FileSplit curFileSplit : FileSplits) {
            try {
                //create training file
                trainingFileWriter = new java.io.BufferedWriter(
                        new java.io.OutputStreamWriter(new java.io.FileOutputStream(trainingFile), utf8));
                for (File curTrainingFile : curFileSplit.getTrainingFiles()) {
                    java.io.BufferedReader curTrainingFileReader = null;
                    try {
                        Charset fileCharset = FileUtils.determineCharset(curTrainingFile);
                        if (fileCharset == null) {
                            fileCharset = utf8;
                        }
                        curTrainingFileReader = new java.io.BufferedReader(new java.io.InputStreamReader(
                                new java.io.FileInputStream(curTrainingFile), fileCharset));
                        while (curTrainingFileReader.ready()) {
                            String curLine = curTrainingFileReader.readLine();
                            trainingFileWriter.append(curLine).append("\n");
                        }
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    } finally {
                        if (curTrainingFileReader != null) {
                            curTrainingFileReader.close();
                        }
                    }
                }
                trainingFileWriter.write('\n');
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (trainingFileWriter != null) {
                    try {
                        trainingFileWriter.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            //create test file
            java.io.BufferedWriter testFileWriter = null;
            try {
                //create training file
                testFileWriter = new java.io.BufferedWriter(
                        new java.io.OutputStreamWriter(new java.io.FileOutputStream(testFile), utf8));
                for (File curTrainingFile : curFileSplit.getTestFiles()) {
                    String testingFileName = curTrainingFile.getCanonicalPath();
                    textTestResults.setText(textTestResults.getText() + "testing with " + testingFileName + "\n");
                    java.io.BufferedReader curTrainingFileReader = null;
                    try {
                        Charset fileCharset = FileUtils.determineCharset(curTrainingFile);
                        if (fileCharset == null) {
                            fileCharset = utf8;
                        }
                        curTrainingFileReader = new java.io.BufferedReader(new java.io.InputStreamReader(
                                new java.io.FileInputStream(curTrainingFile), fileCharset));
                        while (curTrainingFileReader.ready()) {
                            String curLine = curTrainingFileReader.readLine();
                            testFileWriter.append(curLine).append("\n");
                        }
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    } finally {
                        if (curTrainingFileReader != null) {
                            curTrainingFileReader.close();
                        }
                    }
                }
                testFileWriter.write('\n');
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (testFileWriter != null) {
                    try {
                        testFileWriter.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            //create and train model
            ObjectStream<String> trainingLineStream = null;
            TokenizerModel train = null;
            try {
                trainingLineStream = new PlainTextByLineStream(new FileInputStream(trainingFile), utf8);
                ObjectStream<TokenSample> sampleStream = null;
                try {
                    sampleStream = new TokenSampleStream(trainingLineStream);
                    train = TokenizerME.train(sampleStream, myTokenizerFactory, TrainingParameters.defaultParams());
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (sampleStream != null) {
                        try {
                            sampleStream.close();
                        } catch (IOException ex) {
                            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                        }
                    }
                }
            } catch (FileNotFoundException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (trainingLineStream != null) {
                    try {
                        trainingLineStream.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            if (train != null) {
                ObjectStream<String> testingLineStream = null;
                try {
                    testingLineStream = new PlainTextByLineStream(new FileInputStream(testFile), utf8);
                    ObjectStream<TokenSample> sampleStream = null;
                    try {
                        sampleStream = new TokenSampleStream(testingLineStream);
                        TokenizerME testDetector = new TokenizerME(train);
                        TokenizerEvaluator evaluator = new TokenizerEvaluator(testDetector);
                        evaluator.evaluate(sampleStream);
                        FMeasure testFMeasure = evaluator.getFMeasure();
                        curFStats.addValue(testFMeasure.getFMeasure());
                        curRecallStats.addValue(testFMeasure.getRecallScore());
                        curPrecisionStats.addValue(testFMeasure.getPrecisionScore());
                        textTestResults.setText(textTestResults.getText() + testFMeasure.getFMeasure() + " "
                                + testFMeasure.getPrecisionScore() + " " + testFMeasure.getRecallScore() + "\n");
                        if (stdTokenizer != null) {
                            testingLineStream = new PlainTextByLineStream(new FileInputStream(testFile), utf8);
                            sampleStream = new TokenSampleStream(testingLineStream);
                            TokenizerEvaluator stdEvaluator = new TokenizerEvaluator(stdTokenizer);
                            stdEvaluator.evaluate(sampleStream);
                            FMeasure stdFMeasure = stdEvaluator.getFMeasure();
                            stdFStats.addValue(stdFMeasure.getFMeasure());
                            stdRecallStats.addValue(stdFMeasure.getRecallScore());
                            stdPrecisionStats.addValue(stdFMeasure.getPrecisionScore());
                            textTestResults.setText(textTestResults.getText() + " " + stdFMeasure.getFMeasure() + " "
                                    + stdFMeasure.getPrecisionScore() + " " + stdFMeasure.getRecallScore() + "\n");
                        }
                        if (myNonSplitingTokenizer != null) {
                            testingLineStream = new PlainTextByLineStream(new FileInputStream(testFile), utf8);
                            sampleStream = new TokenSampleStream(testingLineStream);
                            TokenizerEvaluator myNonSplitingEvaluator = new TokenizerEvaluator(
                                    myNonSplitingTokenizer);
                            myNonSplitingEvaluator.evaluate(sampleStream);
                            FMeasure myNonSplitFMeasure = myNonSplitingEvaluator.getFMeasure();
                            myNonSplitFStats.addValue(myNonSplitFMeasure.getFMeasure());
                            myNonSplitRecallStats.addValue(myNonSplitFMeasure.getRecallScore());
                            myNonSplitPrecisionStats.addValue(myNonSplitFMeasure.getPrecisionScore());
                            textTestResults.setText(textTestResults.getText() + " " + myNonSplitFMeasure.getFMeasure()
                                    + " " + myNonSplitFMeasure.getPrecisionScore() + " "
                                    + myNonSplitFMeasure.getRecallScore() + "\n");
                        }
                        textTestResults.setText(textTestResults.getText() + "\n");
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    } finally {
                        if (sampleStream != null) {
                            try {
                                sampleStream.close();
                            } catch (IOException ex) {
                                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                            }
                        }
                    }
                } catch (FileNotFoundException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (testingLineStream != null) {
                        try {
                            testingLineStream.close();
                        } catch (IOException ex) {
                            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                        }
                    }
                }
            }
        }
        textTestResults.setText(textTestResults.getText() + "\n");
        textTestResults.setText(textTestResults.getText() + "test model\n");
        textTestResults.setText(textTestResults.getText() + "f score mean " + curFStats.getMean() + " stdDev "
                + curFStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "recall mean " + curRecallStats.getMean() + " stdDev "
                + curRecallStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "precision score mean " + curPrecisionStats.getMean()
                + " stdDev " + curPrecisionStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "std model\n");
        textTestResults.setText(textTestResults.getText() + "f score mean " + stdFStats.getMean() + " stdDev "
                + stdFStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "recall mean " + stdRecallStats.getMean() + " stdDev "
                + stdRecallStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "precision score mean " + stdPrecisionStats.getMean()
                + " stdDev " + stdPrecisionStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "my non spliting model\n");
        textTestResults.setText(textTestResults.getText() + "f score mean " + myNonSplitFStats.getMean() + " stdDev "
                + myNonSplitFStats.getStandardDeviation() + "\n");
        textTestResults.setText(textTestResults.getText() + "recall mean " + myNonSplitRecallStats.getMean()
                + " stdDev " + myNonSplitRecallStats.getStandardDeviation() + "\n");
        textTestResults.setText(
                textTestResults.getText() + "precision score mean " + myNonSplitPrecisionStats.getMean()
                + " stdDev " + myNonSplitPrecisionStats.getStandardDeviation() + "\n");

        //create combinded training file
        trainingFileWriter = null;
        try {
            trainingFileWriter = new java.io.BufferedWriter(
                    new java.io.OutputStreamWriter(new java.io.FileOutputStream(trainingFile), utf8));
            for (File curTrainingFile : mFileCollectionListModel) {
                java.io.BufferedReader curTrainingFileReader = null;
                try {
                    Charset fileCharset = FileUtils.determineCharset(curTrainingFile);
                    if (fileCharset == null) {
                        fileCharset = utf8;
                    }
                    curTrainingFileReader = new java.io.BufferedReader(new java.io.InputStreamReader(
                            new java.io.FileInputStream(curTrainingFile), fileCharset));
                    while (curTrainingFileReader.ready()) {
                        String curLine = curTrainingFileReader.readLine();
                        trainingFileWriter.append(curLine).append("\n");
                    }
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (curTrainingFileReader != null) {
                        curTrainingFileReader.close();
                    }
                }
            }
            trainingFileWriter.write('\n');
        } catch (IOException ex) {
            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            if (trainingFileWriter != null) {
                try {
                    trainingFileWriter.close();
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
        //create and train model
        ObjectStream<String> lineStream = null;
        this.createdObject = null;
        try {
            lineStream = new PlainTextByLineStream(new FileInputStream(trainingFile), utf8);
            ObjectStream<TokenSample> sampleStream = null;
            try {
                sampleStream = new TokenSampleStream(lineStream);
                this.createdObject = TokenizerME.train(sampleStream, myTokenizerFactory,
                        TrainingParameters.defaultParams());
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (sampleStream != null) {
                    try {
                        sampleStream.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
        } catch (FileNotFoundException ex) {
            Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            if (lineStream != null) {
                try {
                    lineStream.close();
                } catch (IOException ex) {
                    Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
        if (createdObject != null) {
            OutputStream modelOut = null;
            File modelFile = new File("en-fiction-token.bin");
            try {
                modelOut = new BufferedOutputStream(new FileOutputStream(modelFile));
                createdObject.serialize(modelOut);
            } catch (IOException ex) {
                Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                if (modelOut != null) {
                    try {
                        modelOut.close();
                    } catch (IOException ex) {
                        Logger.getLogger(SentenceDetectorTrainer.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
        }
        textTestResults.setText(textTestResults.getText() + "done");
    }).start();
}