List of usage examples for java.util.Scanner.close()
public void close()
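Closes this scanner. If the underlying source implements Closeable, its close method is invoked as well, and attempting to scan after the scanner has been closed throws an IllegalStateException. The examples below all call close() explicitly, often in a finally block. Since Java 7, Scanner implements Closeable (and therefore AutoCloseable), so try-with-resources can do the closing automatically; the following is a minimal sketch, where the file name input.txt is just a placeholder:

import java.io.File;
import java.io.FileNotFoundException;
import java.util.Scanner;

public class ScannerCloseExample {
    public static void main(String[] args) throws FileNotFoundException {
        // Scanner implements Closeable, so try-with-resources (Java 7+)
        // closes it automatically, even if an exception is thrown.
        try (Scanner scanner = new Scanner(new File("input.txt"))) {
            while (scanner.hasNextLine()) {
                System.out.println(scanner.nextLine());
            }
        } // scanner.close() is invoked here implicitly
    }
}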
From source file:css.variable.converter.CSSVariableConverter.java
/**
 * Collects variable definitions from the main CSS file.
 *
 * @param mainFile File containing the ":root" block that defines the variables.
 * @param identifier Identifier marking the start of the variable block (e.g. ":root").
 * @return A list of the CSS variables that were found.
 */
private static ArrayList<CSSVar> getCSSVars(File mainFile, String identifier) {
    ArrayList<CSSVar> cSSVarsList = new ArrayList<CSSVar>();
    boolean isInVarDefinition = false;
    boolean isFinished = false;
    try {
        Scanner fileReader = new Scanner(mainFile);
        // Loop until the end of the text file, or until we leave :root
        while (fileReader.hasNextLine() && !isFinished) {
            String currentLine = fileReader.nextLine();
            if (isInVarDefinition) {
                // All variables are identified by the "--" prefix
                if (currentLine.contains("--")) {
                    int varNameBegIndex = currentLine.indexOf("--");
                    int varNameEndIndex = currentLine.indexOf(":");
                    String varName = currentLine.substring(varNameBegIndex, varNameEndIndex);
                    int varValueBegIndex = currentLine.indexOf(":") + 1;
                    int varValueEndIndex = currentLine.indexOf(";");
                    String varValue = currentLine.substring(varValueBegIndex, varValueEndIndex);
                    varName = varName.trim();
                    varValue = varValue.trim();
                    cSSVarsList.add(new CSSVar(varName, varValue));
                }
                if (currentLine.contains("}")) {
                    isInVarDefinition = false;
                    isFinished = true;
                }
            } else {
                if (currentLine.contains(identifier)) {
                    isInVarDefinition = true;
                }
            }
        }
        fileReader.close();
    } catch (FileNotFoundException ex) {
        Logger.getLogger(CSSVariableConverter.class.getName()).log(Level.SEVERE, null, ex);
    }
    return cSSVarsList;
}
From source file:com.joliciel.csvLearner.features.NormalisationLimitReader.java
private void readCSVFile(InputStream csvInputStream, Map<String, Float> featureToMaxMap) {
    Scanner scanner = new Scanner(csvInputStream, "UTF-8");
    try {
        boolean firstLine = true;
        while (scanner.hasNextLine()) {
            String line = scanner.nextLine();
            // Skip the header row; every other row is "featureName,maxValue"
            if (!firstLine) {
                List<String> cells = CSVFormatter.getCSVCells(line);
                String featureName = cells.get(0);
                float maxValue = Float.parseFloat(cells.get(1));
                featureToMaxMap.put(featureName, maxValue);
            }
            firstLine = false;
        }
    } finally {
        scanner.close();
    }
}
From source file:hu.sztaki.incremental.ml.streaming.imsr.MatrixVectorPairSource.java
@Override
public void invoke(Collector<Tuple2<double[][], double[][]>> out) throws Exception {
    File f = new File(path);
    if (!f.exists()) {
        System.err.println(path + " does not exist.");
        System.exit(1);
    }
    Scanner s = initCsvScanner(new Scanner(f));
    String firstLine = s.nextLine();
    // Count the columns of the first line to determine the number of
    // independent variables; the last column is the dependent variable
    Scanner firstLineScanner = initCsvScanner(new Scanner(firstLine));
    for (indepDim = 0; firstLineScanner.hasNext(); firstLineScanner.next(), indepDim++)
        ;
    firstLineScanner.close();
    indepDim--;
    while (s.hasNext()) {
        Array2DRowRealMatrix X = new Array2DRowRealMatrix(batchSize, indepDim);
        Array2DRowRealMatrix y = new Array2DRowRealMatrix(batchSize, 1);
        readMatricesSideBySide(s, X, y);
        out.collect(new Tuple2<double[][], double[][]>(X.getDataRef(), y.getDataRef()));
    }
    s.close();
    out.close();
}
From source file:io.proleap.cobol.preprocessor.sub.document.impl.CobolDocumentParserListenerImpl.java
protected String buildLines(final String text, final String linePrefix) {
    final StringBuffer sb = new StringBuffer(text.length());
    final Scanner scanner = new Scanner(text);
    boolean firstLine = true;
    while (scanner.hasNextLine()) {
        final String line = scanner.nextLine();
        if (!firstLine) {
            sb.append(CobolPreprocessor.NEWLINE);
        }
        sb.append(linePrefix + CobolPreprocessor.WS + line.trim());
        firstLine = false;
    }
    scanner.close();
    return sb.toString();
}
From source file:eu.cassandra.utils.Utils.java
/**
 * This function is used when the user has already tracked the electrical
 * appliances installed in the installation. These can be used as a base case
 * and extended with any additional appliances that may be found during the
 * later stages of consumption analysis.
 *
 * @param filename
 *            The name of the file containing the appliances.
 * @return A list of appliances.
 * @throws FileNotFoundException
 */
public static ArrayList<Appliance> appliancesFromFile(String filename) throws FileNotFoundException {
    // Read the appliance file and start parsing
    File file = new File(filename);
    Scanner input = new Scanner(file);
    ArrayList<Appliance> appliances = new ArrayList<Appliance>();
    String nextLine;
    String[] line;
    while (input.hasNextLine()) {
        nextLine = input.nextLine();
        line = nextLine.split(",");
        String name = line[0];
        String activity = line[1];
        if (activity.contains("Standby") == false && activity.contains("Refrigeration") == false) {
            double p = Double.parseDouble(line[2]);
            double q = Double.parseDouble(line[3]);
            // For each appliance found in the file, a temporary Appliance
            // entity is created.
            appliances.add(new Appliance(name, activity, p, q, 0, 100));
        }
    }
    System.out.println("Appliances:" + appliances.size());
    input.close();
    return appliances;
}
From source file:azkaban.jobtype.ReportalHiveRunner.java
@Override
protected void runReportal() throws Exception {
    System.out.println("Reportal Hive: Setting up Hive");
    HiveConf conf = new HiveConf(SessionState.class);
    if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
        conf.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
    }
    File tempTSVFile = new File("./temp.tsv");
    OutputStream tsvTempOutputStream = new BoundedOutputStream(
            new BufferedOutputStream(new FileOutputStream(tempTSVFile)), outputCapacity);
    PrintStream logOut = System.out;
    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    // criccomini@linkedin.com: I disabled this because it appears to swallow
    // all future logging (even outside of hive).
    // SessionState.initHiveLog4j();
    String orig = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
    CliSessionState sessionState = new CliSessionState(conf);
    sessionState.in = System.in;
    sessionState.out = new PrintStream(tsvTempOutputStream, true, "UTF-8");
    sessionState.err = new PrintStream(logOut, true, "UTF-8");
    OptionsProcessor oproc = new OptionsProcessor();
    // Feed in Hive args
    String[] args = buildHiveArgs();
    if (!oproc.process_stage1(args)) {
        throw new Exception("unable to parse options stage 1");
    }
    if (!oproc.process_stage2(sessionState)) {
        throw new Exception("unable to parse options stage 2");
    }
    // Set all properties specified via the command line
    for (Map.Entry<Object, Object> item : sessionState.cmdProperties.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
    }
    SessionState.start(sessionState);
    String expanded = expandHiveAuxJarsPath(orig);
    if (orig == null || orig.equals(expanded)) {
        System.out.println("Hive aux jars variable not expanded");
    } else {
        System.out.println("Expanded aux jars variable from [" + orig + "] to [" + expanded + "]");
        HiveConf.setVar(conf, HiveConf.ConfVars.HIVEAUXJARS, expanded);
    }
    if (!ShimLoader.getHadoopShims().usesJobShell()) {
        // hadoop-20 and above - we need to augment classpath using hiveconf
        // components
        // see also: code in ExecDriver.java
        ClassLoader loader = conf.getClassLoader();
        String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
        System.out.println("Got auxJars = " + auxJars);
        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        conf.setClassLoader(loader);
        Thread.currentThread().setContextClassLoader(loader);
    }
    CliDriver cli = new CliDriver();
    int returnValue = 0;
    String prefix = "";
    returnValue = cli.processLine("set hive.cli.print.header=true;");
    String[] queries = jobQuery.split("\n");
    for (String line : queries) {
        if (!prefix.isEmpty()) {
            prefix += '\n';
        }
        if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
            line = prefix + line;
            line = injectVariables(line);
            System.out.println("Reportal Hive: Running Hive Query: " + line);
            System.out.println("Reportal Hive: HiveConf HIVEAUXJARS: "
                    + HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS));
            returnValue = cli.processLine(line);
            prefix = "";
        } else {
            prefix = prefix + line;
            continue;
        }
    }
    tsvTempOutputStream.close();
    // Convert the TSV output to CSV and write it to disk
    System.out.println("Reportal Hive: Converting output");
    InputStream tsvTempInputStream = new BufferedInputStream(new FileInputStream(tempTSVFile));
    Scanner rowScanner = new Scanner(tsvTempInputStream);
    PrintStream csvOutputStream = new PrintStream(outputStream);
    while (rowScanner.hasNextLine()) {
        String tsvLine = rowScanner.nextLine();
        // Strip all quotes, and then quote the columns
        csvOutputStream.println("\"" + tsvLine.replace("\"", "").replace("\t", "\",\"") + "\"");
    }
    rowScanner.close();
    csvOutputStream.close();
    // Flush the temp file out
    tempTSVFile.delete();
    if (returnValue != 0) {
        throw new Exception("Hive query finished with a non-zero return code");
    }
    System.out.println("Reportal Hive: Ended successfully");
}
From source file:gov.usgs.cida.coastalhazards.rest.data.util.MetadataUtilTest.java
private String loadResourceAsString(String fileName) throws IOException {
    Scanner scanner = new Scanner(getClass().getClassLoader().getResourceAsStream(fileName));
    // The \A delimiter matches the beginning of input, so next() returns the entire stream
    String contents = scanner.useDelimiter("\\A").next();
    scanner.close();
    return contents;
}
From source file:edu.ucuenca.authorsrelatedness.Distance.java
public synchronized String Http(String s) throws SQLException, IOException {
    String get = Cache.getInstance().get(s);
    String resp = "";
    if (get != null) {
        //System.out.print(".");
        resp = get;
    } else {
        final URL url = new URL(s);
        final URLConnection connection = url.openConnection();
        connection.setConnectTimeout(60000);
        connection.setReadTimeout(60000);
        connection.addRequestProperty("User-Agent",
                "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:44.0) Gecko/20100101 Firefox/44.0");
        connection.addRequestProperty("Accept",
                "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8");
        final Scanner reader = new Scanner(connection.getInputStream(), "UTF-8");
        while (reader.hasNextLine()) {
            final String line = reader.nextLine();
            resp += line + "\n";
        }
        reader.close();
        Cache.getInstance().put(s, resp);
    }
    return resp;
}
From source file:edu.ucuenca.authorsrelatedness.Distance.java
public synchronized String Http2(String s, Map<String, String> mp) throws SQLException, IOException {
    String md = s + mp.toString();
    String get = Cache.getInstance().get(md);
    String resp = "";
    if (get != null) {
        resp = get;
    } else {
        HttpClient client = new HttpClient();
        PostMethod method = new PostMethod(s);
        method.getParams().setContentCharset("utf-8");
        // Add any parameters to be sent with the POST request
        for (Entry<String, String> mcc : mp.entrySet()) {
            method.addParameter(mcc.getKey(), mcc.getValue());
        }
        int statusCode = client.executeMethod(method);
        if (statusCode != -1) {
            InputStream in = method.getResponseBodyAsStream();
            final Scanner reader = new Scanner(in, "UTF-8");
            while (reader.hasNextLine()) {
                final String line = reader.nextLine();
                resp += line + "\n";
            }
            reader.close();
            Cache.getInstance().put(md, resp);
        }
    }
    return resp;
}
From source file:com.mucommander.job.FindFileJob.java
private boolean fileContainsString0(AbstractFile f) {
    //Profiler.start("check_old");
    if (fileContent == null || fileContent.isEmpty()) {
        return true;
    }
    if (f.isDirectory()) {
        return false;
    }
    Scanner in = null;
    boolean result = false;
    try {
        in = new Scanner(f.getInputStream());
        while (in.hasNextLine() && !result) {
            String line = in.nextLine();
            if (!caseSensitive) {
                line = line.toLowerCase();
            }
            result = line.contains(fileContent);
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        try {
            if (in != null) {
                in.close();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    //Profiler.stop("check_old");
    return result;
}