List of usage examples for java.io.FileReader.close()
public void close() throws IOException
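The real-world examples below close FileReader instances explicitly, usually in a finally block. As a baseline, here is a minimal sketch of the idiomatic alternative available since Java 7: a try-with-resources statement invokes close() automatically, even when reading throws. The file name "input.txt" is a placeholder.

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

public class FileReaderCloseExample {
    public static void main(String[] args) throws IOException {
        // Resources declared in the try header are closed automatically,
        // in reverse declaration order, even if readLine() throws.
        try (FileReader fr = new FileReader("input.txt"); // placeholder path
             BufferedReader br = new BufferedReader(fr)) {
            String line;
            while ((line = br.readLine()) != null) {
                System.out.println(line);
            }
        }
        // br.close() and fr.close() have already been invoked here
    }
}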
From source file:Evaluator.PerQueryRelDocs.java
void load(int startQid, int endQid, String relFileName, String irrelFileName, String vectorFolderLocation)
        throws Exception {
    FileReader fr = new FileReader(qrelsFile);
    BufferedReader br = new BufferedReader(fr);
    FileReader fr1 = new FileReader(vectorFolderLocation + startQid + ".txt");
    BufferedReader br1 = new BufferedReader(fr1);
    String line1;
    String line;
    Integer d1 = startQid;
    line1 = br1.readLine();
    while ((line = br.readLine()) != null) {
        if (line1 != null) {
            storeRelRcd(line, line1);
            line1 = br1.readLine();
        } else {
            // current vector file is exhausted; close it and open the next query's file
            d1++;
            br1.close();
            fr1.close();
            // System.out.println(vectorFolderLocation + d1 + ".txt");
            fr1 = new FileReader(vectorFolderLocation + d1 + ".txt");
            br1 = new BufferedReader(fr1);
            line1 = br1.readLine();
            storeRelRcd(line, line1);
        }
    }
    for (Integer d = startQid; d <= endQid; d++) {
        PerQueryRelDocs reldoc = perQueryRels.get(d.toString());
        // System.out.println(reldoc);
        for (int i = 0; i < reldoc.relCentroid.size(); i++) {
            reldoc.relCentroid.set(i, reldoc.relCentroid.get(i) / reldoc.relMap.size());
        }
    }
    // close all remaining readers
    br1.close();
    fr1.close();
    br.close();
    fr.close();
}
From source file:org.apache.hadoop.mapred.util.LinuxResourceCalculatorPlugin.java
/**
 * Read /proc/meminfo, parse and compute memory information
 * @param readAgain if false, read only on the first time
 */
private void readProcMemInfoFile(boolean readAgain) {
    if (readMemInfoFile && !readAgain) {
        return;
    }

    // Read "/proc/meminfo" file
    BufferedReader in = null;
    FileReader fReader = null;
    try {
        fReader = new FileReader(procfsMemFile);
        in = new BufferedReader(fReader);
    } catch (FileNotFoundException f) {
        // shouldn't happen....
        return;
    }

    Matcher mat = null;
    try {
        String str = in.readLine();
        while (str != null) {
            mat = PROCFS_MEMFILE_FORMAT.matcher(str);
            if (mat.find()) {
                if (mat.group(1).equals(MEMTOTAL_STRING)) {
                    ramSize = Long.parseLong(mat.group(2));
                } else if (mat.group(1).equals(SWAPTOTAL_STRING)) {
                    swapSize = Long.parseLong(mat.group(2));
                } else if (mat.group(1).equals(MEMFREE_STRING)) {
                    ramSizeFree = Long.parseLong(mat.group(2));
                } else if (mat.group(1).equals(SWAPFREE_STRING)) {
                    swapSizeFree = Long.parseLong(mat.group(2));
                } else if (mat.group(1).equals(INACTIVE_STRING)) {
                    inactiveSize = Long.parseLong(mat.group(2));
                }
            }
            str = in.readLine();
        }
    } catch (IOException io) {
        // LOG.warn("Error reading the stream " + io);
    } finally {
        // Close each stream independently so a failure closing one
        // cannot leak the other
        try {
            in.close();
        } catch (IOException i) {
            // LOG.warn("Error closing the stream " + in);
        }
        try {
            fReader.close();
        } catch (IOException i) {
            // LOG.warn("Error closing the stream " + fReader);
        }
    }

    readMemInfoFile = true;
}
From source file:org.gwaspi.reports.PlinkReportLoader.java
public static CombinedRangeXYPlot loadAssocUnadjLogPvsPos(File plinkReport, Set<String> redMarkersHS)
        throws IOException {
    XYSeriesCollection chrData = new XYSeriesCollection();
    NumberAxis sharedAxis = new NumberAxis("-log₁₀(P)");
    CombinedRangeXYPlot combinedPlot = new CombinedRangeXYPlot(sharedAxis);
    combinedPlot.setGap(0);

    XYSeries series1 = null;
    XYSeries series2 = null;

    FileReader inputFileReader = null;
    BufferedReader inputBufferReader = null;
    String tempChr = null;
    try {
        inputFileReader = new FileReader(plinkReport);
        inputBufferReader = new BufferedReader(inputFileReader);

        // Getting data from file and subdividing to series all points by chromosome
        String l;
        tempChr = "";
        String header = inputBufferReader.readLine();
        int count = 0;
        while ((l = inputBufferReader.readLine()) != null) {
            if (count % 10000 == 0) {
                log.info("loadAssocUnadjLogPvsPos -> reader count: {}", count);
            }
            count++;
            l = l.trim().replaceAll("\\s+", ",");
            String[] cVals = l.split(",");
            String markerId = cVals[1];
            int position = Integer.parseInt(cVals[2]);
            String s_pVal = cVals[8];
            if (!s_pVal.equals("NA")) {
                double logPValue = Math.abs(Math.log(Double.parseDouble(s_pVal)) / Math.log(10));
                if (cVals[0].equals(tempChr)) {
                    if (redMarkersHS.contains(markerId)) {
                        series2.add(position, logPValue);
                    } else {
                        series1.add(position, logPValue);
                    }
                    labeler.put(tempChr + "_" + position, markerId);
                } else {
                    if (!tempChr.isEmpty()) { // SKIP FIRST TIME (NO DATA YET!)
                        chrData.addSeries(series1);
                        chrData.addSeries(series2);
                        appendToCombinedRangePlot(combinedPlot, tempChr, chrData);
                    }
                    tempChr = cVals[0];
                    series1 = new XYSeries("Imputed");
                    series2 = new XYSeries("Observed");
                    labeler.put(tempChr + "_" + position, markerId);
                    if (redMarkersHS.contains(markerId)) {
                        series2.add(position, logPValue);
                    } else {
                        series1.add(position, logPValue);
                    }
                }
            }
        }
    } finally {
        // closing the BufferedReader also closes the wrapped FileReader;
        // the FileReader is closed directly only if wrapping it failed
        if (inputBufferReader != null) {
            inputBufferReader.close();
        } else if (inputFileReader != null) {
            inputFileReader.close();
        }
    }

    chrData.addSeries(series1);
    chrData.addSeries(series2);
    appendToCombinedRangePlot(combinedPlot, tempChr, chrData); // ADD LAST CHR TO PLOT

    return combinedPlot;
}
From source file:i2p.bote.I2PBote.java
/**
 * Sets up a {@link I2PSession}, using the I2P destination stored on disk or creating a new I2P
 * destination if no key file exists.
 */
private void initializeSession() throws I2PSessionException {
    Properties sessionProperties = new Properties();
    // set tunnel names
    sessionProperties.setProperty("inbound.nickname", "I2P-Bote");
    sessionProperties.setProperty("outbound.nickname", "I2P-Bote");
    if (configuration.isI2CPDomainSocketEnabled())
        sessionProperties.setProperty("i2cp.domainSocket", "true");
    // According to sponge, muxed depends on gzip, so leave gzip enabled

    // read the local destination key from the key file if it exists
    File destinationKeyFile = configuration.getDestinationKeyFile();
    FileReader fileReader = null;
    try {
        fileReader = new FileReader(destinationKeyFile);
        char[] destKeyBuffer = new char[(int) destinationKeyFile.length()];
        fileReader.read(destKeyBuffer);
        byte[] localDestinationKey = Base64.decode(new String(destKeyBuffer));
        ByteArrayInputStream inputStream = new ByteArrayInputStream(localDestinationKey);
        socketManager = I2PSocketManagerFactory.createDisconnectedManager(inputStream, null, 0,
                sessionProperties);
    } catch (IOException e) {
        log.debug("Destination key file doesn't exist or isn't readable." + e);
    } catch (I2PSessionException e) {
        // Won't happen, inputStream != null
    } finally {
        if (fileReader != null)
            try {
                fileReader.close();
            } catch (IOException e) {
                log.debug("Error closing file: <" + destinationKeyFile.getAbsolutePath() + ">" + e);
            }
    }

    // if the local destination key can't be read or is invalid, create a new one
    if (socketManager == null) {
        log.debug("Creating new local destination key");
        try {
            ByteArrayOutputStream arrayStream = new ByteArrayOutputStream();
            i2pClient.createDestination(arrayStream);
            byte[] localDestinationKey = arrayStream.toByteArray();
            ByteArrayInputStream inputStream = new ByteArrayInputStream(localDestinationKey);
            socketManager = I2PSocketManagerFactory.createDisconnectedManager(inputStream, null, 0,
                    sessionProperties);
            saveLocalDestinationKeys(destinationKeyFile, localDestinationKey);
        } catch (I2PException e) {
            log.error("Error creating local destination key.", e);
        } catch (IOException e) {
            log.error("Error writing local destination key to file.", e);
        }
    }

    i2pSession = socketManager.getSession();
    // Throws I2PSessionException if the connection fails
    i2pSession.connect();

    Destination localDestination = i2pSession.getMyDestination();
    log.info("Local destination key (base64): " + localDestination.toBase64());
    log.info("Local destination hash (base64): " + localDestination.calculateHash().toBase64());
    log.info("Local destination hash (base32): " + Util.toBase32(localDestination));
}
From source file:org.apache.sling.maven.slingstart.ModelPreprocessor.java
/**
 * Search for dependent slingstart/slingfeature artifacts and remove them from the effective model.
 * @throws MavenExecutionException
 */
private List<Model> searchSlingstartDependencies(final Environment env, final ProjectInfo info,
        final Model rawModel, final Model effectiveModel) throws MavenExecutionException {
    // slingstart or slingfeature
    final List<Model> dependencies = new ArrayList<Model>();

    for (final Feature feature : effectiveModel.getFeatures()) {
        for (final RunMode runMode : feature.getRunModes()) {
            for (final ArtifactGroup group : runMode.getArtifactGroups()) {
                final List<org.apache.sling.provisioning.model.Artifact> removeList =
                        new ArrayList<org.apache.sling.provisioning.model.Artifact>();
                for (final org.apache.sling.provisioning.model.Artifact a : group) {
                    if (a.getType().equals(BuildConstants.PACKAGING_SLINGSTART)
                            || a.getType().equals(BuildConstants.PACKAGING_PARTIAL_SYSTEM)) {
                        final Dependency dep = new Dependency();
                        dep.setGroupId(a.getGroupId());
                        dep.setArtifactId(a.getArtifactId());
                        dep.setVersion(a.getVersion());
                        dep.setType(BuildConstants.PACKAGING_PARTIAL_SYSTEM);
                        if (a.getType().equals(BuildConstants.PACKAGING_SLINGSTART)) {
                            dep.setClassifier(BuildConstants.PACKAGING_PARTIAL_SYSTEM);
                        } else {
                            dep.setClassifier(a.getClassifier());
                        }
                        dep.setScope(Artifact.SCOPE_PROVIDED);

                        env.logger.debug("- adding dependency " + ModelUtils.toString(dep));
                        info.project.getDependencies().add(dep);

                        // if it's a project from the current reactor build, we can't resolve it right now
                        final String key = a.getGroupId() + ":" + a.getArtifactId();
                        final ProjectInfo depInfo = env.modelProjects.get(key);
                        if (depInfo != null) {
                            env.logger.debug("Found reactor " + a.getType() + " dependency : " + a);
                            final Model model = addDependencies(env, depInfo);
                            if (model == null) {
                                throw new MavenExecutionException(
                                        "Recursive model dependency list including project " + info.project,
                                        (File) null);
                            }
                            dependencies.add(model);
                            info.includedModels.put(a, depInfo.localModel);
                        } else {
                            env.logger.debug("Found external " + a.getType() + " dependency: " + a);

                            // "external" dependency, we can already resolve it
                            final File modelFile = resolveSlingstartArtifact(env, info.project, dep);
                            FileReader r = null;
                            try {
                                r = new FileReader(modelFile);
                                final Model model = ModelReader.read(r, modelFile.getAbsolutePath());
                                info.includedModels.put(a, model);

                                final Map<Traceable, String> errors = ModelUtility.validate(model);
                                if (errors != null) {
                                    throw new MavenExecutionException(
                                            "Unable to read model file from " + modelFile + " : " + errors,
                                            modelFile);
                                }
                                final Model fullModel = processSlingstartDependencies(env, info, dep, model);
                                dependencies.add(fullModel);
                            } catch (final IOException ioe) {
                                throw new MavenExecutionException(
                                        "Unable to read model file from " + modelFile, ioe);
                            } finally {
                                try {
                                    if (r != null) {
                                        r.close();
                                    }
                                } catch (final IOException io) {
                                    // ignore
                                }
                            }
                        }
                        removeList.add(a);
                    }
                }
                for (final org.apache.sling.provisioning.model.Artifact r : removeList) {
                    group.remove(r);
                    final Feature localModelFeature = rawModel.getFeature(feature.getName());
                    if (localModelFeature != null) {
                        final RunMode localRunMode = localModelFeature.getRunMode(runMode.getNames());
                        if (localRunMode != null) {
                            final ArtifactGroup localAG = localRunMode.getArtifactGroup(group.getStartLevel());
                            if (localAG != null) {
                                localAG.remove(r);
                            }
                        }
                    }
                }
            }
        }
    }
    return dependencies;
}
From source file:org.agmip.ui.afsirs.frames.SWFrame.java
private void readSoilDataJsonFileForUtils(File latestFile) throws FileNotFoundException {
    SoilData soilData = utils.getSoilData(); //SoilData.getSoilDataInstance();
    if (soilData.getSoilsFromFile(latestFile.getName().trim()) != null)
        return;

    String str = "";
    SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss");
    str += "Data Date: " + sdf.format(latestFile.lastModified()) + "\n";

    ObjectMapper mapper = new ObjectMapper();
    JsonNode root = null;
    FileReader fr = null;
    try {
        fr = new FileReader(latestFile);
        root = mapper.readTree(fr);
    } catch (FileNotFoundException e) {
        throw new FileNotFoundException(Messages.FILE_NOT_FOUND_MESSAGE);
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (fr != null) {
            try {
                fr.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
    // bail out if the JSON tree could not be parsed
    if (root == null) {
        return;
    }

    JsonNode soils = root.path("soils");
    int row = 0;
    int whcIndex = waterholdcapacityBox.getSelectedIndex();
    ArrayList<Soil> soilList = new ArrayList<Soil>();
    for (JsonNode n : soils) {
        String soilSeriesName = n.path("mukeyName").textValue();
        String soilSeriesKey = n.path("mukey").textValue();
        String soilName = n.path("soilName").textValue();
        String compKey = n.path("cokey").textValue();
        String soilTypeArea = n.path("compArea").textValue();
        JsonNode soilLayersNodes = n.path("soilLayer");

        int NL = 0;
        double[] wc = new double[6];
        double[] wcl = new double[6];
        double[] wcu = new double[6];
        double[] du = new double[6];
        String[] txt = new String[3];

        for (JsonNode node : soilLayersNodes) {
            //System.out.println ("NL we are looking for: " + NL);
            wcu[NL] = node.get("sldul").asDouble() / 100.00;
            du[NL] = node.get("sllb").asDouble() * 0.39370;
            du[NL] = Math.floor(du[NL] * 1000) / 1000;
            wcl[NL] = node.get("slll").asDouble() / 100.00;
            if (whcIndex == 0) {
                wc[NL] = wcl[NL];
            } else if (whcIndex == 2) {
                wc[NL] = wcu[NL];
            } else {
                wc[NL] = 0.5 * (wcl[NL] + wcu[NL]);
            }
            wc[NL] = Math.floor(wc[NL] * 1000) / 1000;
            NL++;
        }

        // soilSeriesKey
        Soil soil = new Soil(row, soilName, soilSeriesKey, compKey, soilSeriesName, NL);
        soil.setValues(wc, wcl, wcu, du, txt);
        if (soilTypeArea != null)
            soil.setSoilTypeArea(Double.valueOf(soilTypeArea));
        else
            soil.setSoilTypeArea(0.0);
        //soilData.addSoil(latestFile.getName(), soil);
        soilList.add(soil);
        row++;
    }
    if (soilList.size() > 0)
        utils.setDefaultSoil(soilList.get(0));

    // Here we prepare the soilData
    soilData.addSoilList(latestFile.getName(), soilList);
}
From source file:guineu.modules.filter.report.qualityReport.ReportTask.java
/**
 * Read the file with the name of the samples in order
 * @throws java.lang.Exception
 */
private List<sample> readFile() throws Exception {
    List<sample> Samples = new ArrayList<sample>();
    FileReader fr = new FileReader(new File(fileName));
    CsvReader reader = new CsvReader(fr);
    String splitRow[];
    sample newSample = null;
    while (reader.readRecord()) {
        splitRow = reader.getValues();
        try {
            if (splitRow[0].matches(".*Sample Name.*")) {
                newSample = new sample();
                newSample.setSampleName(splitRow[0]);
            }
            if (splitRow[2].matches(".*lysoPC_50.*")) {
                newSample.setLysoPC(splitRow);
            } else if (splitRow[2].matches(".*PC_50.*")) {
                newSample.setPC(splitRow);
            } else if (splitRow[2].matches(".*TG_50.*")) {
                newSample.setTG(splitRow);
            } else if (splitRow[2].matches(".*LPC.*")) {
                newSample.setLPC(splitRow);
            } else if (splitRow[2].matches(".*PCD.*")) {
                newSample.setPCD(splitRow);
            } else if (splitRow[2].matches(".*TGC.*")) {
                newSample.setTGC(splitRow);
                Samples.add(newSample);
            }
        } catch (Exception e) {
            // skip malformed rows
        }
    }
    reader.close();
    fr.close();
    return Samples;
}
From source file:org.kuali.kfs.gl.batch.service.impl.PosterServiceImpl.java
/**
 * Post scrubbed GL entries to GL tables.
 */
@Override
public void postMainEntries() {
    LOG.debug("postMainEntries() started");
    Date runDate = dateTimeService.getCurrentSqlDate();
    try {
        FileReader INPUT_GLE_FILE = new FileReader(batchFileDirectoryName + File.separator
                + GeneralLedgerConstants.BatchFileSystem.POSTER_INPUT_FILE
                + GeneralLedgerConstants.BatchFileSystem.EXTENSION);
        File OUTPUT_ERR_FILE = new File(batchFileDirectoryName + File.separator
                + GeneralLedgerConstants.BatchFileSystem.POSTER_ERROR_OUTPUT_FILE
                + GeneralLedgerConstants.BatchFileSystem.EXTENSION);
        postEntries(PosterService.MODE_ENTRIES, INPUT_GLE_FILE, null, OUTPUT_ERR_FILE);
        INPUT_GLE_FILE.close();
    } catch (FileNotFoundException e1) {
        e1.printStackTrace();
        throw new RuntimeException("PosterMainEntries Stopped: " + e1.getMessage(), e1);
    } catch (IOException ioe) {
        LOG.error("postMainEntries stopped due to: " + ioe.getMessage(), ioe);
        throw new RuntimeException(ioe);
    }
}
From source file:org.kuali.kfs.gl.batch.service.impl.PosterServiceImpl.java
/**
 * Post ICR GL entries to GL tables.
 */
@Override
public void postIcrEntries() {
    LOG.debug("postIcrEntries() started");
    Date runDate = dateTimeService.getCurrentSqlDate();
    try {
        FileReader INPUT_GLE_FILE = new FileReader(batchFileDirectoryName + File.separator
                + GeneralLedgerConstants.BatchFileSystem.ICR_POSTER_INPUT_FILE
                + GeneralLedgerConstants.BatchFileSystem.EXTENSION);
        File OUTPUT_ERR_FILE = new File(batchFileDirectoryName + File.separator
                + GeneralLedgerConstants.BatchFileSystem.ICR_POSTER_ERROR_OUTPUT_FILE
                + GeneralLedgerConstants.BatchFileSystem.EXTENSION);
        postEntries(PosterService.MODE_ICR, INPUT_GLE_FILE, null, OUTPUT_ERR_FILE);
        INPUT_GLE_FILE.close();
    } catch (FileNotFoundException e1) {
        e1.printStackTrace();
        throw new RuntimeException("PosterIcrEntries Stopped: " + e1.getMessage(), e1);
    } catch (IOException ioe) {
        LOG.error("postIcrEntries stopped due to: " + ioe.getMessage(), ioe);
        throw new RuntimeException(ioe);
    }
}
From source file:ca.sqlpower.object.annotation.SPAnnotationProcessor.java
/**
 * Generates and returns the GPL license header in a comment, as Eclipse
 * does whenever a new source file is created. The license is taken from
 * src/license_in_comment.txt.
 */
private String generateLicense() {
    StringBuilder sb = new StringBuilder();
    try {
        FileReader fr = new FileReader(LICENSE_COMMENT_FILE_PATH);
        BufferedReader br = new BufferedReader(fr);
        String line;
        while ((line = br.readLine()) != null) {
            niprintln(sb, line);
        }
        // closing the BufferedReader also closes the underlying FileReader
        br.close();
    } catch (FileNotFoundException e) {
        throw new RuntimeException(e);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    return sb.toString();
}