List of usage examples for the java.io.BufferedWriter.append method
public Writer append(CharSequence csq) throws IOException
From source file:org.ihtsdo.classifier.ClassificationRunner.java
/**
 * Writes one relationship row in RF2 tab-delimited format to the given writer.
 * Emits the ten columns joined by tabs, terminated with CRLF (the RF2 line
 * ending). The writer is neither flushed nor closed here; the caller owns it.
 *
 * @param bw the writer to append the row to
 * @param relationshipId the relationship id
 * @param effectiveTime the effective time
 * @param active the active flag
 * @param moduleId the module id
 * @param sourceId the source concept id
 * @param destinationId the destination concept id
 * @param relationshipGroup the relationship group number
 * @param relTypeId the relationship type id
 * @param characteristicTypeId the characteristic type id
 * @param modifierId the modifier id
 * @throws IOException if writing to the underlying stream fails
 */
public void writeRF2TypeLine(BufferedWriter bw, String relationshipId, String effectiveTime, String active,
        String moduleId, String sourceId, String destinationId, int relationshipGroup, String relTypeId,
        String characteristicTypeId, String modifierId) throws IOException {
    bw.append(relationshipId + "\t" + effectiveTime + "\t" + active + "\t" + moduleId + "\t" + sourceId + "\t"
            + destinationId + "\t" + relationshipGroup + "\t" + relTypeId + "\t" + characteristicTypeId + "\t"
            + modifierId);
    // RF2 files use Windows-style line endings regardless of platform.
    bw.append("\r\n");
}
From source file:com.qspin.qtaste.ui.xmleditor.TestRequirementEditor.java
public void save() { File xmlFile = new File(currentXMLFile); String path = xmlFile.getParent(); BufferedWriter output = null; try {//from w w w . j a v a 2 s . com String outputFile = path + File.separator + StaticConfiguration.TEST_REQUIREMENTS_FILENAME; output = new BufferedWriter(new FileWriter(new File(outputFile))); output.write("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>"); output.newLine(); output.write("<" + XMLFile.ROOT_ELEMENT + ">"); for (TestRequirement req : m_TestRequirementModel.getRequirements()) { output.newLine(); output.write("\t<" + XMLFile.REQUIREMENT_ELEMENT + " "); output.append(XMLFile.REQUIREMENT_ID + "=\""); output.append(req.getIdEscapeXml()); output.append("\">"); for (String dataId : req.getDataId()) { if (dataId.equals(TestRequirement.ID)) { continue; } output.newLine(); output.append("\t\t<" + dataId.replace(" ", XMLFile.SPACE_REPLACEMENT) + ">"); output.append(req.getDataEscapeXml(dataId)); output.append("</" + dataId.replace(" ", XMLFile.SPACE_REPLACEMENT) + ">"); } output.newLine(); output.append("\t</" + XMLFile.REQUIREMENT_ELEMENT + ">"); } output.newLine(); output.write("</" + XMLFile.ROOT_ELEMENT + ">"); output.close(); } catch (IOException ex) { logger.error(ex.getMessage()); } finally { try { if (output != null) output.close(); } catch (IOException ex) { logger.error(ex.getMessage()); } } // reload loadXMLFile(currentXMLFile); setModified(false); }
From source file:org.kuali.ole.gl.batch.BatchSortUtil.java
private static void mergeFiles(File tempSortDir, int numFiles, String outputFileName, Comparator<String> comparator) { try {/* w w w . j a v a2 s . c om*/ ArrayList<FileReader> mergefr = new ArrayList<FileReader>(numFiles); ArrayList<BufferedReader> mergefbr = new ArrayList<BufferedReader>(numFiles); // temp buffer for writing - contains the minimum record from each file ArrayList<String> fileRows = new ArrayList<String>(numFiles); BufferedWriter bw = new BufferedWriter(new FileWriter(outputFileName)); boolean someFileStillHasRows = false; // Iterate over all the files, getting the first line in each file for (int i = 0; i < numFiles; i++) { // open a file reader for each file mergefr.add(new FileReader(new File(tempSortDir, "chunk_" + i))); mergefbr.add(new BufferedReader(mergefr.get(i))); // get the first row String line = mergefbr.get(i).readLine(); if (line != null) { fileRows.add(line); someFileStillHasRows = true; } else { fileRows.add(null); } } while (someFileStillHasRows) { String min = null; int minIndex = 0; // index of the file with the minimum record // init for later compare - assume the first file has the minimum String line = fileRows.get(0); if (line != null) { min = line; minIndex = 0; } else { min = null; minIndex = -1; } // determine the minimum record of the top lines of each file // check which one is min for (int i = 1; i < fileRows.size(); i++) { line = fileRows.get(i); if (line != null) { if (min != null) { if (comparator.compare(line, min) < 0) { minIndex = i; min = line; } } else { min = line; minIndex = i; } } } if (minIndex < 0) { someFileStillHasRows = false; } else { // write to the sorted file bw.append(fileRows.get(minIndex)).append('\n'); // get another row from the file that had the min line = mergefbr.get(minIndex).readLine(); if (line != null) { fileRows.set(minIndex, line); } else { // file is out of rows, set to null so it is ignored fileRows.set(minIndex, null); } } // check if one still has rows for (int i = 0; i < 
fileRows.size(); i++) { someFileStillHasRows = false; if (fileRows.get(i) != null) { if (minIndex < 0) { throw new RuntimeException( "minIndex < 0 and row found in chunk file " + i + " : " + fileRows.get(i)); } someFileStillHasRows = true; break; } } // check the actual files one more time if (!someFileStillHasRows) { //write the last one not covered above for (int i = 0; i < fileRows.size(); i++) { if (fileRows.get(i) == null) { line = mergefbr.get(i).readLine(); if (line != null) { someFileStillHasRows = true; fileRows.set(i, line); } } } } } // close all the files bw.close(); for (BufferedReader br : mergefbr) { br.close(); } for (FileReader fr : mergefr) { fr.close(); } } catch (Exception ex) { LOG.error("Exception merging the sorted files", ex); throw new RuntimeException("Exception merging the sorted files", ex); } }
From source file:org.gwaspi.statistics.ChiSqrBoundaryCalculator.java
protected static void calculateChisqrBoundaryBySampling() throws IOException { FileWriter repFW = new FileWriter(boundaryPath); BufferedWriter repBW = new BufferedWriter(repFW); NetcdfFile ncfile = NetcdfFile.open(netCDFFile); List<Dimension> dims = ncfile.getDimensions(); Dimension sizeDim = dims.get(0); Dimension simsDim = dims.get(1); String varName = "distributions"; Variable distributions = ncfile.findVariable(varName); try {// ww w . j a va 2s . c om for (int i = 0; i < pointsNb; i++) { //distributions(i:i:1, 0:simsNb:1) ArrayDouble.D2 rdDoubleArrayD2 = (ArrayDouble.D2) distributions .read(i + ":" + i + ":1, 0:" + (simsDim.getLength() - 1) + ":1"); ArrayDouble.D1 rdDoubleArrayD1 = (D1) rdDoubleArrayD2.reduce(); SortedSet<Double> currentTS = new TreeSet<Double>(); for (int j = 0; j < rdDoubleArrayD2.getSize(); j++) { currentTS.add(rdDoubleArrayD1.get(j)); } double currentTot = 0; int loCount = 0; double low95 = 0; int topCount = 0; double top95 = 0; for (Double key : currentTS) { long lowLimit = Math.round(simNb * lowFrac) - 1; if (loCount == lowLimit) { low95 = key; loCount++; } else { loCount++; } long uppLimit = Math.round(simNb * uppFrac) - 1; if (topCount == currentTS.size() - uppLimit) { top95 = key; topCount++; } else { topCount++; } currentTot += key; } double avg = currentTot / simNb; StringBuilder sb = new StringBuilder(); sb.append(top95); sb.append(","); sb.append(avg); sb.append(","); sb.append(low95); repBW.append(sb + "\n"); } } catch (IOException ex) { log.error("Cannot read data", ex); } catch (InvalidRangeException ex) { log.error("Cannot read data", ex); } repBW.close(); repFW.close(); log.info("Confidence boundary created for {} points", N); }
From source file:org.n52.wps.server.request.ExecuteRequest.java
/**
 * Persists the raw request parameters to the request database, serialized as
 * one {@code key=value} line per parameter.
 *
 * <p>The lines are buffered into memory, then handed to the database layer as
 * an input stream keyed by this request's unique id. Failures are logged and
 * swallowed: storing the request is best-effort and must not fail the request
 * itself.
 *
 * @param map the raw (case-insensitive) parameter map of this request
 */
private void storeRequest(CaseInsensitiveMap map) {
    BufferedWriter lineWriter = null;
    ByteArrayOutputStream buffer = null;
    ByteArrayInputStream payload = null;
    try {
        buffer = new ByteArrayOutputStream();
        lineWriter = new BufferedWriter(new OutputStreamWriter(buffer));
        for (Object key : map.keySet()) {
            Object value = map.get(key);
            // Multi-valued parameters arrive as String[]; only the first entry is kept.
            String valueString = (value instanceof String[]) ? ((String[]) value)[0] : value.toString();
            lineWriter.append(key.toString()).append('=').append(valueString);
            lineWriter.newLine();
        }
        lineWriter.flush();
        payload = new ByteArrayInputStream(buffer.toByteArray());
        DatabaseFactory.getDatabase().insertRequest(getUniqueId().toString(), payload, false);
    } catch (Exception e) {
        LOGGER.error("Exception storing ExecuteRequest", e);
    } finally {
        IOUtils.closeQuietly(lineWriter);
        IOUtils.closeQuietly(buffer);
        IOUtils.closeQuietly(payload);
    }
}
From source file:ibme.sleepap.recording.SignalsRecorder.java
@Override public void onSensorChanged(SensorEvent event) { synchronized (this) { // Checking which type of sensor called this listener // In this case it is the Accelerometer (the next is the Magnetomer) if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) { accelerometerCurrentTime = System.currentTimeMillis(); if (accelerometerCurrentTime - lastAccelerometerReadTime > (1000 / Constants.PARAM_SAMPLERATE_ACCELEROMETER - 1000 / (Constants.PARAM_SAMPLERATE_ACCELEROMETER * Constants.PARAM_UPSAMPLERATE_ACCELEROMETER))) { lastAccelerometerReadTime = accelerometerCurrentTime; float xRaw = event.values[0]; float yRaw = event.values[1]; float zRaw = event.values[2]; // Extracts unwanted gravity component from the // accelerometer signal. float alpha = Constants.PARAM_GRAVITY_FILTER_COEFFICIENT; runningGravityComponents[0] = runningGravityComponents[0] * alpha + (1 - alpha) * xRaw; runningGravityComponents[1] = runningGravityComponents[1] * alpha + (1 - alpha) * yRaw; runningGravityComponents[2] = runningGravityComponents[2] * alpha + (1 - alpha) * zRaw; float xAccel = xRaw - runningGravityComponents[0]; float yAccel = yRaw - runningGravityComponents[1]; float zAccel = zRaw - runningGravityComponents[2]; double magnitudeSquare = xAccel * xAccel + yAccel * yAccel + zAccel * zAccel; double magnitude = Math.sqrt(magnitudeSquare); actigraphyQueue.add(magnitude); int secsToDisplay = Integer.parseInt(sharedPreferences.getString(Constants.PREF_GRAPH_SECONDS, Constants.DEFAULT_GRAPH_RANGE)); int numberExtraSamples = actigraphyQueue.size() - (secsToDisplay * Constants.PARAM_SAMPLERATE_ACCELEROMETER); if (numberExtraSamples > 0) { for (int i = 0; i < numberExtraSamples; i++) { actigraphyQueue.remove(); }//from w ww .j a va2 s . 
c om } // Saves accelerometer data, necessary for the orientation // computation System.arraycopy(event.values, 0, latestAccelerometerEventValues, 0, 3); pushBackAccelerometerValues(xRaw, yRaw, zRaw); if (accelerometerCurrentTime - lastAccelerometerRecordedTime > Constants.PARAM_ACCELEROMETER_RECORDING_PERIOD + 1000 / Constants.PARAM_SAMPLERATE_ACCELEROMETER) { if (startRecordingFlag) { writeActigraphyLogVariance(); } gravitySum = gravitySquaredSum = 0; varianceCounter = 0; lastAccelerometerRecordedTime = accelerometerCurrentTime; } if (startRecordingFlag) { writeRawActigraphy(); } } } // Checking if the Magnetomer called this listener if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) { // Copying magnetometer measures. System.arraycopy(event.values, 0, mGeoMags, 0, 3); if (SensorManager.getRotationMatrix(mRotationM, null, latestAccelerometerEventValues, mGeoMags)) { SensorManager.getOrientation(mRotationM, mOrientation); // Finding current orientation requires both Accelerometer // (using the previous measure) and Magnetometer data. // Converting radians to degrees (yaw, pitch, roll) mOrientation[0] = mOrientation[0] * Constants.CONST_DEGREES_PER_RADIAN; mOrientation[1] = mOrientation[1] * Constants.CONST_DEGREES_PER_RADIAN; mOrientation[2] = mOrientation[2] * Constants.CONST_DEGREES_PER_RADIAN; // The values (1,2,3,4) attributed for // supine/prone/left/right match the // ones attributed in VISI text files. int positionValue = 0; // Supine (4). if (-45 < mOrientation[1] && mOrientation[1] < 45 && -45 < mOrientation[2] && mOrientation[2] < 45) { positionValue = Constants.CODE_POSITION_SUPINE; position = Position.Supine; } // Prone (1). if ((((-180 < mOrientation[2] && mOrientation[2] < -135) || (135 < mOrientation[2] && mOrientation[2] < 180)) && -45 < mOrientation[1] && mOrientation[1] < 45)) { positionValue = Constants.CODE_POSITION_PRONE; position = Position.Prone; } // Right (2). 
if (-90 < mOrientation[2] && mOrientation[2] < -45) { positionValue = Constants.CODE_POSITION_RIGHT; position = Position.Right; } // Left (3). if (45 < mOrientation[2] && mOrientation[2] < 90) { positionValue = Constants.CODE_POSITION_LEFT; position = Position.Left; } // Sitting up (5). if ((((-135 < mOrientation[1] && mOrientation[1] < -45) || (45 < mOrientation[1] && mOrientation[1] < 135)) && -45 < mOrientation[2] && mOrientation[2] < 45)) { positionValue = Constants.CODE_POSITION_SITTING; position = Position.Sitting; } if ((oldPositionValue != positionValue) && (positionValue != 0) && startRecordingFlag) { updatePositionChangeTime(oldPositionValue); oldPositionValue = positionValue; try { // Write raw body position data BufferedWriter orientationBufferedWriter = new BufferedWriter( new FileWriter(orientationFile, true)); orientationBufferedWriter.append(String.valueOf(System.currentTimeMillis()) + ","); orientationBufferedWriter.append(String.valueOf(positionValue) + "\n"); orientationBufferedWriter.flush(); orientationBufferedWriter.close(); } catch (IOException e) { Log.e(Constants.CODE_APP_TAG, "Error writing orientation data to file", e); } } } } } }
From source file:org.gwaspi.statistics.ChiSqrBoundaryCalculator.java
protected static void calculateChisqrBoundaryByFormula() throws IOException, MathException { FileWriter repFW = new FileWriter(boundaryPath); BufferedWriter repBW = new BufferedWriter(repFW); NetcdfFile ncfile = NetcdfFile.open(netCDFFile); List<Dimension> dims = ncfile.getDimensions(); Dimension sizeDim = dims.get(0); Dimension simsDim = dims.get(1); String varName = "distributions"; Variable distributions = ncfile.findVariable(varName); try {/*from w ww . j a va 2 s . c o m*/ for (int i = 0; i < pointsNb; i++) { //distributions(i:i:1, 0:simsNb:1) ArrayDouble.D2 rdDoubleArrayD2 = (ArrayDouble.D2) distributions .read(i + ":" + i + ":1, 0:" + (simsDim.getLength() - 1) + ":1"); ArrayDouble.D1 rdDoubleArrayD1 = (D1) rdDoubleArrayD2.reduce(); double sampleSize = rdDoubleArrayD2.getSize(); double currentTot = 0; double[] allValues = new double[(int) sampleSize]; for (int j = 0; j < sampleSize; j++) { allValues[j] = rdDoubleArrayD1.get(j); currentTot += rdDoubleArrayD1.get(j); } StandardDeviation stdDev = new StandardDeviation(); double stdDevValue = stdDev.evaluate(allValues); double currentAvg = currentTot / simNb; TDistributionImpl tDistImpl = new TDistributionImpl(sampleSize - 1); double tInvCumulProb = tDistImpl.inverseCumulativeProbability(0.05d); double tCumulProb = tDistImpl.cumulativeProbability(0.05d); // confidenceInterval = (STDEV(Ys) / SQRT(COUNT(Ys))) * TINV(0.05, COUNT(Ys) - 1) double confidenceInterval = (stdDevValue / Math.sqrt(sampleSize)) * tInvCumulProb; double low95 = currentAvg - confidenceInterval; double top95 = currentAvg + confidenceInterval; StringBuilder sb = new StringBuilder(); sb.append(top95); sb.append(","); sb.append(currentAvg); sb.append(","); sb.append(low95); repBW.append(sb + "\n"); } } catch (IOException ex) { log.error("Cannot read data", ex); } catch (InvalidRangeException ex) { log.error("Cannot read data", ex); } repBW.close(); repFW.close(); log.info("Confidence boundary created for {} points", N); }
From source file:org.olat.upgrade.OLATUpgrade_6_3_0.java
/**
 * Converts a legacy course-log file line-by-line into Apache log format and
 * writes the result to a same-named file in {@code outDir}.
 *
 * <p>Copied from the 6.2.x version of CourseLogsArchiveManager with file
 * handling modified to go directly via java.io instead of VFS. Each input line
 * is run through {@code convertLine}; an empty conversion result is treated as
 * a failed conversion (the line is still written, but the file is counted in
 * {@code filesWithApacheConversionErrors_}).
 *
 * @param leaf the source log file to convert
 * @param outDir the directory to write the converted file into
 * @return the converted file, or {@code null} if an I/O error occurred
 */
private File readSequence(final File leaf, final File outDir) {
    String line;
    final File resultingFile = new File(outDir, leaf.getName());
    BufferedReader br = null;
    FileOutputStream fos = null;
    BufferedWriter writer = null;
    boolean zeroErrors = true;
    try {
        br = new BufferedReader(new InputStreamReader(new FileInputStream(leaf)));
        fos = new FileOutputStream(resultingFile);
        writer = new BufferedWriter(new OutputStreamWriter(fos));
        while (null != (line = br.readLine())) {
            line = convertLine(line);
            // An empty result means convertLine could not parse this line;
            // flag the whole file as having conversion errors.
            if (line.length() == 0) {
                log.warn("**** !!!! Conversion failed with file: " + leaf);
                zeroErrors = false;
            }
            writer.append(line);
            // Apache log format uses CRLF line endings.
            writer.append("\r\n");
        }
    } catch (final IOException e) {
        log.error("**** !!!! Could not convert file to apache format: " + leaf);
        return null;
    } finally {
        // Count the file once if any line failed to convert.
        if (!zeroErrors) {
            filesWithApacheConversionErrors_++;
        }
        if (br != null) {
            try {
                br.close();
            } catch (final Exception e) {
                // best-effort close; nothing useful to do here
            }
        }
        // Closing the BufferedWriter also closes the wrapped FileOutputStream,
        // so fos needs no separate close.
        if (writer != null) {
            try {
                writer.close();
            } catch (final Exception e) {
                // best-effort close; nothing useful to do here
            }
        }
    }
    return resultingFile;
}
From source file:org.ihtsdo.classifier.ClassificationRunner.java
/**
 * Consolidates inferred relationships into {@code newInferredRelationships}:
 * first copies every line of the freshly classified temp file, then appends
 * each previous inferred-relationship file (skipping its header line and any
 * relationship whose id is in {@code retiredSet}). The temp file is deleted
 * afterwards. All files are read/written as UTF-8 with CRLF line endings.
 *
 * <p>Fixes relative to the original: the writer and every reader are now
 * closed via {@code try/finally}, so a failure mid-copy no longer leaks file
 * handles; the pointless {@code = null} reference resets were removed.
 *
 * @throws IOException if any file cannot be read or written
 */
private void consolidateRels() throws IOException {
    BufferedWriter bw = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(newInferredRelationships), "UTF-8"));
    try {
        // Copy the freshly classified relationships verbatim (including header).
        BufferedReader tempReader = new BufferedReader(
                new InputStreamReader(new FileInputStream(tempRelationshipStore), "UTF-8"));
        try {
            String line;
            while ((line = tempReader.readLine()) != null) {
                bw.append(line);
                bw.append("\r\n");
            }
        } finally {
            tempReader.close();
        }
        // Append prior inferred relationships, dropping retired ones.
        for (String relFile : previousInferredRelationships) {
            BufferedReader prevReader = new BufferedReader(
                    new InputStreamReader(new FileInputStream(relFile), "UTF-8"));
            try {
                prevReader.readLine(); // skip the header line
                String line;
                while ((line = prevReader.readLine()) != null) {
                    String[] spl = line.split("\t", -1);
                    // Column 0 is the relationship id; skip retired relationships.
                    if (retiredSet.contains(spl[0])) {
                        continue;
                    }
                    bw.append(line);
                    bw.append("\r\n");
                }
            } finally {
                prevReader.close();
            }
        }
    } finally {
        bw.close();
    }
    tempRelationshipStore.delete();
}
From source file:gov.utah.dts.det.ccl.service.impl.FacilityServiceImpl.java
/**
 * Nightly scheduled job (22:00, Mon-Fri) that builds a CSV extract of facility
 * data and uploads it to the eRep host over SCP.
 *
 * <p>All connection settings come from application properties; if any required
 * property is blank the export is skipped with a debug message. Facilities
 * that are "Active" but have no license type are excluded from the extract.
 *
 * <p>Fix: the CSV writer is now closed in a {@code finally} block, so a
 * failure while writing a row no longer leaks the file handle.
 *
 * @throws Exception if the SCP transfer (or file creation) fails
 */
@Scheduled(cron = "0 0 22 * * MON-FRI")
public void sendERepFile() throws Exception {
    logger.debug("Running eRep export");
    String host = applicationService.getApplicationPropertyValue(ApplicationPropertyKey.EREP_HOST.getKey());
    String remotePath = applicationService
            .getApplicationPropertyValue(ApplicationPropertyKey.EREP_REMOTE_PATH.getKey());
    String filename = applicationService
            .getApplicationPropertyValue(ApplicationPropertyKey.EREP_FILENAME.getKey());
    String username = applicationService
            .getApplicationPropertyValue(ApplicationPropertyKey.EREP_USERNAME.getKey());
    String password = applicationService
            .getApplicationPropertyValue(ApplicationPropertyKey.EREP_PASSWORD.getKey());
    String port = applicationService.getApplicationPropertyValue(ApplicationPropertyKey.EREP_PORT.getKey());
    if (StringUtils.isNotBlank(host) && StringUtils.isNotBlank(remotePath) && StringUtils.isNotBlank(filename)
            && StringUtils.isNotBlank(username) && StringUtils.isNotBlank(password)) {
        logger.debug("Creating file " + filename);
        File file = new File(filename);
        // Locally scoped SimpleDateFormat instances: safe despite the class
        // not being thread-safe, since they never escape this method.
        SimpleDateFormat expDateFormatter = new SimpleDateFormat("MM/dd/yyyy");
        SimpleDateFormat modDateFormatter = new SimpleDateFormat("ddMMMyy");
        BufferedWriter writer = new BufferedWriter(new FileWriter(file));
        try {
            writer.append(
                    "\"Internal Id\",\"Type of Facility\",\"Facility Name\",\"Phone\",\"Address\",\"City\",\"State\",\"Zip Code\",\"Total # Adults\",\"Total # Youth\",\"Expiration Date\",\"Licensor\",\"Status\",\"Date of Modification\"\n");
            List<ErepView> erepViews = facilityDao.getErepViews();
            for (ErepView view : erepViews) {
                // Exclude "Active" facilities that lack a license type.
                if (!("Active".equals(view.getStatus()) && StringUtils.isBlank(view.getLicenseType()))) {
                    writer.append(view.getId().toString());
                    addCsvField(writer, view.getLicenseType() == null ? "FX" : view.getLicenseType());
                    addCsvField(writer, view.getFacilityName());
                    if (view.getPrimaryPhone() != null && view.getPrimaryPhone().length() == 10) {
                        // Format 10-digit phone numbers as xxx-xxx-xxxx.
                        writer.append(CSV_FIELD_START);
                        writer.append(view.getPrimaryPhone().substring(0, 3));
                        writer.append('-');
                        writer.append(view.getPrimaryPhone().substring(3, 6));
                        writer.append('-');
                        writer.append(view.getPrimaryPhone().substring(6, 10));
                        writer.append(CSV_FIELD_END);
                    } else {
                        addCsvField(writer, view.getPrimaryPhone());
                    }
                    // Address: line one plus optional ", line two" in a single field.
                    writer.append(CSV_FIELD_START);
                    writer.append(view.getAddressOne() == null ? "" : view.getAddressOne());
                    if (!StringUtils.isEmpty(view.getAddressTwo())) {
                        writer.append(", ");
                        writer.append(view.getAddressTwo());
                    }
                    writer.append(CSV_FIELD_END);
                    addCsvField(writer, view.getCity());
                    addCsvField(writer, view.getState());
                    // Truncate ZIP+4 codes to the 5-digit prefix.
                    addCsvField(writer, view.getZipCode() != null && view.getZipCode().length() > 5
                            ? view.getZipCode().substring(0, 5)
                            : view.getZipCode());
                    // Slot counts are unquoted numeric fields; null means zero.
                    writer.append(",");
                    writer.append(view.getAdultTotalSlots() == null ? "0" : view.getAdultTotalSlots().toString());
                    writer.append(",");
                    writer.append(view.getYouthTotalSlots() == null ? "0" : view.getYouthTotalSlots().toString());
                    addCsvField(writer, view.getExpirationDate() == null ? ""
                            : expDateFormatter.format(view.getExpirationDate()));
                    // Licensor: "First Last", omitting whichever part is blank.
                    writer.append(CSV_FIELD_START);
                    writer.append(
                            StringUtils.isBlank(view.getLicensorFirstName()) ? "" : view.getLicensorFirstName());
                    writer.append(StringUtils.isBlank(view.getLicensorLastName()) ? "" : " ");
                    writer.append(
                            StringUtils.isBlank(view.getLicensorLastName()) ? "" : view.getLicensorLastName());
                    writer.append(CSV_FIELD_END);
                    addCsvField(writer, view.getStatus());
                    addCsvField(writer, view.getModifiedDate() == null ? ""
                            : modDateFormatter.format(view.getModifiedDate()).toUpperCase());
                    writer.append("\n");
                }
            }
            writer.flush();
        } finally {
            // Close even if a row write fails, so the file handle is released.
            writer.close();
        }
        logger.debug("File created. Ready to send to eRep.");
        ScpUtil.scpTo(host, StringUtils.isBlank(port) ? -1 : Integer.parseInt(port), username, password, file,
                remotePath);
    } else {
        logger.debug("Unable to send file. Required properties were not provided.");
    }
}