List of usage examples for java.io FileWriter write
public void write(int c) throws IOException
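Most of the examples below actually call the write(String str) overload that FileWriter inherits from java.io.Writer; write(int c) writes a single character. Here is a minimal sketch of both, using a placeholder file name and try-with-resources (this snippet is not taken from any of the projects listed below):

import java.io.FileWriter;
import java.io.IOException;

public class FileWriterWriteExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes the writer even if write() throws
        try (FileWriter writer = new FileWriter("example.txt")) {
            writer.write('A');              // write(int c): writes a single character
            writer.write("Hello, world\n"); // write(String str), inherited from Writer
        }
    }
}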
From source file:com.amazonaws.eclipse.ec2.keypairs.KeyPairManager.java
/**
 * Requests a new key pair from EC2 with the specified name, and saves the
 * private key portion in the specified directory.
 *
 * @param accountId
 *            The account id that owns the key name
 * @param keyPairName
 *            The name of the requested key pair.
 * @param keyPairDirectory
 *            The directory in which to save the private key file.
 * @param ec2RegionOverride
 *            The region where the EC2 key pair is created.
 * @throws IOException
 *             If any problems were encountered storing the private key to
 *             disk.
 * @throws AmazonClientException
 *             If any problems were encountered requesting a new key pair
 *             from EC2.
 */
public void createNewKeyPair(String accountId, String keyPairName, String keyPairDirectory,
        Region ec2RegionOverride) throws IOException, AmazonClientException {
    File keyPairDirectoryFile = new File(keyPairDirectory);
    if (!keyPairDirectoryFile.exists()) {
        if (!keyPairDirectoryFile.mkdirs()) {
            throw new IOException("Unable to create directory: " + keyPairDirectory);
        }
    }

    /*
     * It's possible that customers could have two keys with the same name,
     * so silently rename to avoid such a conflict. This isn't the most
     * straightforward user interface, but probably better than enforced
     * directory segregation by account, or else disallowing identical
     * names across accounts.
     */
    File privateKeyFile = new File(keyPairDirectoryFile, keyPairName + PRIVATE_KEY_SUFFIX);
    int i = 1;
    while (privateKeyFile.exists()) {
        // keep incrementing the suffix until an unused file name is found
        privateKeyFile = new File(keyPairDirectoryFile, keyPairName + "-" + i++ + PRIVATE_KEY_SUFFIX);
    }

    CreateKeyPairRequest request = new CreateKeyPairRequest();
    request.setKeyName(keyPairName);

    AmazonEC2 ec2 = null;
    if (ec2RegionOverride == null) {
        ec2 = Ec2Plugin.getDefault().getDefaultEC2Client();
    } else {
        ec2 = AwsToolkitCore.getClientFactory()
                .getEC2ClientByEndpoint(ec2RegionOverride.getServiceEndpoint(ServiceAbbreviations.EC2));
    }

    CreateKeyPairResult response = ec2.createKeyPair(request);
    KeyPair keyPair = response.getKeyPair();
    String privateKey = keyPair.getKeyMaterial();

    FileWriter writer = new FileWriter(privateKeyFile);
    try {
        writer.write(privateKey);
    } finally {
        writer.close();
    }

    registerKeyPair(accountId, keyPairName, privateKeyFile.getAbsolutePath());

    /*
     * SSH requires our private key be locked down.
     */
    try {
        /*
         * TODO: We should model these platform differences better (and
         * support windows).
         */
        Runtime.getRuntime().exec("chmod 600 " + privateKeyFile.getAbsolutePath());
    } catch (IOException e) {
        Status status = new Status(Status.WARNING, Ec2Plugin.PLUGIN_ID,
                "Unable to restrict permissions on private key file: " + e.getMessage(), e);
        StatusManager.getManager().handle(status, StatusManager.LOG);
    }
}
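A side note on the chmod call above: as the TODO in the snippet points out, shelling out to chmod only works where that binary exists. A hedged, hypothetical alternative (not part of the AWS toolkit code) is to apply the same owner-only restriction through java.nio.file; the key file path below is just a placeholder.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.util.EnumSet;

public class RestrictPrivateKeyPermissions {
    public static void main(String[] args) throws IOException {
        // placeholder path; real code would use privateKeyFile.toPath()
        Path privateKey = Paths.get("my-keypair.pem");

        // equivalent of "chmod 600": owner read/write only; throws
        // UnsupportedOperationException on non-POSIX file systems (e.g. Windows),
        // so a real implementation would still need a platform check
        Files.setPosixFilePermissions(privateKey,
                EnumSet.of(PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE));
    }
}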
From source file:PSOResultFileMerger.java
@Override
public void run(CommandLine commandLine) throws Exception {
    Problem problem = null;
    NondominatedPopulation mergedSet = null;
    PSOResultFileReader reader = null;

    // setup the merged non-dominated population
    if (commandLine.hasOption("epsilon")) {
        double[] epsilon = TypedProperties.withProperty("epsilon", commandLine.getOptionValue("epsilon"))
                .getDoubleArray("epsilon", null);
        mergedSet = new EpsilonBoxDominanceArchive(new EpsilonBoxDominanceComparator(epsilon));
    } else {
        mergedSet = new NondominatedPopulation();
    }
    System.out.println("setup initial archive");

    try {
        // setup the problem
        if (commandLine.hasOption("problem")) {
            problem = ProblemFactory.getInstance().getProblem(commandLine.getOptionValue("problem"));
        } else {
            problem = new PSOProblemStub(Integer.parseInt(commandLine.getOptionValue("vars")),
                    Integer.parseInt(commandLine.getOptionValue("dimension")));
        }
        System.out.println("setup problem");

        boolean changed = false;

        // read in result files
        for (String filename : commandLine.getArgs()) {
            try {
                reader = new PSOResultFileReader(problem, new File(filename));
                System.out.println("initialized reader");

                while (reader.hasNext()) {
                    changed = mergedSet.addAll(reader.next().getPopulation());
                    System.out.println("Added to mergedSet. Changed: " + changed);
                }
            } finally {
                if (reader != null) {
                    reader.close();
                }
            }
        }

        File output = new File(commandLine.getOptionValue("output"));
        System.out.println("outputting");

        // output merged set
        if (commandLine.hasOption("resultFile")) {
            ResultFileWriter writer = null;

            // delete the file to avoid appending
            CoreUtils.delete(output);

            try {
                writer = new ResultFileWriter(problem, output);
                writer.append(new ResultEntry(mergedSet));
            } finally {
                if (writer != null) {
                    writer.close();
                }
            }
        } else {
            // PopulationIO.writeObjectives(output, mergedSet);
            FileWriter outputstream = new FileWriter(output);

            for (Solution solution : mergedSet) {
                int jj;
                for (jj = 0; jj < solution.getNumberOfVariables(); jj++) {
                    outputstream.write(((RealVariable) solution.getVariable(jj)).getValue() + " ");
                    System.out.print(((RealVariable) solution.getVariable(jj)).getValue() + " ");
                }
                for (jj = 0; jj < solution.getNumberOfObjectives() - 1; jj++) {
                    outputstream.write(solution.getObjective(jj) + " ");
                    System.out.print(solution.getObjective(jj) + " ");
                }
                outputstream.write(solution.getObjective(jj) + "\n");
                System.out.print(solution.getObjective(jj) + "\n");
                outputstream.flush();
            }

            // close the writer once all solutions have been written
            outputstream.close();
        }
    } finally {
        if (problem != null) {
            problem.close();
        }
    }
}
From source file:com.grillecube.editor.ModelEditor.java
private void writeConfig(File file) {
    try {
        String nl = System.getProperty("line.separator");
        FileWriter writer = new FileWriter(file);
        writer.write("{");
        writer.write(nl);
        for (String str : this._config.keySet()) {
            writer.write("\t\"");
            writer.write(str);
            writer.write("\":");
            writer.write(String.valueOf(this.getConfig(str)));
            writer.write(",");
            writer.write(nl);
        }
        writer.write("}");
        writer.write(nl);
        writer.flush();
        writer.close();
    } catch (Exception e) {
        Logger.get().log(Level.WARNING, "Couldnt create config file");
        e.printStackTrace(Logger.get().getPrintStream());
    }
}
From source file:DIA_Umpire_To_Skyline.FileThread.java
private void ChangeScanTitlePepXML() throws FileNotFoundException, IOException {
    File fileEntry = new File(FilenameUtils.getFullPath(mzXMLFile));
    String basename = FilenameUtils.getBaseName(mzXMLFile);

    for (File file : fileEntry.listFiles()) {
        if (file.isFile() && file.getAbsoluteFile().toString().toLowerCase().endsWith("pep.xml")) {
            String pepxmlbase = file.getName().split("\\.")[0];
            if (pepxmlbase.equals(basename + "_Q1") || pepxmlbase.equals(basename + "_Q2")
                    || pepxmlbase.equals(basename + "_Q3")) {
                BufferedReader reader = new BufferedReader(new FileReader(file));
                String outputname = file.getName().replace("_Q", ".ForLibQ");
                Logger.getRootLogger()
                        .info("Writing new pepXML files and correct the scan titles: " + outputname);
                FileWriter writer = new FileWriter(FilenameUtils.getFullPath(mzXMLFile)
                        + FilenameUtils.getBaseName(mzXMLFile) + "_Skyline/" + outputname);
                String line = "";
                while ((line = reader.readLine()) != null) {
                    writer.write(line.replaceAll(basename + "_Q", basename + ".ForLibQ") + "\n");
                }
                // close the reader as well so the pep.xml file handle is not leaked
                reader.close();
                writer.close();
            }
        }
    }
}
From source file:net.sf.reportengine.out.TestHtmlReportOutput.java
/**
 * Test method for
 * {@link net.sf.reportengine.out.AbstractFreemarkerReportOutput#close()}.
 */
@Test
public void testNonClosedWriter() throws IOException {
    final String OUTPUT_PATH = "./target/TestClosingWriter.html";
    FileWriter fileWriter = new FileWriter(OUTPUT_PATH);

    HtmlReportOutput classUnderTest = new HtmlReportOutput(fileWriter, false);
    classUnderTest.open();
    classUnderTest.output("emptyLine.ftl");
    classUnderTest.close();

    // close() was called on the report output, but the underlying writer
    // was constructed with the "do not manage the writer" flag, so it
    // should still be open and writable here
    fileWriter.write("\nthis text has been added after HtmlReportOutput has been closed");
    fileWriter.flush();
    fileWriter.close();

    List<String> lines = IOUtils.readLines(new FileReader(OUTPUT_PATH));
    assertEquals(3, lines.size());
    assertEquals("<br/>", lines.get(0));
    assertEquals("<br/>", lines.get(1));
    assertEquals("this text has been added after HtmlReportOutput has been closed", lines.get(2));
}
From source file:com.cyberway.issue.crawler.framework.CrawlScope.java
/**
 * Add a new seed to scope. By default, simply appends
 * to seeds file, though subclasses may handle differently.
 *
 * <p>This method is *not* sufficient to get the new seed
 * scheduled in the Frontier for crawling -- it only
 * affects the Scope's seed record (and decisions which
 * flow from seeds).
 *
 * @param curi CandidateUri to add
 * @return true if successful, false if add failed for any reason
 */
public boolean addSeed(final CandidateURI curi) {
    File f = getSeedfile();
    if (f != null) {
        try {
            FileWriter fw = new FileWriter(f, true);
            // Write to new (last) line the URL.
            fw.write("\n");
            fw.write("# Heritrix added seed "
                    + ((curi.getVia() != null) ? "redirect from " + curi.getVia() : "(JMX)") + ".\n");
            fw.write(curi.toString());
            fw.flush();
            fw.close();
            Iterator iter = seedListeners.iterator();
            while (iter.hasNext()) {
                ((SeedListener) iter.next()).addedSeed(curi);
            }
            return true;
        } catch (IOException e) {
            DevUtils.warnHandle(e, "problem writing new seed");
        }
    }
    return false;
}
From source file:com.mirth.connect.server.controllers.DefaultEventController.java
@Override
public String exportAllEvents() throws ControllerException {
    logger.debug("exporting events");

    long currentTimeMillis = System.currentTimeMillis();
    String currentDateTime = new SimpleDateFormat("yyyy-MM-dd-HHmmss").format(currentTimeMillis);

    String appDataDir = ControllerFactory.getFactory().createConfigurationController().getApplicationDataDir();
    File exportDir = new File(appDataDir, "exports");
    exportDir.mkdir();

    File exportFile = new File(exportDir, currentDateTime + "-events.txt");

    try {
        FileWriter writer = new FileWriter(exportFile, true);

        // write the CSV headers to the file
        writer.write(ServerEvent.getExportHeader());
        writer.write(System.getProperty("line.separator"));

        EventFilter filter = new EventFilter();
        int maxEventId = getMaxEventId();
        filter.setMaxEventId(maxEventId);
        int interval = 10;
        List<ServerEvent> events = getEvents(filter, null, interval);

        while (!events.isEmpty()) {
            for (ServerEvent event : events) {
                writer.write(event.toExportString());

                if (event.getId() <= maxEventId) {
                    maxEventId = event.getId() - 1;
                }
            }

            filter.setMaxEventId(maxEventId);
            events = getEvents(filter, null, interval);
        }

        IOUtils.closeQuietly(writer);
        logger.debug("events exported to file: " + exportFile.getAbsolutePath());

        ServerEvent event = new ServerEvent(
                ControllerFactory.getFactory().createConfigurationController().getServerId(),
                "Sucessfully exported events");
        event.addAttribute("file", exportFile.getAbsolutePath());
        dispatchEvent(event);
    } catch (IOException e) {
        throw new ControllerException("Error exporting events to file.", e);
    }

    return exportFile.getAbsolutePath();
}
From source file:cascading.tap.hadoop.DistCacheTapPlatformTest.java
@Test
public void testGlobSupport() throws Exception {
    getPlatform().copyFromLocal(inputFileLower);

    File dir = File.createTempFile("distcachetap", Long.toString(System.nanoTime()));
    if (dir.exists()) {
        if (dir.isDirectory())
            FileUtils.deleteDirectory(dir);
        else
            dir.delete();
    }
    dir.mkdirs();

    String[] data = new String[] { "1 A", "2 B", "3 C", "4 D", "5 E" };
    for (int i = 0; i < 5; i++) {
        FileWriter fw = new FileWriter(new File(dir.getAbsolutePath(), "upper_" + i + ".txt"));
        fw.write(data[i]);
        fw.close();
    }
    dir.deleteOnExit();

    getPlatform().copyFromLocal(dir.getAbsolutePath());

    Tap sourceLower = getPlatform().getTextFile(new Fields("offset", "line"), inputFileLower);
    Tap sourceUpper = new DistCacheTap(
            (Hfs) getPlatform().getTextFile(new Fields("offset", "line"), dir.getAbsolutePath() + "/*"));

    Map sources = new HashMap();
    sources.put("lower", sourceLower);
    sources.put("upper", sourceUpper);

    Tap sink = getPlatform().getTextFile(new Fields("line"), getOutputPath(getTestName() + "join"),
            SinkMode.REPLACE);

    Function splitter = new RegexSplitter(new Fields("num", "char"), " ");

    Pipe pipeLower = new Each(new Pipe("lower"), new Fields("line"), splitter);
    Pipe pipeUpper = new Each(new Pipe("upper"), new Fields("line"), splitter);

    Pipe splice = new HashJoin(pipeLower, new Fields("num"), pipeUpper, new Fields("num"), Fields.size(4));

    Map<Object, Object> properties = getProperties();

    Flow flow = getPlatform().getFlowConnector(properties).connect("distcache test", sources, sink, splice);

    flow.complete();

    validateLength(flow, 5);

    List<Tuple> values = getSinkAsList(flow);

    assertTrue(values.contains(new Tuple("1\ta\t1\tA")));
    assertTrue(values.contains(new Tuple("2\tb\t2\tB")));
    assertTrue(values.contains(new Tuple("3\tc\t3\tC")));
    assertTrue(values.contains(new Tuple("4\td\t4\tD")));
    assertTrue(values.contains(new Tuple("5\te\t5\tE")));
}
From source file:org.openspaces.maven.plugin.CreatePUProjectMojo.java
/**
 * Extracts the project files to the project directory.
 */
private void extract(URL url) throws Exception {
    packageDirs = packageName.replaceAll("\\.", "/");
    String puTemplate = DIR_TEMPLATES + "/" + template + "/";
    int length = puTemplate.length() - 1;
    BufferedInputStream bis = new BufferedInputStream(url.openStream());
    JarInputStream jis = new JarInputStream(bis);
    JarEntry je;
    byte[] buf = new byte[1024];
    int n;

    while ((je = jis.getNextJarEntry()) != null) {
        String jarEntryName = je.getName();
        PluginLog.getLog().debug("JAR entry: " + jarEntryName);
        if (je.isDirectory() || !jarEntryName.startsWith(puTemplate)) {
            continue;
        }
        String targetFileName = projectDir + jarEntryName.substring(length);

        // convert the ${gsGroupPath} to directory
        targetFileName = StringUtils.replace(targetFileName, FILTER_GROUP_PATH, packageDirs);
        PluginLog.getLog().debug("Extracting entry " + jarEntryName + " to " + targetFileName);

        // read the bytes to the buffer
        ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
        while ((n = jis.read(buf, 0, 1024)) > -1) {
            byteStream.write(buf, 0, n);
        }

        // replace property references with the syntax ${property_name}
        // to their respective property values.
        String data = byteStream.toString();
        data = StringUtils.replace(data, FILTER_GROUP_ID, packageName);
        data = StringUtils.replace(data, FILTER_ARTIFACT_ID, projectDir.getName());
        data = StringUtils.replace(data, FILTER_GROUP_PATH, packageDirs);

        // write the entire converted file content to the destination file.
        File f = new File(targetFileName);
        File dir = f.getParentFile();
        if (!dir.exists()) {
            dir.mkdirs();
        }
        FileWriter writer = new FileWriter(f);
        writer.write(data);
        jis.closeEntry();
        writer.close();
    }
    jis.close();
}
From source file:com.dmsl.anyplace.tasks.DownloadRadioMapTaskBuid.java
@Override
protected String doInBackground(Void... params) {
    boolean releaseLock = false;
    try {
        if (json_req == null)
            return "Error creating the request!";

        // check sdcard state
        File root;
        try {
            root = AnyplaceUtils.getRadioMapFoler(ctx, mBuildID, mFloor_number);
        } catch (Exception e) {
            return e.getMessage();
        }

        File okfile = new File(root, "ok.txt");
        if (!mForceDonwload && okfile.exists()) {
            success = true;
            return "Successfully read radio map from cache!";
        }

        // Allow only one download of the radiomap
        synchronized (downInProgress) {
            if (downInProgress == false) {
                downInProgress = true;
                releaseLock = true;
            } else {
                return "Already downloading radio map. Please wait...";
            }
        }

        runPreExecuteOnUI();
        okfile.delete();

        // receive only the radio map for the current floor
        // 0 timeout overrides default timeout
        String response = NetworkUtils.downloadHttpClientJsonPost(AnyplaceAPI.getRadioDownloadBuid(),
                json_req, 0);
        JSONObject json = new JSONObject(response);

        if (json.getString("status").equalsIgnoreCase("error")) {
            return "Error Message: " + json.getString("message");
        }

        String means = json.getString("map_url_mean");

        // create the credentials JSON in order to send and download the radio map
        JSONObject json_credentials = new JSONObject();
        json_credentials.put("username", "username");
        json_credentials.put("password", "pass");
        String cred_str = json_credentials.toString();

        String ms = NetworkUtils.downloadHttpClientJsonPost(means, cred_str);

        // check if the files downloaded correctly
        if (ms.contains("error")) {
            json = new JSONObject(response);
            return "Error Message: " + json.getString("message");
        }

        // rename the radiomap according to the floor
        // parameters and weights not used any more (RPF Algorithm Removed)
        String filename_radiomap_download = AnyplaceUtils.getRadioMapFileName(mFloor_number);
        String mean_fname = filename_radiomap_download;
        // String rbf_weights_fname = mean_fname.replace(".txt", "-rbf-weights.txt");
        // String parameters_fname = mean_fname.replace(".txt", "-parameters.txt");

        FileWriter out;
        out = new FileWriter(new File(root, mean_fname));
        out.write(ms);
        out.close();

        out = new FileWriter(okfile);
        out.write("ok;version:0;");
        out.close();

        waitPreExecute();
        success = true;
        return "Successfully saved radio maps!";
    } catch (ConnectTimeoutException e) {
        return "Connecting to Anyplace service is taking too long!";
    } catch (SocketTimeoutException e) {
        return "Communication with the server is taking too long!";
    } catch (Exception e) {
        return "Error downloading radio maps [ " + e.getMessage() + " ]";
    } finally {
        if (releaseLock)
            downInProgress = false;
    }
}