List of usage examples for java.lang System lineSeparator
String lineSeparator
To view the source code for java.lang.System.lineSeparator, click the Source Link below each example.
From source file:org.apache.cassandra.db.lifecycle.LogFile.java
/**
 * Renders this log file as "[&lt;fileName&gt; in &lt;directories&gt;]", optionally
 * followed by a per-replica content listing.
 *
 * @param showContents when true, appends "Files and contents follow:" and the
 *                     replica contents (including any errors) on subsequent lines
 * @return the formatted description
 */
public String toString(boolean showContents) {
    StringBuilder sb = new StringBuilder();
    sb.append('[').append(getFileName()).append(" in ").append(replicas.getDirectories()).append(']');
    if (showContents) {
        sb.append(System.lineSeparator())
          .append("Files and contents follow:")
          .append(System.lineSeparator());
        // replicas writes its listing (and any errors) directly into the builder
        replicas.printContentsWithAnyErrors(sb);
    }
    return sb.toString();
}
From source file:org.apache.zeppelin.submarine.hadoop.YarnClient.java
public List<Map<String, Object>> getAppAttempts(String appId) { List<Map<String, Object>> appAttempts = new ArrayList<>(); String appUrl = this.yarnWebHttpAddr + "/ws/v1/cluster/apps/" + appId + "/appattempts?_=" + System.currentTimeMillis(); InputStream inputStream = null; try {//from www.jav a 2 s . c o m HttpResponse response = callRestUrl(appUrl, principal, HTTP.GET); inputStream = response.getEntity().getContent(); String result = new BufferedReader(new InputStreamReader(inputStream)).lines() .collect(Collectors.joining(System.lineSeparator())); if (response.getStatusLine().getStatusCode() != 200 /*success*/) { LOGGER.warn("Status code " + response.getStatusLine().getStatusCode()); LOGGER.warn("message is :" + Arrays.deepToString(response.getAllHeaders())); LOGGER.warn("result\n" + result); } // parse app status json appAttempts = parseAppAttempts(result); } catch (Exception exp) { exp.printStackTrace(); } finally { try { if (null != inputStream) { inputStream.close(); } } catch (Exception e) { LOGGER.error(e.getMessage(), e); } } return appAttempts; }
From source file:kmi.taa.core.PredicateObjectRetriever.java
public String csvClean(String origin) { if (origin.equals("")) return ""; StringBuilder builder = new StringBuilder(); String[] str = origin.split(System.getProperty("line.separator")); String[] line = new String[2]; int i = 0;/* w w w. ja v a 2 s . c o m*/ while (i < str.length) { line = str[i].split(","); try { // if this line is a remaining part of the previous line because of // a new line symbol, it will be added to the end of the previous line // without line break if (line.length == 1) { builder.deleteCharAt(builder.length() - 1); if (line[0].charAt(0) == '"' && line[0].charAt(line[0].length() - 1) == '"') { builder.append(line[0].substring(1, line[0].length() - 1)); } else if (line[0].charAt(0) == '"' && line[0].charAt(line[0].length() - 1) != '"') { builder.append(line[0].substring(1, line[0].length())); } else if (line[0].charAt(line[0].length() - 1) == '"' && line[0].charAt(0) != '"') { builder.append(line[0].substring(0, line[0].length() - 1)); } else { builder.append(line[0]); } } else { if (line[0].charAt(0) != '"' && line[0].charAt(line[0].length() - 1) != '"') { builder.deleteCharAt(builder.length() - 1); builder.append(str[i].substring(0, str[i].length() - 1)); } else if (line[1].charAt(0) == '"' && line[1].charAt(line[1].length() - 1) == '"') { builder.append(line[0].substring(1, line[0].length() - 1) + "\t" + line[1].substring(1, line[1].length() - 1)); } else if (line[1].charAt(0) == '"' && line[1].charAt(line[1].length() - 1) != '"') { builder.append(line[0].substring(1, line[0].length() - 1) + "\t" + line[1].substring(1, line[1].length())); } else if (line[1].charAt(line[1].length() - 1) == '"' && line[1].charAt(0) != '"') { builder.append(line[0].substring(1, line[0].length() - 1) + "\t" + line[1].substring(0, line[1].length() - 1)); } else { builder.append(line[0].substring(1, line[0].length() - 1) + "\t" + line[1]); } } builder.append(System.lineSeparator()); } catch (Exception e) { } i++; } return builder.toString(); }
From source file:com.pearson.eidetic.driver.threads.MonitorSnapshotVolumeTime.java
private HashMap<Date, ArrayList<Volume>> extractRunAt(ArrayList<Volume> volumes) { JSONParser parser = new JSONParser(); HashMap<Date, ArrayList<Volume>> returnHash = new HashMap(); for (Volume volume : volumes) { for (Tag tag : volume.getTags()) { String tagValue = null; if (tag.getKey().equalsIgnoreCase("Eidetic")) { tagValue = tag.getValue(); }//from w ww . ja va 2 s .co m if (tagValue == null) { continue; } JSONObject eideticParameters; try { Object obj = parser.parse(tagValue); eideticParameters = (JSONObject) obj; } catch (Exception e) { logger.error("awsAccountNickname=\"" + awsAccount_.getUniqueAwsAccountIdentifier() + "\",Event=Error, Error=\"Malformed Eidetic Tag\", Volume_id=\"" + volume.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\""); continue; } JSONObject createSnapshot; try { createSnapshot = (JSONObject) eideticParameters.get("CreateSnapshot"); } catch (Exception e) { logger.error("awsAccountNickname=\"" + awsAccount_.getUniqueAwsAccountIdentifier() + "\",Event=Error, Error=\"Malformed Eidetic Tag\", Volume_id=\"" + volume.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\""); continue; } String runAt = null; if (createSnapshot.containsKey("RunAt")) { runAt = createSnapshot.get("RunAt").toString(); } Date date = null; try { date = dayFormat_.parse(runAt); } catch (ParseException e) { logger.error("awsAccountNickname=\"" + awsAccount_.getUniqueAwsAccountIdentifier() + "\",Event=Error, Error=\"Malformed Eidetic Tag\", Volume_id=\"" + volume.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\""); } if (date == null) { continue; } if (returnHash.keySet().contains(date)) { returnHash.get(date).add(volume); } else { ArrayList<Volume> newArrayList = new ArrayList(); newArrayList.add(volume); returnHash.put(date, newArrayList); } break; } } 
return returnHash; }
From source file:com.pearson.eidetic.driver.threads.subthreads.SnapshotVolumeNoTime.java
public boolean snapshotCreation(AmazonEC2Client ec2Client, Volume vol, String period, Date date) { if ((date == null) || (ec2Client == null) || (vol == null) || (period == null)) { return false; }//from w ww .j av a2 s . c o m try { if ("day".equalsIgnoreCase(period)) { } else if ("hour".equalsIgnoreCase(period)) { } else if ("week".equalsIgnoreCase(period)) { } else if ("month".equalsIgnoreCase(period)) { } else { logger.error("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_ + "\",Event=Error, Error=\"Malformed Eidetic Tag\", Volume_id=\"" + vol.getVolumeId() + "\""); return false; } Collection<Tag> tags_volume = getResourceTags(vol); String volumeAttachmentInstance = "none"; try { volumeAttachmentInstance = vol.getAttachments().get(0).getInstanceId(); } catch (Exception e) { logger.debug("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_ + "\",Volume not attached to instance: " + vol.getVolumeId()); } String description = period + "_snapshot " + vol.getVolumeId() + " by Eidetic at " + date.toString() + ". 
Volume attached to " + volumeAttachmentInstance; Snapshot current_snap; try { current_snap = createSnapshotOfVolume(ec2Client, vol, description, numRetries_, maxApiRequestsPerSecond_, uniqueAwsAccountIdentifier_); } catch (Exception e) { logger.info("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_ + "\",Event=\"Error\", Error=\"error creating snapshot from volume\", Volume_id=\"" + vol.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\""); return false; } try { setResourceTags(ec2Client, current_snap, tags_volume, numRetries_, maxApiRequestsPerSecond_, uniqueAwsAccountIdentifier_); } catch (Exception e) { logger.error("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_ + "\",Event\"Error\", Error=\"error adding tags to snapshot\", Snapshot_id=\"" + current_snap.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\""); return false; } } catch (Exception e) { logger.error("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_ + "\",Event=\"Error, Error=\"error in snapshotCreation\", stacktrace=\"" + e.toString() + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\""); return false; } return true; }
From source file:com.netflix.genie.web.jobs.workflow.impl.InitialSetupTask.java
@VisibleForTesting void createJobEnvironmentVariables(final Writer writer, final String jobId, final String jobName, final int memory, final Set<String> tags, final String grouping, final String groupingInstance) throws IOException { writer.write(JobConstants.EXPORT + JobConstants.GENIE_JOB_ID_ENV_VAR + JobConstants.EQUALS_SYMBOL + JobConstants.DOUBLE_QUOTE_SYMBOL + jobId + JobConstants.DOUBLE_QUOTE_SYMBOL + LINE_SEPARATOR); // Append new line writer.write(System.lineSeparator()); // create environment variable for the job name writer.write(JobConstants.EXPORT + JobConstants.GENIE_JOB_NAME_ENV_VAR + JobConstants.EQUALS_SYMBOL + JobConstants.DOUBLE_QUOTE_SYMBOL + jobName + JobConstants.DOUBLE_QUOTE_SYMBOL + LINE_SEPARATOR); // Append new line writer.write(LINE_SEPARATOR);// w w w .j ava 2s .c o m // create environment variable for the job name writer.write(JobConstants.EXPORT + JobConstants.GENIE_JOB_MEMORY_ENV_VAR + JobConstants.EQUALS_SYMBOL + memory + LINE_SEPARATOR); // Append new line writer.write(LINE_SEPARATOR); // create environment variable for the job name writer.write(JobConstants.EXPORT + JobConstants.GENIE_JOB_TAGS_ENV_VAR + JobConstants.EQUALS_SYMBOL + JobConstants.DOUBLE_QUOTE_SYMBOL + this.tagsToString(tags) + JobConstants.DOUBLE_QUOTE_SYMBOL + LINE_SEPARATOR); // Append new line writer.write(LINE_SEPARATOR); // create environment variable for the job tags writer.write(JobConstants.EXPORT + JobConstants.GENIE_JOB_GROUPING_ENV_VAR + JobConstants.EQUALS_SYMBOL + JobConstants.DOUBLE_QUOTE_SYMBOL + grouping + JobConstants.DOUBLE_QUOTE_SYMBOL + LINE_SEPARATOR); // Append new line writer.write(LINE_SEPARATOR); // create environment variable for the job tags writer.write(JobConstants.EXPORT + JobConstants.GENIE_JOB_GROUPING_INSTANCE_ENV_VAR + JobConstants.EQUALS_SYMBOL + JobConstants.DOUBLE_QUOTE_SYMBOL + groupingInstance + JobConstants.DOUBLE_QUOTE_SYMBOL + LINE_SEPARATOR); // Append new line writer.write(LINE_SEPARATOR); }
From source file:tds.student.web.handlers.TestShellHandler.java
private void LogClientLatencies(TestOpportunity testOpp, List<ClientLatency> clientLatencies, HttpServletRequest request) {/*from www . ja v a 2 s. co m*/ StringBuilder errorBuilder = new StringBuilder(); // look for errors for (ClientLatency clientLatency : clientLatencies) { List<String> latencyErrors = clientLatency.getErrors(); // log any latency validation errors if (latencyErrors != null && latencyErrors.size() > 0) { errorBuilder.append("PAGE ").append(clientLatency.getItemPage()).append(" ERRORS:"); for (String error : latencyErrors) { errorBuilder.append("* "); errorBuilder.append(error); errorBuilder.append(System.lineSeparator()); } errorBuilder.append(System.lineSeparator()); } } // write latency to DB try { _oppRepository.recordClientLatencies(testOpp.getOppInstance(), clientLatencies); } catch (Exception ex) { // log any exceptions errorBuilder.append("EXCEPTION: ").append(ex); errorBuilder.append(System.lineSeparator()); errorBuilder.append(System.lineSeparator()); } // write error to DB if (errorBuilder.length() > 0) { String message = String.format("Client latency exception/errors have occured: %s", errorBuilder.toString()); _tdsLogger.applicationError(message, "LogClientLatencies", request, null); } }
From source file:com.hybridbpm.core.util.HybridbpmCoreUtil.java
/**
 * Regenerates the parameter section of a form's source code.
 *
 * Looks up {@code methodName} on {@code FieldModelUtil} via reflection, applies it
 * to every form parameter to render one code line each, and splices the joined
 * lines into {@code code} between the {@code start} and {@code end} markers.
 * On any failure the error is logged and the original code is returned unchanged.
 *
 * @return the updated code, or the input {@code code} if generation fails
 */
public static String updateFormCode(FormModel formModel, String code, String methodName, String start,
        String end) {
    try {
        Method generator = FieldModelUtil.class.getMethod(methodName, FieldModel.class, String.class);
        StringBuilder generated = new StringBuilder();
        for (FieldModel parameter : formModel.getParameters()) {
            // static invocation: the generator methods take (FieldModel, String)
            String rendered = (String) generator.invoke(null, parameter, null);
            if (!rendered.isEmpty()) {
                generated.append(rendered).append(System.lineSeparator());
            }
        }
        return replaceGeneratedCode(code, generated.toString(), start, end);
    } catch (Exception ex) {
        logger.log(Level.SEVERE, ex.getMessage(), ex);
        return code;
    }
}
From source file:de.tuttas.restful.VertretungsManager.java
/**
 * Loads templateVertretung.txt, located next to the compiled Config class,
 * as UTF-8 text.
 *
 * @return the template contents with platform line separators appended after
 *         each line, or "" when the file cannot be read (the error is logged)
 */
private String loadTemplate() {
    // Resolve the template path relative to where Config.class was loaded from.
    String pathTemplate = Config.class.getProtectionDomain().getCodeSource().getLocation().getPath();
    pathTemplate = pathTemplate.substring(0, pathTemplate.indexOf("Config.class"));
    pathTemplate = pathTemplate + File.separator + "templateVertretung.txt";
    Log.d("Path=" + pathTemplate);
    StringBuilder sb = new StringBuilder();
    // BUGFIX: the original never closed the reader (resource leak); try-with-resources
    // guarantees closure. Charset constant replaces the "UTF8" string name.
    try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(pathTemplate),
            java.nio.charset.StandardCharsets.UTF_8))) {
        String line;
        while ((line = br.readLine()) != null) {
            sb.append(line);
            sb.append(System.lineSeparator());
        }
        Log.d("Habe gelesen:" + sb);
    } catch (FileNotFoundException ex) {
        // NOTE(review): logger is bound to UmfagenManager although this method lives in
        // VertretungsManager — looks like a copy-paste; confirm before changing.
        Logger.getLogger(UmfagenManager.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(UmfagenManager.class.getName()).log(Level.SEVERE, null, ex);
    }
    return sb.toString();
}
From source file:org.apache.hadoop.tools.TestHadoopArchiveLogs.java
// Shared driver for the proxy/no-proxy script-generation tests: registers two
// eligible applications, generates the shell script, and asserts its exact
// line-by-line contents.
private void _testGenerateScript(boolean proxy) throws Exception {
    Configuration conf = new Configuration();
    HadoopArchiveLogs hal = new HadoopArchiveLogs(conf);
    ApplicationId app1 = ApplicationId.newInstance(CLUSTER_TIMESTAMP, 1);
    ApplicationId app2 = ApplicationId.newInstance(CLUSTER_TIMESTAMP, 2);
    hal.eligibleApplications.add(new HadoopArchiveLogs.AppInfo(app1.toString(), USER));
    hal.eligibleApplications.add(new HadoopArchiveLogs.AppInfo(app2.toString(), USER));
    hal.proxy = proxy;
    File localScript = new File("target", "script.sh");
    Path workingDir = new Path("/tmp", "working");
    Path remoteRootLogDir = new Path("/tmp", "logs");
    String suffix = "logs";
    // Start from a clean slate so exists() below proves generateScript created the file.
    localScript.delete();
    Assert.assertFalse(localScript.exists());
    hal.generateScript(localScript, workingDir, remoteRootLogDir, suffix);
    Assert.assertTrue(localScript.exists());
    String script = IOUtils.toString(localScript.toURI());
    String[] lines = script.split(System.lineSeparator());
    Assert.assertEquals(16, lines.length);
    Assert.assertEquals("#!/bin/bash", lines[0]);
    Assert.assertEquals("set -e", lines[1]);
    Assert.assertEquals("set -x", lines[2]);
    Assert.assertEquals("if [ \"$YARN_SHELL_ID\" == \"1\" ]; then", lines[3]);
    // eligibleApplications has no guaranteed iteration order, so the two appIds may
    // appear in either branch of the generated if/elif; detect which came first.
    if (lines[4].contains(app1.toString())) {
        Assert.assertEquals("\tappId=\"" + app1.toString() + "\"", lines[4]);
        Assert.assertEquals("\tappId=\"" + app2.toString() + "\"", lines[7]);
    } else {
        Assert.assertEquals("\tappId=\"" + app2.toString() + "\"", lines[4]);
        Assert.assertEquals("\tappId=\"" + app1.toString() + "\"", lines[7]);
    }
    Assert.assertEquals("\tuser=\"" + USER + "\"", lines[5]);
    Assert.assertEquals("elif [ \"$YARN_SHELL_ID\" == \"2\" ]; then", lines[6]);
    Assert.assertEquals("\tuser=\"" + USER + "\"", lines[8]);
    Assert.assertEquals("else", lines[9]);
    Assert.assertEquals("\techo \"Unknown Mapping!\"", lines[10]);
    Assert.assertEquals("\texit 1", lines[11]);
    Assert.assertEquals("fi", lines[12]);
    Assert.assertEquals("export HADOOP_CLIENT_OPTS=\"-Xmx1024m\"", lines[13]);
    Assert.assertTrue(lines[14].startsWith("export HADOOP_CLASSPATH="));
    // The final command line differs only by the trailing "-noProxy" flag.
    if (proxy) {
        Assert.assertEquals("\"$HADOOP_PREFIX\"/bin/hadoop org.apache.hadoop.tools."
                + "HadoopArchiveLogsRunner -appId \"$appId\" -user \"$user\" " + "-workingDir "
                + workingDir.toString() + " -remoteRootLogDir " + remoteRootLogDir.toString() + " -suffix "
                + suffix, lines[15]);
    } else {
        Assert.assertEquals("\"$HADOOP_PREFIX\"/bin/hadoop org.apache.hadoop.tools."
                + "HadoopArchiveLogsRunner -appId \"$appId\" -user \"$user\" " + "-workingDir "
                + workingDir.toString() + " -remoteRootLogDir " + remoteRootLogDir.toString() + " -suffix "
                + suffix + " -noProxy", lines[15]);
    }
}