List of usage examples for java.lang.System.lineSeparator()
String lineSeparator
To view the source code for java.lang.System.lineSeparator(), click the Source Link.
From source file:org.jboss.as.test.integration.logging.handlers.SocketHandlerTestCase.java
private void checkLevelsLogged(final JsonLogServer server, final Set<Logger.Level> expectedLevels, final String msg) throws IOException, InterruptedException { executeRequest(msg, Collections.singletonMap("includeLevel", "true")); final List<JsonObject> foundMessages = new ArrayList<>(); for (int i = 0; i < expectedLevels.size(); i++) { final JsonObject foundMessage = server.getLogMessage(DFT_TIMEOUT); if (foundMessage == null) { final String failureMessage = "A log messages was not received within " + DFT_TIMEOUT + " milliseconds." + System.lineSeparator() + "Found the following messages: " + foundMessages + System.lineSeparator() + "Expected the following levels to be logged: " + expectedLevels;//w w w.j a va 2 s .c o m Assert.fail(failureMessage); } foundMessages.add(foundMessage); } Assert.assertEquals(expectedLevels.size(), foundMessages.size()); // Check that we have all the expected levels final Collection<String> levels = expectedLevels.stream().map(Enum::name).collect(Collectors.toList()); final Iterator<JsonObject> iter = foundMessages.iterator(); while (iter.hasNext()) { final JsonObject foundMessage = iter.next(); final String foundLevel = foundMessage.getString("level"); Assert.assertNotNull("Expected a level on " + foundMessage, foundLevel); Assert.assertTrue(String.format("Level %s was logged, but not expected.", foundLevel), levels.remove(foundLevel)); iter.remove(); } // The string levels should be empty, if not we're missing an expected level Assert.assertTrue("Found levels that did not appear to be logged: " + levels, levels.isEmpty()); }
From source file:com.pearson.eidetic.driver.threads.subthreads.SnapshotVolumeSyncValidator.java
/**
 * Creates a synchronization snapshot of the given volume and copies the volume's tags onto the
 * new snapshot. All failures are logged and reported via the return value rather than thrown.
 *
 * @param ec2Client the EC2 client used to create and tag the snapshot
 * @param vol       the volume to snapshot
 * @param date      timestamp embedded in the snapshot description
 * @return {@code true} if the snapshot was created and tagged, {@code false} otherwise
 */
public boolean snapshotCreation(AmazonEC2Client ec2Client, Volume vol, Date date) {
    if ((date == null) || (ec2Client == null) || (vol == null)) {
        return false;
    }
    try {
        Collection<Tag> tags_volume = getResourceTags(vol);

        // Best effort: record which instance the volume is attached to, if any.
        String volumeAttachmentInstance = "none";
        try {
            volumeAttachmentInstance = vol.getAttachments().get(0).getInstanceId();
        } catch (Exception e) {
            logger.debug("Volume not attached to instance: " + vol.getVolumeId());
        }

        String description = "sync_snapshot " + vol.getVolumeId() + " by Eidetic Synchronizer at "
                + date.toString() + ". Volume attached to " + volumeAttachmentInstance;

        Snapshot current_snap;
        try {
            current_snap = createSnapshotOfVolume(ec2Client, vol, description, numRetries_,
                    maxApiRequestsPerSecond_, uniqueAwsAccountIdentifier_);
        } catch (Exception e) {
            // Fixed message: this catch guards snapshot creation, not tag parsing
            // (previously logged "Malformed Eidetic Tag"; sibling classes log
            // "error creating snapshot from volume" here).
            logger.error("awsAccountId=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event=\"Error\", Error=\"error creating snapshot from volume\", Volume_id=\""
                    + vol.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                    + StackTrace.getStringFromStackTrace(e) + "\"");
            return false;
        }

        try {
            setResourceTags(ec2Client, current_snap, tags_volume, numRetries_,
                    maxApiRequestsPerSecond_, uniqueAwsAccountIdentifier_);
        } catch (Exception e) {
            // Fixed: log the snapshot id (previously logged the volume id under Snapshot_id)
            // and the malformed key Event\"Error\" (missing '=').
            logger.error("awsAccountId=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event=\"Error\", Error=\"error adding tags to snapshot\", Snapshot_id=\""
                    + current_snap.getSnapshotId() + "\", stacktrace=\"" + e.toString()
                    + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\"");
            return false;
        }
    } catch (Exception e) {
        // Fixed unbalanced quoting in the Event key (was Event=\"Error,).
        logger.error("awsAccountId=\"" + uniqueAwsAccountIdentifier_
                + "\",Event=\"Error\", Error=\"error in snapshotCreation\", stacktrace=\"" + e.toString()
                + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\"");
        return false;
    }
    return true;
}
From source file:com.netflix.genie.web.jobs.workflow.impl.InitialSetupTask.java
@VisibleForTesting void createJobRequestEnvironmentVariables(final Writer writer, final JobRequest jobRequest) throws IOException { // create environment variable for the command tags/criteria in the job request writer.write(// ww w.ja v a 2s . c om JobConstants.EXPORT + JobConstants.GENIE_REQUESTED_COMMAND_TAGS_ENV_VAR + JobConstants.EQUALS_SYMBOL + JobConstants.DOUBLE_QUOTE_SYMBOL + tagsToString(jobRequest.getCommandCriteria()) + JobConstants.DOUBLE_QUOTE_SYMBOL + LINE_SEPARATOR); final List<ClusterCriteria> clusterCriterias = jobRequest.getClusterCriterias(); final List<String> clusterCriteriasStrings = new ArrayList<>(clusterCriterias.size()); for (ClusterCriteria clusterCriteria : clusterCriterias) { clusterCriteriasStrings.add("[" + tagsToString(clusterCriteria.getTags()) + "]"); } // Append new line writer.write(System.lineSeparator()); // create environment variable for the list of cluster tags/criteria in the job request as a single // value in the form "[[x,y,z],[a,b,c]]" writer.write( JobConstants.EXPORT + JobConstants.GENIE_REQUESTED_CLUSTER_TAGS_ENV_VAR + JobConstants.EQUALS_SYMBOL + JobConstants.DOUBLE_QUOTE_SYMBOL + "[" + StringUtils.join(clusterCriteriasStrings, ',') + "]" + JobConstants.DOUBLE_QUOTE_SYMBOL + LINE_SEPARATOR); // create environment variables for individual tags/criteria in the job request for (int i = 0; i < clusterCriterias.size(); i++) { final ClusterCriteria clusterCriteria = clusterCriterias.get(i); // create environment variable for the job name writer.write(JobConstants.EXPORT + JobConstants.GENIE_REQUESTED_CLUSTER_TAGS_ENV_VAR + "_" + i + JobConstants.EQUALS_SYMBOL + JobConstants.DOUBLE_QUOTE_SYMBOL + tagsToString(clusterCriteria.getTags()) + JobConstants.DOUBLE_QUOTE_SYMBOL + LINE_SEPARATOR); } // Append new line writer.write(LINE_SEPARATOR); }
From source file:com.hybridbpm.core.util.HybridbpmCoreUtil.java
public static String updateConnectorCodeInParameters(Module Module) { ConnectorModel connectorModel = HybridbpmCoreUtil.jsonToObject(Module.getModel(), ConnectorModel.class); String code = Module.getCode(); try {//from w w w.jav a 2 s . c om StringBuilder parametersBuilder = new StringBuilder(); for (FieldModel field : connectorModel.getInParameters()) { parametersBuilder.append(FieldModelUtil.getConnectorParameterTemplate(field)) .append(System.lineSeparator()); } return replaceGeneratedCode(code, parametersBuilder.toString(), SyntaxConstant.CONNECTOR_IN_PARAMETERS_START, SyntaxConstant.CONNECTOR_IN_PARAMETERS_END); } catch (Exception ex) { logger.log(Level.SEVERE, ex.getMessage(), ex); return code; } }
From source file:com.pearson.eidetic.driver.threads.subthreads.SnapshotChecker.java
/**
 * Creates a period-labeled snapshot of the given volume and copies the volume's tags onto it.
 * All failures are logged and reported via the return value rather than thrown.
 *
 * @param ec2Client the EC2 client used to create and tag the snapshot
 * @param vol       the volume to snapshot
 * @param period    one of "day", "hour", "week" or "month" (case-insensitive)
 * @param date      timestamp embedded in the snapshot description
 * @return {@code true} if the snapshot was created and tagged, {@code false} otherwise
 */
public boolean snapshotCreation(AmazonEC2Client ec2Client, Volume vol, String period, Date date) {
    if ((date == null) || (ec2Client == null) || (vol == null) || (period == null)) {
        return false;
    }
    try {
        // Replaced the original empty if/else-if ladder, which existed only to validate
        // the period, with a single boolean check.
        boolean validPeriod = "day".equalsIgnoreCase(period) || "hour".equalsIgnoreCase(period)
                || "week".equalsIgnoreCase(period) || "month".equalsIgnoreCase(period);
        if (!validPeriod) {
            logger.error("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event=Error, Error=\"Malformed Eidetic Tag\", Volume_id=\""
                    + vol.getVolumeId() + "\"");
            return false;
        }

        Collection<Tag> tags_volume = getResourceTags(vol);

        // Best effort: record which instance the volume is attached to, if any.
        String volumeAttachmentInstance = "none";
        try {
            volumeAttachmentInstance = vol.getAttachments().get(0).getInstanceId();
        } catch (Exception e) {
            logger.debug("Volume not attached to instance: " + vol.getVolumeId());
        }

        String description = period + "_snapshot " + vol.getVolumeId() + " by snapshot checker at "
                + date.toString() + ". Volume attached to " + volumeAttachmentInstance;

        Snapshot current_snap;
        try {
            current_snap = createSnapshotOfVolume(ec2Client, vol, description, numRetries_,
                    maxApiRequestsPerSecond_, uniqueAwsAccountIdentifier_);
        } catch (Exception e) {
            // NOTE(review): failures here are logged at info level while sibling classes use
            // error level — confirm this is intentional.
            logger.info("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event=\"Error\", Error=\"error creating snapshot from volume\", Volume_id=\""
                    + vol.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                    + StackTrace.getStringFromStackTrace(e) + "\"");
            return false;
        }

        try {
            setResourceTags(ec2Client, current_snap, tags_volume, numRetries_,
                    maxApiRequestsPerSecond_, uniqueAwsAccountIdentifier_);
        } catch (Exception e) {
            logger.info("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event=\"Error\", Error=\"error adding tags to snapshot\", Snapshot_id=\""
                    + current_snap.getSnapshotId() + "\", stacktrace=\"" + e.toString()
                    + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\"");
            return false;
        }
    } catch (Exception e) {
        logger.info("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                + "\",Event=\"Error\", Error=\"error in snapshotCreation\", stacktrace=\"" + e.toString()
                + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\"");
        return false;
    }
    return true;
}
From source file:com.rackspacecloud.blueflood.http.HttpIntegrationTestBase.java
protected String[] getBodyArray(HttpResponse response) throws IOException { try {//from ww w .java2 s . c o m StringWriter sw = new StringWriter(); IOUtils.copy(response.getEntity().getContent(), sw); return sw.toString().split(System.lineSeparator()); } finally { IOUtils.closeQuietly(response.getEntity().getContent()); } }
From source file:com.pearson.eidetic.driver.threads.subthreads.SnapshotVolumeNoTime.java
/**
 * Deletes the oldest period-specific snapshots of a volume so that at most {@code keep}
 * matching snapshots remain. Failures deleting individual snapshots are logged and skipped.
 *
 * @param ec2Client the EC2 client used to list and delete snapshots
 * @param vol       the volume whose snapshots are examined
 * @param period    one of "week", "day", "hour" or "month"; selects snapshots whose
 *                  description starts with "&lt;period&gt;_snapshot"
 * @param keep      number of matching snapshots to retain
 * @return {@code false} if any argument is null, otherwise {@code true}
 */
public boolean snapshotDeletion(AmazonEC2Client ec2Client, Volume vol, String period, Integer keep) {
    if ((keep == null) || (ec2Client == null) || (vol == null) || (period == null)) {
        return false;
    }
    try {
        List<Snapshot> allSnapshots = getAllSnapshotsOfVolume(ec2Client, vol, numRetries_,
                maxApiRequestsPerSecond_, uniqueAwsAccountIdentifier_);

        // Fixed raw-type 'new ArrayList()'. The original per-period equals/startsWith ladder
        // is equivalent to: period is one of the known values AND the description carries
        // the matching "<period>_snapshot" prefix.
        List<Snapshot> deleteCandidates = new ArrayList<>();
        boolean knownPeriod = "week".equals(period) || "day".equals(period)
                || "hour".equals(period) || "month".equals(period);
        for (Snapshot snapshot : allSnapshots) {
            if (knownPeriod && snapshot.getDescription().startsWith(period + "_snapshot")) {
                deleteCandidates.add(snapshot);
            }
        }

        // Sort by date and delete all but 'keep' snapshots. (Removed the redundant
        // intermediate copy of the candidate list; assumes sortSnapshotsByDate orders
        // oldest first — TODO confirm against the helper.)
        sortSnapshotsByDate(deleteCandidates);
        int delta = deleteCandidates.size() - keep;
        for (int i : range(0, delta - 1)) {
            try {
                deleteSnapshot(ec2Client, deleteCandidates.get(i), numRetries_,
                        maxApiRequestsPerSecond_, uniqueAwsAccountIdentifier_);
            } catch (Exception e) {
                logger.error("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                        + "\",Event=\"Error\", Error=\"error deleting snapshot\", Snapshot_id=\""
                        + deleteCandidates.get(i).getSnapshotId() + "\", stacktrace=\"" + e.toString()
                        + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\"");
            }
        }
    } catch (Exception e) {
        logger.error("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                + "\",Event=\"Error\", Error=\"error in snapshotDeletion\", stacktrace=\""
                + e.toString() + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\"");
    }
    return true;
}
From source file:kmi.taa.core.PredicateObjectRetriever.java
/**
 * Reads an RDF document from the given URL and renders every statement as one line of the form
 * "decodedPredicateURI&lt;TAB&gt;cleanedObject" followed by the platform line separator.
 * Returns the empty string if the model cannot be read.
 *
 * @param url  location of the RDF document
 * @param lang RDF serialization language hint, or {@code null} to let the reader decide
 * @return the rendered statements, or "" when the document cannot be read
 */
public String readRDF(String url, String lang) {
    Model model = ModelFactory.createDefaultModel();
    try {
        if (lang != null) {
            model.read(url, lang);
        } else {
            model.read(url);
        }
    } catch (Exception e) {
        return "";
    }
    StringBuilder out = new StringBuilder();
    StmtIterator statements = model.listStatements();
    while (statements.hasNext()) {
        Statement statement = statements.nextStatement();
        Property predicate = statement.getPredicate();
        RDFNode object = statement.getObject();
        String cleanedObject = removeType(object.toString());
        try {
            out.append(URLDecoder.decode(predicate.getURI(), "UTF-8"))
                    .append("\t")
                    .append(cleanedObject)
                    .append(System.lineSeparator());
        } catch (UnsupportedEncodingException e) {
            // Skip statements whose predicate URI cannot be decoded.
            continue;
        }
    }
    return out.toString();
}
From source file:ddf.catalog.impl.operations.CreateOperations.java
/**
 * Builds a multi-line summary of each metacard in the create request for the ingest log. Every
 * entry starts on a new line with its 1-based batch number; null metacards are noted as such.
 *
 * @param createReq the create request whose metacards are summarized
 * @return the assembled ingest log string
 */
private String buildIngestLog(CreateRequest createReq) {
    final StringBuilder log = new StringBuilder();
    final List<Metacard> metacards = createReq.getMetacards();
    final String titleLabel = "Metacard Title: ";
    final String idLabel = "Metacard ID: ";
    int batchNumber = 0;
    for (Metacard card : metacards) {
        batchNumber++;
        log.append(System.lineSeparator()).append("Batch #: ").append(batchNumber).append(" | ");
        if (card == null) {
            log.append("Null Metacard");
            continue;
        }
        if (card.getTitle() != null) {
            log.append(titleLabel).append(card.getTitle()).append(" | ");
        }
        if (card.getId() != null) {
            log.append(idLabel).append(card.getId()).append(" | ");
        }
    }
    return log.toString();
}
From source file:org.apache.openaz.xacml.std.pip.engines.csv.CSVEngine.java
/**
 * Looks up attribute values for the given request by scanning the configured CSV file.
 *
 * <p>Flow: (1) select the resolvers that support this request; (2) have each resolver compute
 * the column/value criteria it needs from the request; (3) scan the CSV — one line at a time
 * through a CSVReader when {@code fileIsBig}, otherwise over the pre-loaded {@code allLines}
 * list — and for every line that matches all criteria, collect the attributes each resolver
 * decodes from it.
 *
 * @param pipRequest the attribute request to satisfy
 * @param pipFinder  used by resolvers to obtain their parameter values
 * @return a response with all decoded attributes; an empty response when the request is
 *         unsupported or a resolver's parameters are missing; an error-status response if
 *         processing the file fails
 * @throws PIPException if a resolver fails while preparing its parameters
 */
@Override
public PIPResponse getAttributes(PIPRequest pipRequest, PIPFinder pipFinder) throws PIPException {
    //
    // Do we have any resolvers defined?
    //
    if (this.csvResolvers.size() == 0) {
        throw new IllegalStateException(this.getClass().getCanonicalName() + " is not configured");
    }
    //
    // Do any of our resolvers support this?
    //
    List<CSVResolver> resolvers = new ArrayList<CSVResolver>();
    for (CSVResolver resolver : this.csvResolvers) {
        if (resolver.supportRequest(pipRequest)) {
            resolvers.add(resolver);
        }
    }
    if (resolvers.size() == 0) {
        if (this.logger.isDebugEnabled()) {
            this.logger.debug("does not support this pip request: " + pipRequest);
        }
        return StdPIPResponse.PIP_RESPONSE_EMPTY;
    }
    if (this.logger.isDebugEnabled()) {
        this.logger.debug("supports this pip request: " + pipRequest);
    }
    //
    // We have at least one, have the resolvers prepare themselves.
    //
    List<Map<Integer, List<AttributeValue<?>>>> listParameters = new ArrayList<Map<Integer, List<AttributeValue<?>>>>();
    for (CSVResolver resolver : resolvers) {
        Map<Integer, List<AttributeValue<?>>> map = resolver.getColumnParameterValues(this, pipRequest,
                pipFinder);
        //
        // If the resolver cannot find all its parameter values, then we
        // return an empty response rather than scanning the file.
        //
        if (map == null || map.isEmpty()) {
            this.logger.warn("Resolver could not find parameters.");
            return StdPIPResponse.PIP_RESPONSE_EMPTY;
        }
        listParameters.add(map);
    }
    //
    // Look at each line of the file to see if it matches the (non-unique) criteria in the parameters
    // and add the value in the associated column from the CSV file to the list of response Attributes.
    //
    StdMutablePIPResponse mutablePIPResponse = new StdMutablePIPResponse();
    //
    // for smaller files, this is the index in the allLines List
    //
    int lineIndex = 0;
    //
    // for big files we need to read one line at a time from the CSVReader
    //
    CSVReader csvReader = null;
    try {
        if (this.fileIsBig) {
            // NOTE(review): FileReader uses the platform default charset — confirm the CSV
            // source encoding matches.
            csvReader = new CSVReader(new FileReader(csvSourceFile), csvDelimiter, csvQuote, csvSkip);
        }
        while (true) {
            String[] line = null;
            if (this.fileIsBig) {
                line = csvReader.readNext();
                if (line == null) {
                    // end of file
                    break;
                }
            } else {
                if (lineIndex < this.allLines.size()) {
                    line = this.allLines.get(lineIndex);
                    lineIndex++;
                } else {
                    //
                    // end of (previously-read) list
                    //
                    break;
                }
            }
            //
            // Does the line match?
            //
            if (!this.doesLineMatch(line, listParameters)) {
                continue;
            }
            //
            // Ask each resolver to return any attributes from the line
            //
            for (CSVResolver resolver : resolvers) {
                List<Attribute> attributes = resolver.decodeResult(line);
                if (attributes != null && attributes.size() > 0) {
                    if (this.logger.isDebugEnabled()) {
                        this.logger.debug("resolver returned " + attributes.size() + " attributes");
                    }
                    mutablePIPResponse.addAttributes(attributes);
                }
            }
        }
        //
        // Done reading the file
        //
        if (this.logger.isDebugEnabled()) {
            this.logger.debug("Returning " + mutablePIPResponse.getAttributes().size() + " attributes");
            for (Attribute attribute : mutablePIPResponse.getAttributes()) {
                this.logger.debug(System.lineSeparator() + AttributeUtils.prettyPrint(attribute));
            }
        }
        return new StdPIPResponse(mutablePIPResponse);
    } catch (Exception e) {
        // Any processing failure is converted into an error-status response.
        String message = this.getName() + ": Error processing line: " + e;
        logger.error(message, e);
        return new StdPIPResponse(new StdStatus(StdStatusCode.STATUS_CODE_PROCESSING_ERROR, e.getMessage()));
    } finally {
        // Close the big-file reader, if one was opened; close failures are only logged.
        if (csvReader != null) {
            try {
                csvReader.close();
            } catch (IOException e) {
                this.logger.error("Close CSV Reader: " + e.getLocalizedMessage());
            }
        }
    }
}