List of usage examples for java.lang.String#join(CharSequence, Iterable)
public static String join(CharSequence delimiter, Iterable<? extends CharSequence> elements)
From source file:net.sf.jabref.logic.exporter.ExportFormat.java
/**
 * Perform the export of {@code database}.
 *
 * Writes an optional begin layout (header), one layout per entry (cached per entry
 * type, falling back to the default layout), and an optional end layout (footer).
 * Custom name formatters bundled with this export filter are installed into a global
 * field for the duration of the export and cleared afterwards.
 *
 * @param databaseContext the database to export from.
 * @param file the file to write the resulting export to
 * @param encoding The encoding of the database
 * @param entries Contains all entries that should be exported.
 * @throws IOException if a problem occurred while trying to write to {@code writer}
 *         or read from required resources.
 * @throws Exception if any other error occurred during export.
 * @see net.sf.jabref.logic.exporter.IExportFormat#performExport(BibDatabaseContext, String, Charset, List)
 */
@Override
public void performExport(final BibDatabaseContext databaseContext, final String file, final Charset encoding,
        List<BibEntry> entries) throws Exception {
    Objects.requireNonNull(databaseContext);
    Objects.requireNonNull(entries);
    if (entries.isEmpty()) { // Do not export if no entries to export -- avoids exports with only template text
        return;
    }
    Path outFile = Paths.get(file);
    // Prefer the filter's own encoding override (this.encoding) when present;
    // fall back to the caller-supplied encoding if the override fails or is absent.
    SaveSession ss = null;
    if (this.encoding != null) {
        try {
            ss = new FileSaveSession(this.encoding, false);
        } catch (SaveException ex) {
            // Perhaps the overriding encoding doesn't work?
            // We will fall back on the default encoding.
            LOGGER.warn("Can not get save session.", ex);
        }
    }
    if (ss == null) {
        ss = new FileSaveSession(encoding, false);
    }
    try (VerifyingWriter ps = ss.getWriter()) {
        Layout beginLayout = null;
        // Check if this export filter has bundled name formatters:
        // Set a global field, so all layouts have access to the custom name formatters:
        Globals.prefs.customExportNameFormatters = readFormatterFile(lfFileName);
        List<String> missingFormatters = new ArrayList<>(1);
        // Print header
        try (Reader reader = getReader(lfFileName + ".begin.layout")) {
            LayoutHelper layoutHelper = new LayoutHelper(reader, Globals.prefs,
                    Globals.journalAbbreviationLoader);
            beginLayout = layoutHelper.getLayoutFromText();
        } catch (IOException ex) {
            // If an exception was thrown, the export filter doesn't have a begin file.
        }
        // Write the header
        // NOTE(review): the header uses the caller-supplied `encoding`, while the footer
        // below uses `this.encoding` (the override, possibly null) — confirm this asymmetry
        // is intentional.
        if (beginLayout != null) {
            ps.write(beginLayout.doLayout(databaseContext, encoding));
            missingFormatters.addAll(beginLayout.getMissingFormatters());
        }
        /*
         * Write database entries; entries will be sorted as they appear on the
         * screen, or sorted by author, depending on Preferences. We also supply
         * the Set entries - if we are to export only certain entries, it will
         * be non-null, and be used to choose entries. Otherwise, it will be
         * null, and be ignored.
         */
        SavePreferences savePrefs = SavePreferences.loadForExportFromPreferences(Globals.prefs);
        List<BibEntry> sorted = BibDatabaseWriter.getSortedEntries(databaseContext, entries, savePrefs);
        // Load default layout (no catch here: a missing default layout is a hard error)
        Layout defLayout;
        LayoutHelper layoutHelper;
        try (Reader reader = getReader(lfFileName + ".layout")) {
            layoutHelper = new LayoutHelper(reader, Globals.prefs, Globals.journalAbbreviationLoader);
            defLayout = layoutHelper.getLayoutFromText();
        }
        if (defLayout != null) {
            missingFormatters.addAll(defLayout.getMissingFormatters());
            if (!missingFormatters.isEmpty()) {
                // NOTE(review): passes a List directly to LOGGER.warn — relies on the
                // logger's Object overload; the joined-string form used at the end of this
                // method is clearer. Confirm which is intended.
                LOGGER.warn(missingFormatters);
            }
        }
        // Cache of type-specific layouts, keyed by entry type; a cached null means
        // "no type-specific layout exists" is NOT stored (the catch assigns defLayout
        // without caching it), so a missing layout file is re-probed per entry type miss.
        Map<String, Layout> layouts = new HashMap<>();
        Layout layout;
        ExportFormats.entryNumber = 0;
        for (BibEntry entry : sorted) {
            ExportFormats.entryNumber++; // Increment entry counter.
            // Get the layout
            String type = entry.getType();
            if (layouts.containsKey(type)) {
                layout = layouts.get(type);
            } else {
                try (Reader reader = getReader(lfFileName + '.' + type + ".layout")) {
                    // We try to get a type-specific layout for this entry.
                    layoutHelper = new LayoutHelper(reader, Globals.prefs,
                            Globals.journalAbbreviationLoader);
                    layout = layoutHelper.getLayoutFromText();
                    layouts.put(type, layout);
                    if (layout != null) {
                        missingFormatters.addAll(layout.getMissingFormatters());
                    }
                } catch (IOException ex) {
                    // The exception indicates that no type-specific layout
                    // exists, so we go with the default one.
                    layout = defLayout;
                }
            }
            // Write the entry
            ps.write(layout.doLayout(entry, databaseContext.getDatabase()));
        }
        // Print footer
        // changed section - begin (arudert)
        Layout endLayout = null;
        try (Reader reader = getReader(lfFileName + ".end.layout")) {
            layoutHelper = new LayoutHelper(reader, Globals.prefs, Globals.journalAbbreviationLoader);
            endLayout = layoutHelper.getLayoutFromText();
        } catch (IOException ex) {
            // If an exception was thrown, the export filter doesn't have an end file.
        }
        // Write footer (note: uses this.encoding, not the `encoding` parameter — see above)
        if (endLayout != null) {
            ps.write(endLayout.doLayout(databaseContext, this.encoding));
            missingFormatters.addAll(endLayout.getMissingFormatters());
        }
        // Clear custom name formatters:
        Globals.prefs.customExportNameFormatters = null;
        if (!missingFormatters.isEmpty()) {
            StringBuilder sb = new StringBuilder("The following formatters could not be found: ");
            sb.append(String.join(", ", missingFormatters));
            LOGGER.warn(sb);
        }
        finalizeSaveSession(ss, outFile);
    }
}
From source file:net.sf.jabref.logic.xmp.XMPUtil.java
/**
 * Helper function for retrieving a BibEntry from the DublinCore metadata
 * in a PDF file.
 *
 * To understand how to get hold of a XMPSchemaDublinCore have a look in the
 * test cases for XMPUtil.
 *
 * The BibEntry is built by mapping individual fields in the dublin core
 * (like creator, title, subject) to fields in a bibtex entry.
 *
 * @param dcSchema The document information from which to build a BibEntry.
 * @param prefs preferences, used only for the keyword separator when mapping subjects.
 * @return The bibtex entry found in the document information, or empty if no
 *         field could be mapped.
 */
public static Optional<BibEntry> getBibtexEntryFromDublinCore(XMPSchemaDublinCore dcSchema,
        JabRefPreferences prefs) {
    BibEntry entry = new BibEntry();
    // Contributor -> Editor
    List<String> contributors = dcSchema.getContributors();
    if ((contributors != null) && !contributors.isEmpty()) {
        entry.setField(FieldName.EDITOR, String.join(" and ", contributors));
    }
    // Author -> Creator
    List<String> creators = dcSchema.getCreators();
    if ((creators != null) && !creators.isEmpty()) {
        entry.setField(FieldName.AUTHOR, String.join(" and ", creators));
    }
    // Year + Month -> Date (only the first dc:date entry is considered)
    List<String> dates = dcSchema.getSequenceList("dc:date");
    if ((dates != null) && !dates.isEmpty()) {
        String date = dates.get(0).trim();
        Calendar c = null;
        try {
            c = DateConverter.toCalendar(date);
        } catch (IOException ignored) {
            // Ignored: an unparsable date simply yields no year/month fields.
        }
        if (c != null) {
            entry.setField(FieldName.YEAR, String.valueOf(c.get(Calendar.YEAR)));
            // A date longer than "yyyy" is assumed to carry month information.
            if (date.length() > 4) {
                entry.setField(FieldName.MONTH,
                        MonthUtil.getMonthByIndex(c.get(Calendar.MONTH)).bibtexFormat);
            }
        }
    }
    // Abstract -> Description
    String s = dcSchema.getDescription();
    if (s != null) {
        entry.setField(FieldName.ABSTRACT, s);
    }
    // Identifier -> DOI
    s = dcSchema.getIdentifier();
    if (s != null) {
        entry.setField(FieldName.DOI, s);
    }
    // Publisher -> Publisher
    // NOTE(review): field name is the literal plural "publishers", unlike the other
    // mappings which use FieldName constants — confirm this is intentional.
    List<String> publishers = dcSchema.getPublishers();
    if ((publishers != null) && !publishers.isEmpty()) {
        entry.setField("publishers", String.join(" and ", publishers));
    }
    /*
     * Relation -> bibtexkey
     *
     * We abuse the relationship attribute to store all other values in the
     * bibtex document: entries of the form "bibtex/<field>/<value>".
     */
    List<String> relationships = dcSchema.getRelationships();
    if (relationships != null) {
        for (String r : relationships) {
            if (r.startsWith("bibtex/")) {
                r = r.substring("bibtex/".length());
                int i = r.indexOf('/');
                if (i != -1) {
                    entry.setField(r.substring(0, i), r.substring(i + 1));
                }
            }
        }
    }
    // Rights -> Rights
    s = dcSchema.getRights();
    if (s != null) {
        entry.setField("rights", s);
    }
    // Source -> Source
    s = dcSchema.getSource();
    if (s != null) {
        entry.setField("source", s);
    }
    // Subject -> Keywords
    List<String> subjects = dcSchema.getSubjects();
    if (subjects != null) {
        entry.addKeywords(subjects, prefs.get(JabRefPreferences.KEYWORD_SEPARATOR));
    }
    // Title -> Title
    s = dcSchema.getTitle();
    if (s != null) {
        entry.setField(FieldName.TITLE, s);
    }
    // Type -> Type (only the first dc:type entry is considered)
    List<String> l = dcSchema.getTypes();
    if ((l != null) && !l.isEmpty()) {
        s = l.get(0);
        if (s != null) {
            entry.setType(s);
        }
    }
    // An entry with no mapped fields at all is treated as "nothing found".
    return entry.getFieldNames().isEmpty() ? Optional.empty() : Optional.of(entry);
}
From source file:com.ggvaidya.scinames.dataset.BinomialChangesSceneController.java
/**
 * Configures {@code changesTableView} for displaying potential binomial changes:
 * clears and rebuilds all columns, fills the table, wires selection to the
 * additional-data view, and installs a per-row context menu.
 */
private void setupTableWithBinomialChanges() {
    changesTableView.setEditable(false);
    changesTableView.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
    changesTableView.setItems(potentialChanges);
    changesTableView.getColumns().clear();

    // "Type" column: editable combo box over the fixed set of change types.
    TableColumn<PotentialChange, ChangeType> colChangeType = new TableColumn<>("Type");
    colChangeType.setCellFactory(ComboBoxTableCell.forTableColumn(new ChangeTypeStringConverter(),
            ChangeType.ADDITION, ChangeType.DELETION, ChangeType.RENAME, ChangeType.LUMP,
            ChangeType.SPLIT, ChangeType.COMPLEX, ChangeType.ERROR));
    colChangeType.setCellValueFactory(new PropertyValueFactory<>("type"));
    colChangeType.setPrefWidth(100.0);
    colChangeType.setEditable(true);
    changesTableView.getColumns().add(colChangeType);

    // "From" column: editable set of names, rendered via NameSetStringConverter.
    TableColumn<PotentialChange, ObservableSet<Name>> colChangeFrom = new TableColumn<>("From");
    colChangeFrom.setCellFactory(TextFieldTableCell.forTableColumn(new NameSetStringConverter()));
    colChangeFrom.setCellValueFactory(new PropertyValueFactory<>("from"));
    colChangeFrom.setPrefWidth(200.0);
    colChangeFrom.setEditable(true);
    changesTableView.getColumns().add(colChangeFrom);

    // "To" column: same rendering as "From".
    TableColumn<PotentialChange, ObservableSet<Name>> colChangeTo = new TableColumn<>("To");
    colChangeTo.setCellFactory(TextFieldTableCell.forTableColumn(new NameSetStringConverter()));
    colChangeTo.setCellValueFactory(new PropertyValueFactory<>("to"));
    colChangeTo.setPrefWidth(200.0);
    colChangeTo.setEditable(true);
    changesTableView.getColumns().add(colChangeTo);

    // "Dataset" column: read-only string form of the owning dataset.
    TableColumn<PotentialChange, String> colDataset = new TableColumn<>("Dataset");
    colDataset.setCellValueFactory(cvf -> {
        return new ReadOnlyStringWrapper(cvf.getValue().getDataset().toString());
    });
    colDataset.setPrefWidth(150.0);
    changesTableView.getColumns().add(colDataset);

    // "Date" column: sortable ascending; also made the default sort order below.
    TableColumn<PotentialChange, SimplifiedDate> dateCol = new TableColumn<>("Date");
    dateCol.setCellFactory(
            TextFieldTableCell.forTableColumn(new SimplifiedDate.SimplifiedDateStringConverter()));
    dateCol.setCellValueFactory(cvf -> new ReadOnlyObjectWrapper<>(cvf.getValue().getDataset().getDate()));
    dateCol.setPrefWidth(150);
    dateCol.setSortable(true);
    dateCol.setSortType(SortType.ASCENDING);
    changesTableView.getColumns().add(dateCol);
    changesTableView.getSortOrder().add(dateCol);

    // "Changes summary" column: count plus "; "-joined string forms of the
    // concrete changes mapped to this potential change.
    TableColumn<PotentialChange, String> colChangeSummary = new TableColumn<>("Changes summary");
    colChangeSummary.setCellValueFactory(cvf -> {
        Set<Change> changes = changesByPotentialChange.get(cvf.getValue());
        return new ReadOnlyStringWrapper(changes.size() + ": "
                + changes.stream().map(ch -> ch.toString()).collect(Collectors.joining("; ")));
    });
    colChangeSummary.setPrefWidth(200.0);
    changesTableView.getColumns().add(colChangeSummary);

    /*
    TableColumn<PotentialChange, String> colExplicit = new TableColumn<>("Explicit or implicit?");
    colExplicit.setCellValueFactory(
        (TableColumn.CellDataFeatures<Change, String> features) -> new ReadOnlyStringWrapper(
            features.getValue().getDataset().isChangeImplicit(features.getValue()) ? "Implicit" : "Explicit"
        )
    );
    tv.getColumns().add(colExplicit);

    ChangeFilter cf = binomialChangesView.getProjectView().getProject().getChangeFilter();
    TableColumn<Change, String> colFiltered = new TableColumn<>("Eliminated by filter?");
    colFiltered.setCellValueFactory(
        (TableColumn.CellDataFeatures<Change, String> features) -> new ReadOnlyStringWrapper(
            cf.test(features.getValue()) ?
                "Allowed" : "Eliminated"
        )
    );
    tv.getColumns().add(colFiltered);
    */

    // "Note" column: editable free-text note bound to the "note" property.
    TableColumn<PotentialChange, String> colNote = new TableColumn<>("Note");
    colNote.setCellFactory(TextFieldTableCell.forTableColumn());
    colNote.setCellValueFactory(new PropertyValueFactory<>("note"));
    colNote.setPrefWidth(100.0);
    changesTableView.getColumns().add(colNote);

    // "Reason" column: derived via calculateReason.
    TableColumn<PotentialChange, String> colReason = new TableColumn<>("Reason");
    colReason.setCellValueFactory(cvf -> new ReadOnlyStringWrapper(calculateReason(cvf.getValue())));
    colReason.setPrefWidth(100.0);
    changesTableView.getColumns().add(colReason);

    // "ReasonDate" column: single date as yyyy-mm-dd, multiple dates prefixed with
    // their count and joined by "|", or "NA" when none.
    TableColumn<PotentialChange, String> colReasonDate = new TableColumn<>("ReasonDate");
    colReasonDate.setCellValueFactory(cvf -> {
        String result;
        Set<SimplifiedDate> dates = calculateReasonDate(cvf.getValue());
        if (dates.size() > 1) {
            result = "(" + dates.size() + ") " + dates.stream().distinct().sorted()
                    .map(sd -> sd.asYYYYmmDD("-")).collect(Collectors.joining("|"));
        } else if (dates.size() == 1) {
            result = dates.iterator().next().asYYYYmmDD("-");
        } else {
            result = "NA";
        }
        return new ReadOnlyStringWrapper(result);
    });
    colReasonDate.setPrefWidth(100.0);
    changesTableView.getColumns().add(colReasonDate);

    // "Citations" column: sorted citations joined with "; ".
    TableColumn<PotentialChange, String> colCitations = new TableColumn<>("Citations");
    colCitations.setCellValueFactory(
            (TableColumn.CellDataFeatures<PotentialChange, String> features) -> new ReadOnlyStringWrapper(
                    features.getValue().getCitationStream().map(citation -> citation.getCitation()).sorted()
                            .collect(Collectors.joining("; "))));
    changesTableView.getColumns().add(colCitations);

    // "Genera" column: distinct sorted genus names of all names in the change.
    TableColumn<PotentialChange, String> colGenera = new TableColumn<>("Genera");
    colGenera.setCellValueFactory(
            (TableColumn.CellDataFeatures<PotentialChange, String> features) -> new ReadOnlyStringWrapper(
                    String.join(", ", features.getValue().getAllNames().stream().map(n -> n.getGenus())
                            .distinct().sorted().collect(Collectors.toList()))));
    changesTableView.getColumns().add(colGenera);

    // "Specific epithets" column: distinct sorted non-null specific epithets.
    TableColumn<PotentialChange, String> colSpecificEpithet = new TableColumn<>("Specific epithets");
    colSpecificEpithet.setCellValueFactory(
            (TableColumn.CellDataFeatures<PotentialChange, String> features) -> new ReadOnlyStringWrapper(String
                    .join(", ", features.getValue().getAllNames().stream().map(n -> n.getSpecificEpithet())
                            .filter(s -> s != null).distinct().sorted().collect(Collectors.toList()))));
    changesTableView.getColumns().add(colSpecificEpithet);

    // The infraspecific string.
    TableColumn<PotentialChange, String> colInfraspecificEpithet = new TableColumn<>("Infraspecific epithets");
    colInfraspecificEpithet.setCellValueFactory(
            (TableColumn.CellDataFeatures<PotentialChange, String> features) -> new ReadOnlyStringWrapper(
                    String.join(", ", features.getValue().getAllNames().stream()
                            .map(n -> n.getInfraspecificEpithetsAsString()).filter(s -> s != null)
                            .distinct().sorted().collect(Collectors.toList()))));
    changesTableView.getColumns().add(colInfraspecificEpithet);

    // The very last epithet of all: the last infraspecific epithet if any,
    // otherwise the specific epithet.
    TableColumn<PotentialChange, String> colTerminalEpithet = new TableColumn<>("Terminal epithet");
    colTerminalEpithet.setCellValueFactory(
            (TableColumn.CellDataFeatures<PotentialChange, String> features) -> new ReadOnlyStringWrapper(
                    String.join(", ", features.getValue().getAllNames().stream().map(n -> {
                        List<Name.InfraspecificEpithet> infraspecificEpithets = n.getInfraspecificEpithets();
                        if (!infraspecificEpithets.isEmpty()) {
                            return infraspecificEpithets.get(infraspecificEpithets.size() - 1).getValue();
                        } else {
                            return n.getSpecificEpithet();
                        }
                    }).filter(s -> s != null).distinct().sorted().collect(Collectors.toList()))));
    changesTableView.getColumns().add(colTerminalEpithet);

    // "DateYMD" column: machine-friendly yyyy-mm-dd dataset date (for export to R).
    TableColumn<PotentialChange, String> dateForRCol = new TableColumn<>("DateYMD");
    dateForRCol.setCellValueFactory(
            cvf -> new ReadOnlyObjectWrapper<>(cvf.getValue().getDataset().getDate().asYYYYmmDD("-")));
    changesTableView.getColumns().add(dateForRCol);

    // Properties
    TableColumn<PotentialChange, String> colProperties = new TableColumn<>("Properties");
    colProperties.setCellValueFactory(
            (TableColumn.CellDataFeatures<PotentialChange, String> features) -> new ReadOnlyStringWrapper(
                    features.getValue().getProperties().entrySet().stream()
                            .map(entry -> entry.getKey() + ": " + entry.getValue()).sorted()
                            .collect(Collectors.joining("; "))));
    changesTableView.getColumns().add(colProperties);

    fillTableWithBinomialChanges();

    // When someone selects a cell in the Table, try to select the appropriate data in the
    // additional data view.
    changesTableView.getSelectionModel().getSelectedItems()
            .addListener((ListChangeListener<PotentialChange>) lcl -> {
                AdditionalData aData = additionalDataCombobox.getSelectionModel().getSelectedItem();
                if (aData != null) {
                    aData.onSelectChange(changesTableView.getSelectionModel().getSelectedItems());
                }
            });

    // Create a right-click menu for table rows.
    changesTableView.setRowFactory(table -> {
        TableRow<PotentialChange> row = new TableRow<>();
        row.setOnContextMenuRequested(event -> {
            if (row.isEmpty())
                return;

            // We don't currently use the clicked change, since currently all options
            // change *all* the selected changes, but this may change in the future.
            PotentialChange change = row.getItem();

            ContextMenu changeMenu = new ContextMenu();

            // Submenu: open the detailed view for each concrete change backing this row.
            Menu lookupChange = new Menu("Look up change");
            lookupChange.getItems().addAll(changesByPotentialChange.getOrDefault(change, new HashSet<>())
                    .stream()
                    .map(ch -> createMenuItem(ch.toString() + " in " + ch.getDataset().toString(), action -> {
                        binomialChangesView.getProjectView().openDetailedView(ch);
                    })).collect(Collectors.toList()));
            changeMenu.getItems().add(lookupChange);
            changeMenu.getItems().add(new SeparatorMenuItem());

            // Submenu: open the detailed view for each name involved in the change.
            Menu searchForName = new Menu("Search for name");
            searchForName.getItems().addAll(
                    change.getAllNames().stream().sorted().map(n -> createMenuItem(n.getFullName(), action -> {
                        binomialChangesView.getProjectView().openDetailedView(n);
                    })).collect(Collectors.toList()));
            changeMenu.getItems().add(searchForName);
            changeMenu.getItems().add(new SeparatorMenuItem());

            // Create a submenu for tags and urls.
            // NOTE(review): `note` is read but never used, and `removeTags` is built but
            // never added to changeMenu — looks like unfinished work; confirm.
            String note = change.noteProperty().get();
            Menu removeTags = new Menu("Tags");
            removeTags.getItems().addAll(change.getTags().stream().sorted()
                    .map(tag -> new MenuItem(tag.getName())).collect(Collectors.toList()));
            Menu lookupURLs = new Menu("Lookup URL");
            change.getURIs().stream().sorted().map(uri -> {
                return createMenuItem(uri.toString(), evt -> {
                    try {
                        Desktop.getDesktop().browse(uri);
                    } catch (IOException ex) {
                        LOGGER.warning("Could not open URL '" + uri + "': " + ex);
                    }
                });
            }).forEach(mi -> lookupURLs.getItems().add(mi));
            changeMenu.getItems().add(lookupURLs);

            changeMenu.show(binomialChangesView.getScene().getWindow(), event.getScreenX(),
                    event.getScreenY());
        });
        return row;
    });

    LOGGER.info("setupTableWithChanges() completed");
}
From source file:com.cdd.bao.template.ClipboardSchema.java
private static void formatGroupTSV(List<String> lines, Schema.Group group) { List<String> cols = new ArrayList<>(); cols.add(group.name);//from w w w . java2s .c o m cols.add(group.descr.replace("\n", " ")); cols.add(""); for (String g : group.groupNest()) cols.add(g); cols.add(Util.safeString(group.groupURI)); lines.add(String.join("\t", cols)); for (Schema.Assignment assn : group.assignments) { cols.clear(); cols.add(assn.name); cols.add(assn.descr.replace("\n", " ")); cols.add(assn.propURI); for (String g : assn.groupNest()) cols.add(g); lines.add(String.join("\t", cols)); } for (Schema.Group subgrp : group.subGroups) formatGroupTSV(lines, subgrp); }
From source file:org.outofbits.sesame.schemagen.SchemaGeneration.java
/** * Requests the RDF file of the vocabulary by using the given HTTP {@link java.net.URL}. The * header-entry ACCEPT contains all supported {@link RDFFormat}s, where the given * {@link RDFFormat} is the preferred one. The responded rdf file will be parsed and a * {@link Model} containing all statements will be returned. An {@link IOException} will be * thrown, if the rdf file cannot be accessed or read. {@link URISyntaxException} will be thrown, * if the given {@link java.net.URL} has a syntax error. * * @param url the url, where the vocabulary is located and accessible by using HTTP. It must * not be null./*w w w. j a v a 2 s .c om*/ * @param format the format of the document representing the vocabulary. The format can be null. * @return {@link Model} containing all statements of the vocabulary located at the given * {@link java.net.URL}. * @throws IOException if the rdf file cannot be accessed or read. * @throws URISyntaxException if the given {@link java.net.URL} has a syntax error. */ private Model readVocabularyFromHTTPSource(java.net.URL url, RDFFormat format) throws IOException, URISyntaxException { assert url != null && url.getProtocol().equals("http"); HttpClientBuilder clientBuilder = HttpClientBuilder.create().setUserAgent(HTTP_USER_AGENT); HttpUriRequest vocabularyGETRequest = RequestBuilder.get().setUri(url.toURI()) .setHeader(HttpHeaders.ACCEPT, String.join(", ", RDFFormat.getAcceptParams(supportedRDFFormats.values(), false, format))) .build(); try (CloseableHttpClient client = clientBuilder.build()) { CloseableHttpResponse response = client.execute(vocabularyGETRequest); if (response.getStatusLine().getStatusCode() != 200) { throw new IOException(String.format("The given vocabulary can not requested from '%s'. 
%d: %s", url, response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase())); } String responseContentType = response.getEntity().getContentType().getValue(); Optional<RDFFormat> optionalResponseRdfFormat = Rio.getParserFormatForMIMEType(responseContentType); try (InputStream vocabResponseStream = response.getEntity().getContent()) { if (optionalResponseRdfFormat.isPresent()) { return Rio.parse(vocabResponseStream, "", optionalResponseRdfFormat.get()); } else { if (format == null) { throw new IOException( String.format("The returned content type (%s) from '%s' is not supported", responseContentType, url)); } try { return Rio.parse(vocabResponseStream, "", format); } catch (RDFParseException | UnsupportedRDFormatException e) { throw new IOException(String.format( "The returned content type (%s) from '%s' is not supported. Fallback to the given format %s, but an error occurred.", responseContentType, url, format), e); } } } } }
From source file:de.fosd.jdime.config.CommandLineConfigSource.java
/**
 * Resolves a configuration key against the parsed command line.
 *
 * The pseudo-key {@code ARG_LIST} maps to the positional arguments joined with
 * {@code ARG_LIST_SEP} (empty when there are none). Any other key maps to its
 * option's argument value (possibly absent) or, for flag options, to
 * {@code "true"}/{@code "false"} depending on presence.
 */
@Override
protected Optional<String> getMapping(String key) {
    if (ARG_LIST.equals(key)) {
        List<String> positional = cmdLine.getArgList();
        return positional.isEmpty() ? Optional.empty()
                                    : Optional.of(String.join(ARG_LIST_SEP, positional));
    }

    if (!options.hasOption(key)) {
        return Optional.empty();
    }

    Option option = options.getOption(key);
    String shortName = option.getOpt();

    // Options with an argument yield the argument (which may be null -> empty);
    // bare flags yield a boolean string reflecting their presence.
    return option.hasArg()
            ? Optional.ofNullable(cmdLine.getOptionValue(shortName))
            : Optional.of(cmdLine.hasOption(shortName) ? "true" : "false");
}
From source file:io.syndesis.jsondb.impl.SqlJsonDB.java
/**
 * Deletes all jsondb rows whose path matches {@code like}, plus the rows for
 * every parent path of {@code baseDBPath} (so emptied parents disappear too).
 *
 * @return the number of rows deleted.
 */
private int deleteJsonRecords(Handle dbi, String baseDBPath, String like) {
    LinkedList<String> bindValues = getAllParentPaths(baseDBPath);

    String statement = "DELETE from jsondb where path LIKE ?";
    if (!bindValues.isEmpty()) {
        // One "?" placeholder per parent path.
        String placeholders = String.join(", ", Collections.nCopies(bindValues.size(), "?"));
        statement = statement + " OR path in ( " + placeholders + " )";
    }

    // The LIKE pattern binds first, followed by the parent paths.
    bindValues.addFirst(like);
    return dbi.update(statement, bindValues.toArray());
}
From source file:com.hortonworks.streamline.streams.actions.storm.topology.StormTopologyActionsImpl.java
/**
 * Deploys the given topology to the Storm cluster by shelling out to
 * {@code storm jar ... org.apache.storm.flux.Flux --remote <yaml>}.
 *
 * @param topology the topology layout to deploy.
 * @param mavenArtifacts additional maven artifacts to pass to the storm CLI.
 * @param ctx action context, updated with the current deployment step.
 * @param asUser user to impersonate on secured clusters.
 * @throws TopologyAlreadyExistsOnCluster if the cluster already has a topology
 *         with the same name.
 * @throws Exception if the storm CLI exits non-zero for any other reason.
 */
@Override
public void deploy(TopologyLayout topology, String mavenArtifacts, TopologyActionContext ctx, String asUser)
        throws Exception {
    ctx.setCurrentAction("Adding artifacts to jar");
    Path jarToDeploy = addArtifactsToJar(getArtifactsLocation(topology));
    ctx.setCurrentAction("Creating Storm topology YAML file");
    String fileName = createYamlFile(topology);
    ctx.setCurrentAction("Deploying topology via 'storm jar' command");
    // Diamond operator instead of explicit type argument (file already uses Java 8 features).
    List<String> commands = new ArrayList<>();
    commands.add(stormCliPath);
    commands.add("jar");
    commands.add(jarToDeploy.toString());
    commands.addAll(getExtraJarsArg(topology));
    commands.addAll(getMavenArtifactsRelatedArgs(mavenArtifacts));
    commands.addAll(getNimbusConf());
    commands.addAll(getSecuredClusterConf(asUser));
    commands.add("org.apache.storm.flux.Flux");
    commands.add("--remote");
    commands.add(fileName);
    LOG.info("Deploying Application {}", topology.getName());
    LOG.info(String.join(" ", commands));
    ShellProcessResult shellProcessResult = executeShellProcess(commands);
    int exitValue = shellProcessResult.exitValue;
    if (exitValue != 0) {
        LOG.error("Topology deploy command failed - exit code: {} / output: {}", exitValue,
                shellProcessResult.stdout);
        // Collect only the "Exception..." lines from the CLI output for the error report.
        String[] lines = shellProcessResult.stdout.split("\\n");
        String errors = Arrays.stream(lines).filter(line -> line.startsWith("Exception"))
                .collect(Collectors.joining(", "));
        // Detect the specific "already exists" failure so callers can handle it distinctly.
        Pattern pattern = Pattern.compile("Topology with name `(.*)` already exists on cluster");
        Matcher matcher = pattern.matcher(errors);
        if (matcher.find()) {
            throw new TopologyAlreadyExistsOnCluster(matcher.group(1));
        } else {
            throw new Exception(
                    "Topology could not be deployed successfully: storm deploy command failed with " + errors);
        }
    }
}
From source file:business.services.LabRequestService.java
@SuppressWarnings("unchecked") @Transactional/*w w w . j a va2 s . c o m*/ public void generateLabRequests(String processInstanceId) { HistoricProcessInstance instance = requestService.getProcessInstance(processInstanceId); Object var = instance.getProcessVariables().get("lab_request_labs"); log.info("instance: " + instance.getId()); if (var != null && var instanceof Collection<?>) { List<LabRequest> labRequests = new ArrayList<LabRequest>(); SortedSet<Integer> labNumbers = new TreeSet<>((Collection<Integer>) var); Set<User> hubUsers = new HashSet<>(); for (Integer labNumber : labNumbers) { Lab lab = labService.findByNumber(labNumber); HistoricTaskInstance task = findLabRequestTaskForLab(labNumber, instance.getId()); // create lab requests LabRequest labRequest = new LabRequest(); labRequest.setTimeCreated(new Date()); labRequest.setLab(lab); labRequest.setProcessInstanceId(processInstanceId); labRequest.setTaskId(task.getId()); labRequest = labRequestRepository.save(labRequest); // set initial status labRequest = updateStatus(labRequest, Status.WAITING_FOR_LAB_APPROVAL); labRequest.setHubAssistanceRequested(lab.isHubAssistanceEnabled()); if (lab.isHubAssistanceEnabled()) { hubUsers.addAll(userService.findHubUsersForLab(lab)); } ExcerptList excerptList = excerptListService.findByProcessInstanceId(processInstanceId); List<PathologyItem> pathologyList = new ArrayList<PathologyItem>(); for (ExcerptEntry entry : excerptList.getEntries()) { if (entry.isSelected() && labNumber.equals(entry.getLabNumber())) { pathologyList.add(new PathologyItem(labRequest.getId(), entry)); } } labRequest.setPathologyList(pathologyList); labRequest = labRequestRepository.save(labRequest); log.info("Saved lab request " + labRequest.getId() + " for lab " + labNumber + " with " + pathologyList.size() + " pathology items."); labRequests.add(labRequest); } Map<Integer, LabRequestRepresentation> representationMap = new TreeMap<>(); // notify labs by mail for (LabRequest labRequest : labRequests) 
{ LabRequestRepresentation representation = new LabRequestRepresentation(labRequest); transferLabRequestData(representation, false); if (representation.getLab() == null) { log.warn("No lab for lab request " + representation.getLabRequestCode() + " while gerating lab requests."); } else { representationMap.put(representation.getLab().getNumber(), representation); try { mailService.notifyLab(representation); } catch (EmailError e) { log.warn("No mail sent to lab " + representation.getLab().getNumber() + " for lab request " + representation.getLabRequestCode() + ". Email addresses: '" + representation.getLab().getEmailAddresses() == null ? "" : String.join(", ", representation.getLab().getEmailAddresses()) + "'."); // FIXME: return error messages. } } } // notify hub users by mail for (User u : hubUsers) { // build list of lab request representations for the lab requests for labs // associated with the hub user. List<LabRequestRepresentation> representations = new ArrayList<>(); List<String> labRequestCodes = new ArrayList<>(); for (Lab l : u.getHubLabs()) { if (l.isHubAssistanceEnabled()) { LabRequestRepresentation representation = representationMap.get(l.getNumber()); if (representation != null) { representations.add(representation); labRequestCodes.add(representation.getLabRequestCode()); } } } String labRequestCodesString = String.join(", ", labRequestCodes); // send mail to hub user try { mailService.notifyHubuser(u, representations); } catch (EmailError e) { log.warn("No mail sent to hub user " + u.getUsername() + " for lab requests " + labRequestCodesString + ". Email address: '" + u.getContactData() == null ? "" : u.getContactData().getEmail() + "'."); // FIXME: return error messages. } } } }
From source file:org.codice.ddf.confluence.source.ConfluenceSource.java
private String getSpaceQuery(String query) { if (!confluenceSpaces.isEmpty()) { StringBuilder sb = new StringBuilder(); sb.append(query);//from www. j a v a 2 s. c o m if (StringUtils.isNotEmpty(query.trim())) { sb.append(" AND "); } sb.append("space"); if (excludeSpaces) { sb.append(" NOT IN ("); } else { sb.append(" IN ("); } sb.append(String.join(", ", confluenceSpaces)); sb.append(")"); return sb.toString(); } return query; }