List of usage examples for java.lang.String#join
public static String join(CharSequence delimiter, Iterable<? extends CharSequence> elements)
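For reference, a minimal standalone sketch of how this method is called. The class and variable names here are illustrative only and do not come from the examples below; the second call uses the companion varargs overload String.join(CharSequence, CharSequence...).

import java.util.Arrays;
import java.util.List;

public class StringJoinDemo {
    public static void main(String[] args) {
        // Iterable overload: join the elements of a List with a delimiter
        List<String> parts = Arrays.asList("alpha", "beta", "gamma");
        System.out.println(String.join(", ", parts));        // alpha, beta, gamma

        // Varargs overload: String.join(CharSequence delimiter, CharSequence... elements)
        System.out.println(String.join("/", "a", "b", "c")); // a/b/c
    }
}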
From source file:io.stallion.requests.CorsResponseHandler.java
protected void handlePreflight(IRequest request, StResponse response) {
    // Access-Control-Allow-Methods: GET, POST, PUT
    // Access-Control-Allow-Headers: X-Custom-Header
    // Content-Type: text/html; charset=utf-8
    handleOriginAllowed(request, response);
    CorsSettings cors = Settings.instance().getCors();
    response.addHeader("Access-Control-Allow-Credentials",
            ((Boolean) cors.isAllowCredentials()).toString().toLowerCase());
    response.addHeader("Access-Control-Allow-Methods", cors.getAllowedMethodsString());
    List<String> allowHeaders = list();
    for (String requestHeader : or(request.getHeader("Access-Control-Request-Headers"), "").split(",")) {
        requestHeader = requestHeader.trim();
        String requestHeaderLower = requestHeader.toLowerCase();
        if (cors.getAllowHeaders().contains(requestHeaderLower)) {
            allowHeaders.add(requestHeader);
        }
    }
    if (allowHeaders.size() > 0) {
        response.addHeader("Access-Control-Allow-Headers", String.join(",", allowHeaders));
    }
    response.addHeader("Access-Control-Max-Age", cors.getPreflightMaxAge().toString());
    response.setContentType("text/html; charset=utf-8");
    response.setStatus(200);
    throw new ResponseComplete();
}
From source file:com.hortonworks.streamline.streams.cluster.bundle.impl.KafkaSinkBundleHintProvider.java
@Override
public Map<String, Object> getHintsOnCluster(Cluster cluster) {
    Map<String, Object> hintClusterMap = new HashMap<>();
    try (KafkaMetadataService kafkaMetadataService = KafkaMetadataService.newInstance(environmentService,
            cluster.getId())) {
        KafkaMetadataService.Topics topics = kafkaMetadataService.getTopicsFromZk();
        hintClusterMap.put(FIELD_NAME_TOPIC, topics.getTopics());

        KafkaMetadataService.BrokersInfo<HostPort> brokerHosts = kafkaMetadataService
                .getBrokerHostPortFromStreamsJson(cluster.getId());
        List<HostPort> hosts = brokerHosts.getInfo();
        if (hosts != null && !hosts.isEmpty()) {
            List<String> bootstrapServerList = hosts.stream()
                    .map(hostPort -> String.format("%s:%d", hostPort.getHost(), hostPort.getPort()))
                    .collect(toList());
            hintClusterMap.put(FIELD_NAME_BOOTSTRAP_SERVERS, String.join(",", bootstrapServerList));
        }

        String protocol = kafkaMetadataService.getProtocolFromStreamsJson(cluster.getId());
        if (!StringUtils.isEmpty(protocol)) {
            hintClusterMap.put(FIELD_NAME_SECURITY_PROTOCOL, protocol);
        }
    } catch (ServiceNotFoundException e) {
        // we access it from mapping information so shouldn't be here
        throw new IllegalStateException("Service " + Constants.Kafka.SERVICE_NAME + " in cluster "
                + cluster.getName() + " not found but mapping information exists.");
    } catch (ServiceConfigurationNotFoundException e) {
        // there's a KAFKA service but not enough configuration info.
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return hintClusterMap;
}
From source file:com.kantenkugel.discordbot.jdocparser.JDoc.java
public static List<Documentation> getJava(final String name) {
    final String[] noArgNames = name.toLowerCase().split("\\(")[0].split("[#.]");
    String className = String.join(".", Arrays.copyOf(noArgNames, noArgNames.length - 1));
    String urlPath;
    synchronized (javaJavaDocs) {
        urlPath = javaJavaDocs.get(name.toLowerCase());
        if (urlPath == null)
            urlPath = javaJavaDocs.get(className);
        else
            className = name.toLowerCase();
        if (urlPath == null)
            return Collections.emptyList();
    }
    Map<String, JDocParser.ClassDocumentation> resultMap = new HashMap<>();
    InputStream is = null;
    try {
        Response res = Bot.httpClient
                .newCall(new Request.Builder().url(JDocUtil.JAVA_JDOCS_PREFIX + urlPath).get().build())
                .execute();
        if (!res.isSuccessful()) {
            JDocUtil.LOG.warn("OkHttp returned failure for java8 index: " + res.code());
            return Collections.emptyList();
        }
        is = res.body().byteStream();
        JDocParser.parse(JDocUtil.JAVA_JDOCS_PREFIX, urlPath, is, resultMap);
    } catch (Exception e) {
        JDocUtil.LOG.error("Error parsing java javadocs for {}", name, e);
    } finally {
        if (is != null)
            try {
                is.close();
            } catch (Exception ignored) {
            }
    }
    if (!resultMap.containsKey(className)) {
        JDocUtil.LOG.warn("Parser didn't return wanted docs");
        return Collections.emptyList();
    }
    JDocParser.ClassDocumentation doc = resultMap.get(className);
    if (noArgNames.length == 1 || className.equalsIgnoreCase(name))
        return Collections.singletonList(doc);
    String searchObj = name.toLowerCase().substring(className.length() + 1); // class name + separator dot
    if (doc.classValues.containsKey(searchObj)) {
        return Collections.singletonList(doc.classValues.get(searchObj));
    } else {
        boolean fuzzy = false;
        String fixedSearchObj = searchObj;
        if (fixedSearchObj.charAt(fixedSearchObj.length() - 1) != ')') {
            fixedSearchObj += "()";
            fuzzy = true;
        }
        String[] methodParts = fixedSearchObj.split("[()]");
        String methodName = methodParts[0];
        if (doc.methodDocs.containsKey(methodName.toLowerCase())) {
            return getMethodDocs(doc, methodName, fixedSearchObj, fuzzy);
        } else if (doc.inheritedMethods.containsKey(methodName.toLowerCase())) {
            return getJava(doc.inheritedMethods.get(methodName.toLowerCase()) + '.' + searchObj);
        }
        return Collections.emptyList();
    }
}
From source file:com.hortonworks.streamline.streams.catalog.topology.component.bundle.impl.KafkaSinkBundleHintProvider.java
@Override
public Map<String, Object> getHintsOnCluster(Cluster cluster) {
    Map<String, Object> hintClusterMap = new HashMap<>();
    try (KafkaMetadataService kafkaMetadataService = KafkaMetadataService.newInstance(environmentService,
            cluster.getId())) {
        KafkaMetadataService.Topics topics = kafkaMetadataService.getTopicsFromZk();
        hintClusterMap.put(FIELD_NAME_TOPIC, topics.getTopics());

        KafkaMetadataService.BrokersInfo<HostPort> brokerHosts = kafkaMetadataService
                .getBrokerHostPortFromStreamsJson(cluster.getId());
        List<HostPort> hosts = brokerHosts.getInfo();
        if (hosts != null && !hosts.isEmpty()) {
            List<String> bootstrapServerList = hosts.stream()
                    .map(hostPort -> String.format("%s:%d", hostPort.getHost(), hostPort.getPort()))
                    .collect(toList());
            hintClusterMap.put(FIELD_NAME_BOOTSTRAP_SERVERS, String.join(",", bootstrapServerList));
        }

        String protocol = kafkaMetadataService.getProtocolFromStreamsJson(cluster.getId());
        if (!StringUtils.isEmpty(protocol)) {
            hintClusterMap.put(FIELD_NAME_SECURITY_PROTOCOL, protocol);
        }
    } catch (ServiceNotFoundException e) {
        // we access it from mapping information so shouldn't be here
        throw new IllegalStateException("Service " + SERVICE_NAME + " in cluster " + cluster.getName()
                + " not found but mapping information exists.");
    } catch (ServiceConfigurationNotFoundException e) {
        // there's a KAFKA service but not enough configuration info.
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return hintClusterMap;
}
From source file:business.services.RequestFormService.java
/**
 * Concatenates first name and last name of the user if user is not null;
 * returns the empty string otherwise.
 */
public static String getName(User user) {
    if (user == null) {
        return "";
    }
    List<String> parts = new ArrayList<>(2);
    if (user.getFirstName() != null && !user.getFirstName().trim().isEmpty()) {
        parts.add(user.getFirstName().trim());
    }
    if (user.getLastName() != null && !user.getLastName().trim().isEmpty()) {
        parts.add(user.getLastName().trim());
    }
    return String.join(" ", parts);
}
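Because String.join only inserts the delimiter between elements, a user with just a first name or just a last name produces a single-word result with no stray space, and an empty parts list yields the empty string.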
From source file:com.hortonworks.streamline.streams.runtime.storm.event.sample.StreamlineEventLogger.java
@Override
protected String buildLogMessage(EventInfo event) {
    String timestampStr = dateFormat.format(event.getTs());
    List<Object> values = event.getValues();
    if (!values.isEmpty()) {
        final Object eventObj = values.get(0);
        if (eventObj instanceof StreamlineEvent) {
            final StreamlineEvent slEvent = (StreamlineEvent) eventObj;

            Set<String> rootIds;
            if (EventCorrelationInjector.containsRootIds(slEvent)) {
                rootIds = EventCorrelationInjector.getRootIds(slEvent);
            } else {
                rootIds = Collections.emptySet();
            }

            Set<String> parentIds;
            if (EventCorrelationInjector.containsParentIds(slEvent)) {
                parentIds = EventCorrelationInjector.getParentIds(slEvent);
            } else {
                parentIds = Collections.emptySet();
            }

            // Date, Marker, Component Name (Streamline), Event ID, Root IDs, Parent IDs,
            // Event Fields, Header KV, Aux. Fields KV
            // use DELIMITER to let parser understand it more easily
            String format = String.join(DELIMITER,
                    new String[] { "%s", "%s", "%s", "%s", "%s", "%s", "%s", "%s", "%s" });
            return String.format(format, timestampStr, MARKER_FOR_STREAMLINE_EVENT,
                    StormTopologyUtil.extractStreamlineComponentName(event.getComponent()), slEvent.getId(),
                    rootIds, parentIds, ImmutableMap.copyOf(slEvent), slEvent.getHeader().toString(),
                    slEvent.getAuxiliaryFieldsAndValues().toString());
        }
    }

    // Date, Marker, Component Name (Storm), task ID, Message ID, Values
    // use comma-separated delimiter since this is not for machine, but for users
    Object messageId = event.getMessageId();
    return String.format("%s,%s,%s,%s,%s,%s", timestampStr, MARKER_FOR_OTHER_EVENT, event.getComponent(),
            String.valueOf(event.getTask()), (messageId == null ? "" : messageId.toString()), values);
}
From source file:it.polimi.diceH2020.SPACE4CloudWS.ml.MLPredictor.java
private String readJsonFile(String file) throws IOException {
    InputStream inputStream = getClass().getResourceAsStream(file);
    if (inputStream != null) {
        return IOUtils.toString(inputStream, StandardCharsets.UTF_8.name());
    }
    inputStream = getClass().getResourceAsStream("/" + file);
    if (inputStream != null) {
        return IOUtils.toString(inputStream, StandardCharsets.UTF_8.name());
    }
    return String.join("\n", Files.readAllLines(Paths.get(file)));
}
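Note that Files.readAllLines strips line terminators, so joining the list with "\n" reconstitutes the file content with LF separators and no trailing newline.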
From source file:net.sf.jabref.importer.fileformat.JSONEntryParser.java
/**
 * Convert a JSONObject containing a bibJSON entry to a BibEntry
 *
 * @param bibJsonEntry The JSONObject to convert
 * @return the converted BibEntry
 */
public BibEntry parseBibJSONtoBibtex(JSONObject bibJsonEntry) {
    // Fields that are directly accessible at the top level BibJson object
    String[] singleFieldStrings = { FieldName.YEAR, FieldName.TITLE, FieldName.ABSTRACT, FieldName.MONTH };

    // Fields that are accessible in the journal part of the BibJson object
    String[] journalSingleFieldStrings = { FieldName.PUBLISHER, FieldName.NUMBER, FieldName.VOLUME };

    BibEntry entry = new BibEntry();
    entry.setType("article");

    // Authors
    if (bibJsonEntry.has("author")) {
        JSONArray authors = bibJsonEntry.getJSONArray("author");
        List<String> authorList = new ArrayList<>();
        for (int i = 0; i < authors.length(); i++) {
            if (authors.getJSONObject(i).has("name")) {
                authorList.add(authors.getJSONObject(i).getString("name"));
            } else {
                LOGGER.info("Empty author name.");
            }
        }
        entry.setField(FieldName.AUTHOR, String.join(" and ", authorList));
    } else {
        LOGGER.info("No author found.");
    }

    // Directly accessible fields
    for (String field : singleFieldStrings) {
        if (bibJsonEntry.has(field)) {
            entry.setField(field, bibJsonEntry.getString(field));
        }
    }

    // Page numbers
    if (bibJsonEntry.has("start_page")) {
        if (bibJsonEntry.has("end_page")) {
            entry.setField(FieldName.PAGES,
                    bibJsonEntry.getString("start_page") + "--" + bibJsonEntry.getString("end_page"));
        } else {
            entry.setField(FieldName.PAGES, bibJsonEntry.getString("start_page"));
        }
    }

    // Journal
    if (bibJsonEntry.has("journal")) {
        JSONObject journal = bibJsonEntry.getJSONObject("journal");
        // Journal title
        if (journal.has("title")) {
            entry.setField(FieldName.JOURNAL, journal.getString("title"));
        } else {
            LOGGER.info("No journal title found.");
        }
        // Other journal-related fields
        for (String field : journalSingleFieldStrings) {
            if (journal.has(field)) {
                entry.setField(field, journal.getString(field));
            }
        }
    } else {
        LOGGER.info("No journal information found.");
    }

    // Keywords
    if (bibJsonEntry.has("keywords")) {
        JSONArray keywords = bibJsonEntry.getJSONArray("keywords");
        LinkedHashSet<String> keywordList = new LinkedHashSet<>();
        for (int i = 0; i < keywords.length(); i++) {
            if (!keywords.isNull(i)) {
                keywordList.add(keywords.getString(i));
            }
        }
        entry.putKeywords(keywordList, Globals.prefs.get(JabRefPreferences.KEYWORD_SEPARATOR));
    }

    // Identifiers
    if (bibJsonEntry.has("identifier")) {
        JSONArray identifiers = bibJsonEntry.getJSONArray("identifier");
        for (int i = 0; i < identifiers.length(); i++) {
            String type = identifiers.getJSONObject(i).getString("type");
            if ("doi".equals(type)) {
                entry.setField(FieldName.DOI, identifiers.getJSONObject(i).getString("id"));
            } else if ("pissn".equals(type)) {
                entry.setField(FieldName.ISSN, identifiers.getJSONObject(i).getString("id"));
            } else if ("eissn".equals(type)) {
                entry.setField(FieldName.ISSN, identifiers.getJSONObject(i).getString("id"));
            }
        }
    }

    // Links
    if (bibJsonEntry.has("link")) {
        JSONArray links = bibJsonEntry.getJSONArray("link");
        for (int i = 0; i < links.length(); i++) {
            if (links.getJSONObject(i).has("type")) {
                String type = links.getJSONObject(i).getString("type");
                if ("fulltext".equals(type) && links.getJSONObject(i).has("url")) {
                    entry.setField(FieldName.URL, links.getJSONObject(i).getString("url"));
                }
            }
        }
    }

    return entry;
}
From source file:co.runrightfast.vertx.core.eventbus.EventBusAddress.java
/**
 * Address format follows a URI path convention. The address path is prefixed with the verticle id's group and name.
 *
 * e.g., eventBusAddress(verticleId, "path1", "path2", "path3") returns "/runrightfast/verticle-manager/path1/path2/path3"
 *
 * where the verticle id group is "runrightfast" and the verticle id name is "verticle-manager".
 *
 * @param verticleId REQUIRED verticleId
 * @param path REQUIRED
 * @param paths OPTIONAL
 * @return eventbus address
 */
public static String eventBusAddress(@NonNull final RunRightFastVerticleId verticleId, final String path,
        final String... paths) {
    checkArgument(isNotBlank(path));
    final StringBuilder sb = new StringBuilder(128).append('/').append(verticleId.getGroup()).append('/')
            .append(verticleId.getName()).append('/').append(path);
    if (ArrayUtils.isNotEmpty(paths)) {
        checkArgument(!Arrays.stream(paths).filter(StringUtils::isBlank).findFirst().isPresent());
        sb.append('/').append(String.join("/", paths));
    }
    return sb.toString();
}
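Since paths is a String[], the call String.join("/", paths) resolves to the varargs overload String.join(CharSequence, CharSequence...), the companion of the Iterable overload shown at the top of this list.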