List of usage examples for java.util LinkedHashSet add
boolean add(E e);
From source file:edu.stanford.muse.groups.SimilarGroupMethods.java
/**
 * Selects the groups whose size meets or exceeds the given minimum.
 *
 * @param groups  the candidate groups to filter
 * @param minSize the minimum group size (inclusive) required for selection
 * @return the qualifying groups, preserving the iteration order of {@code groups}
 */
private static <T extends Comparable<? super T>> Set<SimilarGroup<T>> selectGroupsWithMinSize(
        Collection<SimilarGroup<T>> groups, int minSize) {
    LinkedHashSet<SimilarGroup<T>> selected = new LinkedHashSet<SimilarGroup<T>>();
    for (SimilarGroup<T> group : groups) {
        if (group.size() >= minSize) {
            selected.add(group);
        }
    }
    return selected;
}
From source file:com.cenrise.test.azkaban.PropsUtils.java
/**
 * Resolves all variable replacements and expressions in the given Props.
 * Null values are replaced with the empty string (with a warning), then
 * every value has its variable references substituted, and finally every
 * resolved value has its expressions evaluated.
 *
 * @param props the properties to resolve; may be null
 * @return a new Props instance with all values resolved, or null if {@code props} is null
 */
public static Props resolveProps(final Props props) {
    if (props == null) {
        return null;
    }
    final Props resolvedProps = new Props();
    for (final String key : props.getKeySet()) {
        String value = props.get(key);
        if (value == null) {
            logger.warn("Null value in props for key '" + key + "'. Replacing with empty string.");
            value = "";
        }
        // Track keys visited during replacement (cycle detection); seed with this key.
        final LinkedHashSet<String> visitedVariables = new LinkedHashSet<>();
        visitedVariables.add(key);
        resolvedProps.put(key, resolveVariableReplacement(value, props, visitedVariables));
    }
    // Second pass: evaluate expressions on the fully substituted values.
    for (final String key : resolvedProps.getKeySet()) {
        resolvedProps.put(key, resolveVariableExpression(resolvedProps.get(key)));
    }
    return resolvedProps;
}
From source file:de.vandermeer.skb.base.utils.Skb_STUtils.java
/** * Returns a set of detailed error messages for missing chunks * @param name name of the STGroup with missing templates (null is valid) * @param missingChunks list of missing chunks * @return a set of error messages detailing the problems, set is of size 0 if no missing chunks *//*from w w w . j a v a2 s. c o m*/ public static final Set<String> getMissingChunksErrorMessages(String name, Map<String, List<String>> missingChunks) { LinkedHashSet<String> ret = new LinkedHashSet<String>(); String msgST = "template group<if(%1)> \\<<%1>><endif> does not specify mandatory string template \\<<%2>>"; String msgArg = "template group<if(%1)> \\<<%1>><endif> with string template \\<<%2>> does not define argument \\<<%3>>"; Set<String> keys = missingChunks.keySet(); for (String missing : keys) { List<String> missingArgs = missingChunks.get(missing); if (missingArgs == null || missingArgs.size() == 0) { //template is missing altogether ret.add(ST.format(msgST, name, missing)); } else { // template exists but fails to specify an argument for (String arg : missingArgs) { ret.add(ST.format(msgArg, name, missing, arg)); } } } return ret; }
From source file:org.openhab.binding.network.service.NetworkService.java
/** * Takes the interfaceIPs and fetches every IP which can be assigned on their network * * @param networkIPs The IPs which are assigned to the Network Interfaces * @return Every single IP which can be assigned on the Networks the computer is connected to *//*from w w w.j a v a2s . c o m*/ private static LinkedHashSet<String> getNetworkIPs(TreeSet<String> interfaceIPs) { LinkedHashSet<String> networkIPs = new LinkedHashSet<String>(); for (Iterator<String> it = interfaceIPs.iterator(); it.hasNext();) { try { // gets every ip which can be assigned on the given network SubnetUtils utils = new SubnetUtils(it.next()); String[] addresses = utils.getInfo().getAllAddresses(); for (int i = 0; i < addresses.length; i++) { networkIPs.add(addresses[i]); } } catch (Exception ex) { } } return networkIPs; }
From source file:de.qaware.chronix.solr.query.analysis.SolrDocumentBuilder.java
/** * Merges to sets of time series attributes. * The result is set for each key holding the values. * If the other value is a collection, than all values * of the collection are added instead of the collection object. * * @param merged the merged attributes * @param attributes the attributes of the other time series */// www . ja v a2 s. com private static void merge(Map<String, Object> merged, Map<String, Object> attributes) { for (HashMap.Entry<String, Object> newEntry : attributes.entrySet()) { String key = newEntry.getKey(); //we ignore the version in the result if (key.equals(ChronixQueryParams.SOLR_VERSION_FIELD)) { continue; } if (!merged.containsKey(key)) { merged.put(key, new LinkedHashSet()); } LinkedHashSet values = (LinkedHashSet) merged.get(key); Object value = newEntry.getValue(); //Check if the value is a collection. //If it is a collection we add all values instead of adding a collection object if (value instanceof Collection && !values.contains(value)) { values.addAll((Collection) value); } else if (!values.contains(value)) { //Otherwise we have a single value or an array. values.add(value); } //otherwise we ignore the value } }
From source file:com.qmetry.qaf.automation.ui.UiDriverFactory.java
/**
 * Builds the set of WebDriver command listeners registered via configuration.
 * Reads both the dedicated WebDriver listener property and the generic QAF
 * listener property, instantiating each configured class reflectively; classes
 * that cannot be instantiated are logged and skipped.
 *
 * @return the distinct listeners, in configuration order
 */
private static Collection<QAFWebDriverCommandListener> getDriverListeners() {
    LinkedHashSet<QAFWebDriverCommandListener> listeners = new LinkedHashSet<QAFWebDriverCommandListener>();
    // Classes registered explicitly as WebDriver command listeners.
    String[] classNames = ConfigurationManager.getBundle()
            .getStringArray(ApplicationProperties.WEBDRIVER_COMMAND_LISTENERS.key);
    for (String listenr : classNames) {
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance().
            QAFWebDriverCommandListener cls = (QAFWebDriverCommandListener) Class.forName(listenr)
                    .getDeclaredConstructor().newInstance();
            listeners.add(cls);
        } catch (Exception e) {
            logger.error("Unable to register listener class " + listenr, e);
        }
    }
    // Generic QAF listeners that also implement the WebDriver command listener interface.
    classNames = ConfigurationManager.getBundle().getStringArray(ApplicationProperties.QAF_LISTENERS.key);
    for (String listener : classNames) {
        try {
            QAFListener cls = (QAFListener) Class.forName(listener).getDeclaredConstructor().newInstance();
            if (QAFWebDriverCommandListener.class.isAssignableFrom(cls.getClass()))
                listeners.add((QAFWebDriverCommandListener) cls);
        } catch (Exception e) {
            logger.error("Unable to register class as driver listener: " + listener, e);
        }
    }
    return listeners;
}
From source file:de._13ducks.cor.game.server.movement.SectorPathfinder.java
/**
 * The start and target nodes are reachable from far more than just the nodes of their own
 * polygon. This must already be taken into account during the basic computation; the path
 * optimization cannot fix it afterwards. So this searches for all nodes that are directly
 * reachable without an obstacle in between.
 *
 * @param from         search all edges fully contained in the mesh from here to neighboring nodes
 * @param basicPolygon the polygon the node lies in
 * @return all directly reachable nodes (the nodes of basicPolygon are of course included)
 */
private static Node[] computeDirectReachable(Node from, FreePolygon basicPolygon) {
    // This is a modified breadth-first search:
    LinkedList<FreePolygon> open = new LinkedList<FreePolygon>(); // queue of polygons still to examine
    LinkedHashSet<FreePolygon> openContains = new LinkedHashSet<FreePolygon>(); // members of open (faster lookup)
    LinkedHashSet<FreePolygon> closed = new LinkedHashSet<FreePolygon>();
    LinkedHashSet<Node> testedNodes = new LinkedHashSet<Node>();
    LinkedList<Node> result = new LinkedList<Node>();
    open.offer(basicPolygon); // start polygon
    openContains.add(basicPolygon);
    while (!open.isEmpty()) {
        // Process this polygon now
        FreePolygon poly = open.poll();
        openContains.remove(poly);
        closed.add(poly);
        boolean containsreachableNodes = false;
        // Examine all nodes of this polygon
        for (Node node : poly.getNodes()) {
            // Already known?
            if (result.contains(node)) {
                // Known and reachable
                containsreachableNodes = true;
            } else {
                if (testedNodes.contains(node)) {
                    // Known to be unreachable
                } else {
                    // Test it!
                    FreePolygon currentPoly = basicPolygon;
                    // Tentatively create an edge between from and node
                    Edge edge = new Edge(from, node);
                    // The following checks whether the new path "edge" is passable.
                    // lastNode prevents walking back when switching triangles:
                    Node lastNode = null;
                    boolean routeAllowed = true;
                    // Keep walking until we are in the target polygon
                    while (!node.getPolygons().contains(currentPoly)) {
                        // Check whether some side of currentPoly intersects the candidate route
                        List<Edge> edges = currentPoly.calcEdges();
                        Edge intersecting = null;
                        SimplePosition intersection = null;
                        for (Edge testedge : edges) {
                            // Is there an intersection here?
                            intersection = edge.intersectionWithEndsNotAllowed(testedge);
                            if (intersection != null && !intersection.equals(lastNode)) {
                                intersecting = testedge;
                                break;
                            }
                        }
                        // Candidate for the next polygon
                        FreePolygon nextPoly = null;
                        // Edge found
                        if (intersecting != null) {
                            // Find the polygon on the other side of this edge
                            nextPoly = getOtherPoly(intersecting.getStart(), intersecting.getEnd(), currentPoly);
                        }
                        if (intersecting != null && nextPoly != null) {
                            // We found an intersection point and an edge, so we are now in a new polygon
                            // An extra node is required for the crossing point
                            Node extraNode = intersection.toNode();
                            extraNode.addPolygon(nextPoly);
                            extraNode.addPolygon(currentPoly);
                            lastNode = extraNode;
                            currentPoly = nextPoly;
                            // The next loop iteration will examine the next polygon
                        } else {
                            // Unfortunately there was no enterable polygon here.
                            // That means we can abort the search; there is no direct path
                            routeAllowed = false;
                            break;
                        }
                    }
                    // If the new path was valid, record it. Otherwise continue with the next node
                    if (routeAllowed) {
                        // Into the allowed list:
                        result.add(node);
                        testedNodes.add(node);
                        containsreachableNodes = true;
                    } else {
                        testedNodes.add(node);
                    }
                }
            }
        }
        // Only go deeper if at least one node of this polygon was reachable
        if (containsreachableNodes) {
            // Examine all neighbors:
            for (FreePolygon n : poly.getNeighbors()) {
                // Already known/processed?
                if (!openContains.contains(n) && !closed.contains(n)) {
                    // No, so schedule it for processing too
                    open.add(n);
                    openContains.add(n);
                }
            }
        }
    }
    return result.toArray(new Node[0]);
}
From source file:com.bytelightning.opensource.pokerface.ScriptHelperImpl.java
/** * @see AddMimeEntry/*from w ww. j a va2 s .co m*/ */ private static void AddMimeEntryImpl(String type, String extensions) { LinkedHashSet<String> extSet = new LinkedHashSet<String>(); MimeEntry entry = MimeHashTable.find(type); if (entry == null) entry = new sun.net.www.MimeEntry(type.intern()); // Ensure the type is an interned string entry.setType(type.intern()); String[] existing = entry.getExtensions(); if (existing != null) for (String ext : existing) extSet.add(ext); String[] additional = extensions.split(","); for (int i = 0; i < additional.length; i++) { additional[i] = additional[i].trim().toLowerCase(); if (additional[i].length() == 0) throw new RuntimeException("Invalid mime extensions for: " + type); if (additional[i].charAt(0) != '.') throw new RuntimeException("mime extensions must start with a '.' (" + type + ")"); extSet.add(additional[i]); } StringBuffer sb = new StringBuffer(); for (String ext : extSet) { if (sb.length() > 0) sb.append(','); sb.append(ext); } entry.setExtensions(sb.toString()); // This little hack ensures that the MimeEntry itself has interned strings in it's list. Yes it's a trade off between bad practice and speed. String[] processed = entry.getExtensions(); for (int i = 0; i < processed.length; i++) processed[i] = processed[i].intern(); }
From source file:net.yacy.document.TextParser.java
private static void initParser(final Parser parser) { String prototypeMime = null;//from w w w. ja v a 2 s . com for (final String mime : parser.supportedMimeTypes()) { // process the mime types final String mimeType = normalizeMimeType(mime); if (prototypeMime == null) prototypeMime = mimeType; LinkedHashSet<Parser> p0 = mime2parser.get(mimeType); if (p0 == null) { p0 = new LinkedHashSet<Parser>(); mime2parser.put(mimeType, p0); } p0.add(parser); AbstractParser.log.info("Parser for mime type '" + mimeType + "': " + parser.getName()); } if (prototypeMime != null) for (String ext : parser.supportedExtensions()) { ext = ext.toLowerCase(Locale.ROOT); final String s = ext2mime.get(ext); if (s != null && !s.equals(prototypeMime)) AbstractParser.log.info("Parser for extension '" + ext + "' was set to mime '" + s + "', overwriting with new mime '" + prototypeMime + "'."); ext2mime.put(ext, prototypeMime); } for (String ext : parser.supportedExtensions()) { // process the extensions ext = ext.toLowerCase(Locale.ROOT); LinkedHashSet<Parser> p0 = ext2parser.get(ext); if (p0 == null) { p0 = new LinkedHashSet<Parser>(); ext2parser.put(ext, p0); } p0.add(parser); AbstractParser.log.info("Parser for extension '" + ext + "': " + parser.getName()); } }
From source file:org.jahia.modules.defaultmodule.StartPublicationWorkflowAction.java
/**
 * Starts a publication workflow process for the current resource as a background job.
 * Reads the workflow to launch from the "process" request parameter, collects the
 * publication info for the resource's language, and hands both to the workflow service.
 *
 * @param req           the current HTTP request
 * @param renderContext the current render context
 * @param resource      the resource the workflow is started for
 * @param session       the JCR session to run the workflow in
 * @param parameters    action parameters; must contain "process" as "<provider>:<definitionKey>"
 * @param urlResolver   the URL resolver for the request
 * @return {@code ActionResult.OK_JSON} once the job has been scheduled
 * @throws Exception if reading publication info or starting the process fails
 */
public ActionResult doExecute(HttpServletRequest req, RenderContext renderContext, Resource resource,
        JCRSessionWrapper session, Map<String, List<String>> parameters, URLResolver urlResolver)
        throws Exception {
    // "process" is expected in the form "<providerKey>:<workflowDefinitionKey>".
    String process = parameters.get("process").get(0);
    String workflowDefinitionKey = StringUtils.substringAfter(process, ":");
    String providerKey = StringUtils.substringBefore(process, ":");
    Map<String, Object> map = getVariablesMap(parameters);
    // Publish only the language of the current resource.
    final LinkedHashSet<String> languages = new LinkedHashSet<String>();
    languages.add(resource.getLocale().toString());
    // Collect publication info from the resource's workspace towards "live".
    final List<PublicationInfo> infoList = publicationService.getPublicationInfo(
            resource.getNode().getIdentifier(), languages, true, true, false,
            resource.getNode().getSession().getWorkspace().getName(), "live");
    map.put("publicationInfos", infoList);
    // Start the workflow asynchronously as a background job.
    workflowService.startProcessAsJob(Arrays.asList(resource.getNode().getIdentifier()), session,
            workflowDefinitionKey, providerKey, map, null);
    return ActionResult.OK_JSON;
}