List of usage examples for java.util Set addAll
boolean addAll(Collection<? extends E> c);
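Before the source-file examples, here is a minimal, self-contained sketch of the basic contract of addAll: it returns true only if the set actually changed. The class and variable names are illustrative only and do not come from the examples below.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SetAddAllDemo {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<>(Arrays.asList("red", "green"));
        List<String> more = Arrays.asList("green", "blue");

        // returns true because "blue" was not already present
        boolean changed = colors.addAll(more);
        System.out.println(changed); // true
        System.out.println(colors);  // e.g. [red, green, blue] (HashSet order is not guaranteed)

        // adding the same elements again leaves the set unchanged, so addAll returns false
        System.out.println(colors.addAll(more)); // false
    }
}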
From source file:Main.java
public static Set<Node> findChildElementsByTag(Node node, String tag) {
    final Set<Node> result = new LinkedHashSet<>();
    for (Node child = node.getFirstChild(); child != null; child = child.getNextSibling()) {
        if (tag.equals(child.getNodeName())) {
            result.add(child);
        } else if (child.hasChildNodes()) {
            result.addAll(findChildElementsByTag(child, tag));
        }
    }
    return result;
}
From source file:com.espertech.esper.event.vaevent.PropertyUtility.java
/**
 * Remove from values all removeValues and build a unique sorted result array.
 *
 * @param values       values to consider
 * @param removeValues values to remove from values
 * @return sorted unique values
 */
protected static String[] uniqueExclusiveSort(String[] values, String[] removeValues) {
    Set<String> unique = new HashSet<String>();
    unique.addAll(Arrays.asList(values));
    for (String removeValue : removeValues) {
        unique.remove(removeValue);
    }
    String[] uniqueArr = unique.toArray(new String[unique.size()]);
    Arrays.sort(uniqueArr);
    return uniqueArr;
}
From source file:net.audumla.climate.ClimateDataFactory.java
/**
 * Replaces the climate data with a readonly version.
 *
 * @param cd the existing climate data bean
 * @return the climate data
 */
public static ClimateData convertToReadOnlyClimateData(ClimateData cd) {
    if (cd == null) {
        return null;
    }
    Set<Class<?>> interfaces = new LinkedHashSet<Class<?>>();
    interfaces.addAll(ClassUtils.getAllInterfaces(cd.getClass()));
    interfaces.remove(WritableClimateData.class);
    return BeanUtils.convertBean(cd, interfaces.toArray(new Class<?>[interfaces.size()]));
}
From source file:net.audumla.climate.ClimateDataFactory.java
/**
 * Replaces the climate observation with a writable version.
 *
 * @param cd the existing climate observation bean
 * @return the writable climate observation
 */
public static WritableClimateObservation convertToWritableClimateObservation(ClimateObservation cd) {
    if (cd == null) {
        return null;
    }
    Set<Class<?>> interfaces = new LinkedHashSet<Class<?>>();
    interfaces.addAll(ClassUtils.getAllInterfaces(cd.getClass()));
    return BeanUtils.convertBean(cd, WritableClimateObservation.class,
            interfaces.toArray(new Class<?>[interfaces.size()]));
}
From source file:net.audumla.climate.ClimateDataFactory.java
/**
 * Replaces the climate data with a writable version.
 *
 * @param cd the existing climate data bean
 * @return the climate data
 */
public static WritableClimateData convertToWritableClimateData(ClimateData cd) {
    if (cd == null) {
        return null;
    }
    Set<Class<?>> interfaces = new LinkedHashSet<Class<?>>();
    interfaces.addAll(ClassUtils.getAllInterfaces(cd.getClass()));
    return BeanUtils.convertBean(cd, WritableClimateData.class,
            interfaces.toArray(new Class<?>[interfaces.size()]));
}
From source file:net.audumla.climate.ClimateDataFactory.java
/**
 * Replaces the climate observation with a readonly version.
 *
 * @param cd the existing climate observation bean
 * @return the climate observation
 */
public static ClimateObservation convertToReadOnlyClimateObservation(ClimateObservation cd) {
    if (cd == null) {
        return null;
    }
    Set<Class<?>> interfaces = new LinkedHashSet<Class<?>>();
    interfaces.addAll(ClassUtils.getAllInterfaces(cd.getClass()));
    interfaces.remove(WritableClimateObservation.class);
    return BeanUtils.convertBean(cd, interfaces.toArray(new Class<?>[interfaces.size()]));
}
From source file:Main.java
/**
 * Returns a {@link Collection} containing the intersection of the given
 * {@link Collection}s.
 * <p>
 * The cardinality of each element in the returned {@link Collection} will
 * be equal to the minimum of the cardinality of that element in the two
 * given {@link Collection}s.
 *
 * @param a the first collection, must not be null
 * @param b the second collection, must not be null
 * @return the intersection of the two collections
 * @see Collection#retainAll
 * @see #containsAny
 */
public static <E> Collection<E> intersection(final Collection<E> a, final Collection<E> b) {
    if (a == null || b == null) {
        return null;
    }
    List<E> list = getArrayList();
    Map<E, Integer> mapa = getCardinalityMap(a);
    Map<E, Integer> mapb = getCardinalityMap(b);
    Set<E> elts = getHashSet(a);
    elts.addAll(b);
    for (E e : elts) {
        for (int i = 0, m = Math.min(getFreq(e, mapa), getFreq(e, mapb)); i < m; i++) {
            list.add(e);
        }
    }
    return list;
}
From source file:Main.java
/**
 * Parses a string parameter value into a set of strings.
 *
 * @param values The values of the set.
 * @return The set.
 */
public static Set<String> parseParameterList(String values) {
    Set<String> result = new TreeSet<String>();
    if (values != null && values.trim().length() > 0) {
        // the spec says the scope is separated by spaces, but Facebook uses commas,
        // so we'll include commas, too.
        String[] tokens = values.split("[\\s+,]");
        result.addAll(Arrays.asList(tokens));
    }
    return result;
}
From source file:Main.java
/**
 * Returns a {@link Collection} containing the exclusive disjunction
 * (symmetric difference) of the given {@link Collection}s.
 * <p>
 * The cardinality of each element <i>e</i> in the returned
 * {@link Collection} will be equal to
 * <tt>max(cardinality(<i>e</i>,<i>a</i>),cardinality(<i>e</i>,<i>b</i>)) - min(cardinality(<i>e</i>,<i>a</i>),cardinality(<i>e</i>,<i>b</i>))</tt>.
 * <p>
 * This is equivalent to
 * <tt>{@link #subtract subtract}({@link #union union(a,b)},{@link #intersection intersection(a,b)})</tt>
 * or
 * <tt>{@link #union union}({@link #subtract subtract(a,b)},{@link #subtract subtract(b,a)})</tt>.
 *
 * @param a the first collection, must not be null
 * @param b the second collection, must not be null
 * @return the symmetric difference of the two collections
 */
public static <E> Collection<E> disjunction(final Collection<E> a, final Collection<E> b) {
    if (a == null || b == null) {
        return null;
    }
    List<E> list = getArrayList();
    Map<E, Integer> mapa = getCardinalityMap(a);
    Map<E, Integer> mapb = getCardinalityMap(b);
    Set<E> elts = getHashSet(a);
    elts.addAll(b);
    for (E e : elts) {
        for (int i = 0, m = Math.max(getFreq(e, mapa), getFreq(e, mapb))
                - Math.min(getFreq(e, mapa), getFreq(e, mapb)); i < m; i++) {
            list.add(e);
        }
    }
    return list;
}
From source file:edu.usc.polar.CompositeNERAgreementParser.java
public static void CompositeNER(String doc, String args[]) {
    try {
        String text;
        AutoDetectParser parser = new AutoDetectParser();
        BodyContentHandler handler = new BodyContentHandler();
        Metadata metadata = new Metadata();
        InputStream stream = new FileInputStream(doc);
        // System.out.println(stream.toString());
        parser.parse(stream, handler, metadata);
        // return handler.toString();
        text = handler.toString();
        String metaValue = metadata.toString();
        System.out.println(metaValue + "Desc:: " + metadata.get("description"));

        String[] example = new String[1];
        example[0] = text;
        String name = doc.replace("C:\\Users\\Snehal\\Documents\\TREC-Data\\Data", "polar.usc.edu");
        name = name.replace("\\", ".");

        Map<String, Set<String>> list = getCoreNLP(text);
        Map<String, Set<String>> list1 = getOpenNLP(text);
        Map<String, Set<String>> list2 = getNLTKRest(text);

        Set<String> NLTKRestSet = combineSets(list2);
        Set<String> coreNLPSet = combineSets(list);
        Set<String> openNLPSet = combineSets(list1);

        /*
        System.out.println("list coreNLP" + JSONStringify(coreNLPSet).toJSONString());
        System.out.println("list openNLPSet" + openNLPSet);
        System.out.println("list NLTKRestSet" + NLTKRestSet);
        */

        JSONObject jsonObj = new JSONObject();
        jsonObj.put("DOI", name);
        jsonObj.put("OpenNLP", JSONStringify(openNLPSet));
        jsonObj.put("NLTKRest", JSONStringify(NLTKRestSet));
        jsonObj.put("CoreNLP", JSONStringify(coreNLPSet));

        Set<String> union = new HashSet();
        union.addAll(NLTKRestSet);
        union.addAll(coreNLPSet);
        union.addAll(openNLPSet);
        jsonObj.put("Union", JSONStringify(union));

        Set<String> intersection = new HashSet();
        intersection.addAll(union);
        intersection.retainAll(coreNLPSet);
        intersection.retainAll(openNLPSet);
        intersection.retainAll(NLTKRestSet);
        jsonObj.put("Agreement", JSONStringify(intersection));

        /*
        System.out.println(name + "\n" + openNLPSet.size() + openNLPSet.toString()
                + "\n" + coreNLPSet.size() + coreNLPSet.toString()
                + "\n" + NLTKRestSet.size() + NLTKRestSet.toArray()
                + "\n" + intersection.size() + intersection.toArray()
                + "\n" + union.size() + union.toArray());
        */
        // jsonObj.put("metadata", metaValue.replaceAll("\\s\\s+|\n|\t", " "));

        jsonArray.add(jsonObj);
        if (intersection.size() > 0) {
            jsonAgree.add(jsonObj);
            JSONArray jArr = new JSONArray();
            jArr.add(jsonObj);
            metadata.add("CompositeNER", jArr.toJSONString());
        }
    } catch (Exception e) {
        System.out.println("ERROR : OpenNLP" + "|File Name"
                + doc.replaceAll("C:\\Users\\Snehal\\Documents\\TREC-Data", "") + " direct" + e.toString());
    }
}