List of usage examples for java.util.HashSet.addAll
boolean addAll(Collection<? extends E> c);
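Per the Set contract, addAll adds only elements not already present and returns true exactly when the set changed. A minimal sketch of that behavior (class name is illustrative):

import java.util.Arrays;
import java.util.HashSet;

public class AddAllReturnValue {
    public static void main(String[] args) {
        HashSet<String> set = new HashSet<String>(Arrays.asList("A", "B"));
        // "B" is already present, "C" is new -> the set changes, so addAll returns true
        System.out.println(set.addAll(Arrays.asList("B", "C"))); // true
        // nothing new to add -> the set is unchanged, so addAll returns false
        System.out.println(set.addAll(Arrays.asList("A", "C"))); // false
    }
}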
From source file:Main.java
public static void main(String[] a) {
    String elements[] = { "A", "B", "C", "D", "E" };
    HashSet<String> set = new HashSet<String>(Arrays.asList(elements));
    elements = new String[] { "E", "F" };
    set.addAll(Arrays.asList(elements));
    System.out.println(set);
}
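Because "E" is already in the set, only "F" is actually added; the output contains the six distinct elements in an unspecified iteration order, e.g. [D, E, F, A, B, C].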
From source file:Main.java
public static void main(String[] a) {
    String elements[] = { "A", "B", "C", "D", "E" };
    HashSet<String> set = new HashSet<String>(Arrays.asList(elements));
    elements = new String[] { "E", "F" };
    set.addAll(Arrays.asList(elements));
    System.out.println(set);
    set.clear();
    System.out.println(set);
}
From source file:org.apache.nutch.tools.proxy.TestbedProxy.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    if (args.length == 0) {
        System.err.println(
                "TestbedProxy [-seg <segment_name> | -segdir <segments>] [-port <nnn>] [-forward] [-fake] [-delay nnn] [-debug]");
        System.err.println("-seg <segment_name>\tpath to a single segment (can be specified multiple times)");
        System.err.println("-segdir <segments>\tpath to a parent directory of multiple segments (as above)");
        System.err.println(
                "-port <nnn>\trun the proxy on port <nnn> (special permissions may be needed for ports < 1024)");
        System.err.println("-forward\tif specified, requests to all unknown urls will be passed to");
        System.err.println("\t\toriginal servers. If false (default) unknown urls generate 404 Not Found.");
        System.err.println(
                "-delay\tdelay every response by nnn seconds. If delay is negative use a random value up to nnn");
        System.err.println("-fake\tif specified, requests to all unknown urls will succeed with fake content");
        System.exit(-1);
    }

    Configuration conf = NutchConfiguration.create();
    int port = conf.getInt("segment.proxy.port", 8181);
    boolean forward = false;
    boolean fake = false;
    boolean delay = false;
    boolean debug = false;
    int delayVal = 0;

    HashSet<Path> segs = new HashSet<Path>();
    for (int i = 0; i < args.length; i++) {
        if (args[i].equals("-segdir")) {
            FileSystem fs = FileSystem.get(conf);
            FileStatus[] fstats = fs.listStatus(new Path(args[++i]));
            Path[] paths = HadoopFSUtil.getPaths(fstats);
            segs.addAll(Arrays.asList(paths));
        } else if (args[i].equals("-port")) {
            port = Integer.parseInt(args[++i]);
        } else if (args[i].equals("-forward")) {
            forward = true;
        } else if (args[i].equals("-delay")) {
            delay = true;
            delayVal = Integer.parseInt(args[++i]);
        } else if (args[i].equals("-fake")) {
            fake = true;
        } else if (args[i].equals("-debug")) {
            debug = true;
        } else if (args[i].equals("-seg")) {
            segs.add(new Path(args[++i]));
        } else {
            LOG.fatal("Unknown argument: " + args[i]);
            System.exit(-1);
        }
    }

    // Create the server
    Server server = new Server();
    SocketConnector connector = new SocketConnector();
    connector.setPort(port);
    connector.setResolveNames(false);
    server.addConnector(connector);

    // create a list of handlers
    HandlerList list = new HandlerList();
    server.addHandler(list);
    if (debug) {
        LOG.info("* Added debug handler.");
        list.addHandler(new LogDebugHandler());
    }
    if (delay) {
        LOG.info("* Added delay handler: "
                + (delayVal < 0 ? "random delay up to " + (-delayVal) : "constant delay of " + delayVal));
        list.addHandler(new DelayHandler(delayVal));
    }

    // XXX alternatively, we can add the DispatchHandler as the first one,
    // XXX to activate handler plugins and redirect requests to appropriate
    // XXX handlers ... Here we always load these handlers

    Iterator<Path> it = segs.iterator();
    while (it.hasNext()) {
        Path p = it.next();
        try {
            SegmentHandler segment = new SegmentHandler(conf, p);
            list.addHandler(segment);
            LOG.info("* Added segment handler for: " + p);
        } catch (Exception e) {
            LOG.warn("Skipping segment '" + p + "': " + StringUtils.stringifyException(e));
        }
    }
    if (forward) {
        LOG.info("* Adding forwarding proxy for all unknown urls ...");
        ServletHandler servlets = new ServletHandler();
        servlets.addServletWithMapping(AsyncProxyServlet.class, "/*");
        servlets.addFilterWithMapping(LogDebugHandler.class, "/*", Handler.ALL);
        list.addHandler(servlets);
    }
    if (fake) {
        LOG.info("* Added fake handler for remaining URLs.");
        list.addHandler(new FakeHandler());
    }
    list.addHandler(new NotFoundHandler());

    // Start the http server
    server.start();
    server.join();
}
From source file:biomine.nodeimportancecompression.ImportanceCompressionReport.java
public static void main(String[] args) throws IOException, java.text.ParseException {
    opts.addOption("algorithm", true,
            "Used algorithm for compression. Possible values are 'brute-force', "
                    + "'brute-force-edges','brute-force-merges','randomized','randomized-merges',"
                    + "'randomized-edges','fast-brute-force',"
                    + "'fast-brute-force-merges','fast-brute-force-merge-edges'. Default is 'brute-force'.");
    opts.addOption("query", true, "Query nodes ids, separated by comma.");
    opts.addOption("queryfile", true, "Read query nodes from file.");
    opts.addOption("ratio", true, "Goal ratio");
    opts.addOption("importancefile", true, "Read importances straight from file");
    opts.addOption("keepedges", false, "Don't remove edges during merges");
    opts.addOption("connectivity", false, "Compute and output connectivities in edge oriented case");
    opts.addOption("paths", false, "Do path oriented compression");
    opts.addOption("edges", false, "Do edge oriented compression");

    double sigma = 1.0;
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(opts, args);
    } catch (ParseException e) {
        e.printStackTrace();
        System.exit(0);
    }

    String queryStr = cmd.getOptionValue("query");
    String[] queryNodeIDs = {};
    double[] queryNodeIMP = {};
    if (queryStr != null) {
        queryNodeIDs = queryStr.split(",");
        queryNodeIMP = new double[queryNodeIDs.length];
        for (int i = 0; i < queryNodeIDs.length; i++) {
            String s = queryNodeIDs[i];
            String[] es = s.split("=");
            queryNodeIMP[i] = 1;
            if (es.length == 2) {
                queryNodeIDs[i] = es[0];
                queryNodeIMP[i] = Double.parseDouble(es[1]);
            } else if (es.length > 2) {
                System.out.println("Too many '=' in querynode specification: " + s);
            }
        }
    }

    String queryFile = cmd.getOptionValue("queryfile");
    Map<String, Double> queryNodes = Collections.emptyMap();
    if (queryFile != null) {
        File in = new File(queryFile);
        BufferedReader read = new BufferedReader(new FileReader(in));
        queryNodes = readMap(read);
        read.close();
    }

    String impfile = cmd.getOptionValue("importancefile");
    Map<String, Double> importances = null;
    if (impfile != null) {
        File in = new File(impfile);
        BufferedReader read = new BufferedReader(new FileReader(in));
        importances = readMap(read);
        read.close();
    }

    String algoStr = cmd.getOptionValue("algorithm");
    CompressionAlgorithm algo = null;
    if (algoStr == null || algoStr.equals("brute-force")) {
        algo = new BruteForceCompression();
    } else if (algoStr.equals("brute-force-edges")) {
        algo = new BruteForceCompressionOnlyEdges();
    } else if (algoStr.equals("brute-force-merges")) {
        algo = new BruteForceCompressionOnlyMerges();
    } else if (algoStr.equals("fast-brute-force-merges")) {
        algo = new FastBruteForceCompression(true, false);
    } else if (algoStr.equals("fast-brute-force-edges")) {
        algo = new FastBruteForceCompression(false, true);
    } else if (algoStr.equals("fast-brute-force")) {
        algo = new FastBruteForceCompression(true, true);
    } else if (algoStr.equals("randomized-edges")) {
        algo = new RandomizedCompressionOnlyEdges();
    } else if (algoStr.equals("randomized")) {
        algo = new RandomizedCompression();
    } else if (algoStr.equals("randomized-merges")) {
        algo = new RandomizedCompressionOnlyMerges();
    } else {
        System.out.println("Unsupported algorithm: " + algoStr);
        printHelp();
    }

    String ratioStr = cmd.getOptionValue("ratio");
    double ratio = 0;
    if (ratioStr != null) {
        ratio = Double.parseDouble(ratioStr);
    } else {
        System.out.println("Goal ratio not specified");
        printHelp();
    }

    String infile = null;
    if (cmd.getArgs().length != 0) {
        infile = cmd.getArgs()[0];
    } else {
        printHelp();
    }

    BMGraph bmg = BMGraphUtils.readBMGraph(new File(infile));
    HashMap<BMNode, Double> queryBMNodes = new HashMap<BMNode, Double>();
    for (String id : queryNodes.keySet()) {
        queryBMNodes.put(bmg.getNode(id), queryNodes.get(id));
    }

    long startMillis = System.currentTimeMillis();
    ImportanceGraphWrapper wrap = QueryImportance.queryImportanceGraph(bmg, queryBMNodes);

    if (importances != null) {
        for (String id : importances.keySet()) {
            wrap.setImportance(bmg.getNode(id), importances.get(id));
        }
    }

    ImportanceMerger merger = null;
    if (cmd.hasOption("edges")) {
        merger = new ImportanceMergerEdges(wrap.getImportanceGraph());
    } else if (cmd.hasOption("paths")) {
        merger = new ImportanceMergerPaths(wrap.getImportanceGraph());
    } else {
        System.out.println("Specify either 'paths' or 'edges'.");
        System.exit(1);
    }

    if (cmd.hasOption("keepedges")) {
        merger.setKeepEdges(true);
    }

    algo.compress(merger, ratio);
    long endMillis = System.currentTimeMillis();

    // write importance
    {
        BufferedWriter wr = new BufferedWriter(new FileWriter("importance.txt", false));
        for (BMNode nod : bmg.getNodes()) {
            wr.write(nod + " " + wrap.getImportance(nod) + "\n");
        }
        wr.close();
    }

    // write uncompressed edges
    {
        BufferedWriter wr = new BufferedWriter(new FileWriter("edges.txt", false));
        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph ucom = merger.getUncompressedGraph();
        for (int i = 0; i <= orig.getMaxNodeId(); i++) {
            String iname = wrap.intToNode(i).toString();
            HashSet<Integer> ne = new HashSet<Integer>();
            ne.addAll(orig.getNeighbors(i));
            ne.addAll(ucom.getNeighbors(i));
            for (int j : ne) {
                if (i < j)
                    continue;
                String jname = wrap.intToNode(j).toString();
                double a = orig.getEdgeWeight(i, j);
                double b = ucom.getEdgeWeight(i, j);
                wr.write(iname + " " + jname + " " + a + " " + b + " " + Math.abs(a - b));
                wr.write("\n");
            }
        }
        wr.close();
    }

    // write distance
    {
        BufferedWriter wr = new BufferedWriter(new FileWriter("distance.txt", true));
        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph ucom = merger.getUncompressedGraph();
        double error = 0;
        for (int i = 0; i <= orig.getMaxNodeId(); i++) {
            HashSet<Integer> ne = new HashSet<Integer>();
            ne.addAll(orig.getNeighbors(i));
            ne.addAll(ucom.getNeighbors(i));
            for (int j : ne) {
                if (i <= j)
                    continue;
                double a = orig.getEdgeWeight(i, j);
                double b = ucom.getEdgeWeight(i, j);
                // weight the squared difference by imp(u) * imp(v)
                error += (a - b) * (a - b) * wrap.getImportance(i) * wrap.getImportance(j);
            }
        }
        error = Math.sqrt(error);
        wr.write("" + error);
        wr.write("\n");
        wr.close();
    }

    // write sizes
    {
        ImportanceGraph orig = wrap.getImportanceGraph();
        ImportanceGraph comp = merger.getCurrentGraph();
        BufferedWriter wr = new BufferedWriter(new FileWriter("sizes.txt", true));
        wr.write(orig.getNodeCount() + " " + orig.getEdgeCount() + " " + comp.getNodeCount() + " "
                + comp.getEdgeCount());
        wr.write("\n");
        wr.close();
    }

    // write time
    {
        System.out.println("writing time");
        BufferedWriter wr = new BufferedWriter(new FileWriter("time.txt", true));
        double secs = (endMillis - startMillis) * 0.001;
        wr.write("" + secs + "\n");
        wr.close();
    }

    // write change of connectivity for the edge-oriented case
    {
        if (cmd.hasOption("connectivity")) {
            BufferedWriter wr = new BufferedWriter(new FileWriter("connectivity.txt", true));
            ImportanceGraph orig = wrap.getImportanceGraph();
            ImportanceGraph ucom = merger.getUncompressedGraph();
            double diff = 0;
            for (int i = 0; i <= orig.getMaxNodeId(); i++) {
                ProbDijkstra pdori = new ProbDijkstra(orig, i);
                ProbDijkstra pducom = new ProbDijkstra(ucom, i);
                for (int j = i + 1; j <= orig.getMaxNodeId(); j++) {
                    double oriconn = pdori.getProbTo(j);
                    double ucomconn = pducom.getProbTo(j);
                    diff = diff + (oriconn - ucomconn) * (oriconn - ucomconn) * wrap.getImportance(i)
                            * wrap.getImportance(j);
                }
            }
            diff = Math.sqrt(diff);
            wr.write("" + diff);
            wr.write("\n");
            wr.close();
        }
    }

    // write output graph
    {
        BMGraph output = bmg;
        int no = 0;
        BMNode[] nodes = new BMNode[merger.getGroups().size()];
        for (ArrayList<Integer> gr : merger.getGroups()) {
            BMNode bmgroup = new BMNode("Group", "" + (no + 1));
            bmgroup.setAttributes(new HashMap<String, String>());
            bmgroup.put("autoedges", "0");
            nodes[no] = bmgroup;
            no++;
            if (gr.size() == 0)
                continue;
            for (int x : gr) {
                BMNode nod = output.getNode(wrap.intToNode(x).toString());
                BMEdge belongs = new BMEdge(nod, bmgroup, "belongs_to");
                output.ensureHasEdge(belongs);
            }
            output.ensureHasNode(bmgroup);
        }
        for (int i = 0; i < nodes.length; i++) {
            for (int x : merger.getCurrentGraph().getNeighbors(i)) {
                if (x == i) {
                    nodes[x].put("selfedge", "" + merger.getCurrentGraph().getEdgeWeight(i, x));
                    continue;
                }
                BMEdge ge = new BMEdge(nodes[x], nodes[i], "groupedge");
                ge.setAttributes(new HashMap<String, String>());
                ge.put("goodness", "" + merger.getCurrentGraph().getEdgeWeight(i, x));
                output.ensureHasEdge(ge);
            }
        }
        System.out.println(output.getGroupNodes());
        BMGraphUtils.writeBMGraph(output, "output.bmg");
    }
}
From source file:Main.java
public static void uniqe(ArrayList<String> queryDoc) {
    // copy the elements into a set, which discards duplicates
    HashSet<String> hs = new HashSet<String>();
    hs.addAll(queryDoc);
    queryDoc.clear();
    queryDoc.addAll(hs);
}
From source file:Main.java
public static <T> List<T> mergeLists(List<T> oldList, List<T> newList) {
    HashSet<T> setBoth = new HashSet<>(newList);
    setBoth.addAll(oldList);
    oldList.clear();
    oldList.addAll(setBoth);
    return oldList;
}
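Note that mergeLists deduplicates in place, so the caller's oldList is mutated and its original ordering is lost. If the inputs must stay untouched, a non-mutating variant is straightforward; this sketch (not part of the original source) assumes the same generic signature:

public static <T> List<T> mergedCopy(List<T> a, List<T> b) {
    // union of both lists, duplicates collapsed by the set
    HashSet<T> both = new HashSet<>(a);
    both.addAll(b);
    return new ArrayList<>(both);
}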
From source file:Main.java
public static void uniqe(ArrayList<Integer> items) {
    // copy the elements into a set, which discards duplicates
    HashSet<Integer> hs = new HashSet<Integer>();
    hs.addAll(items);
    items.clear();
    items.addAll(hs);
}
From source file:Main.java
public static <T> Collection<T> union(Collection<T> a, Collection<T> b) {
    HashSet<T> s = new HashSet<T>(a.size() + b.size());
    s.addAll(a);
    s.addAll(b);
    return s;
}
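A quick check of the helper above (the call site is illustrative):

Collection<String> u = union(Arrays.asList("a", "b"), Arrays.asList("b", "c"));
System.out.println(u); // e.g. [a, b, c] -- the duplicate "b" collapses

One caveat: the constructor argument a.size() + b.size() is an initial table capacity, not an element count, so with HashSet's default 0.75 load factor a resize can still occur once the set fills past three quarters of that capacity.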
From source file:Main.java
public static <T> HashSet<T> newHashSet(Collection<T> collection) {
    HashSet<T> set = new HashSet<T>();
    set.addAll(collection);
    return set;
}
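The copy constructor new HashSet<T>(collection) achieves the same result in one step, and it also presizes the backing table for the collection's element count.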
From source file:Main.java
public static <ELEMENT> HashSet<ELEMENT> newHashSet(Collection<ELEMENT> elements) {
    final HashSet<ELEMENT> set = newHashSetSized(elements.size());
    set.addAll(elements);
    return set;
}
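The helper newHashSetSized is not shown in this listing; presumably it converts an expected element count into an initial capacity so that the subsequent addAll completes without rehashing. A plausible implementation (an assumption, not the original source):

public static <ELEMENT> HashSet<ELEMENT> newHashSetSized(int expectedSize) {
    // assumed: capacity chosen so expectedSize elements fit under the default 0.75 load factor
    return new HashSet<ELEMENT>((int) (expectedSize / 0.75f) + 1);
}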